Removed all classes connected with the marytts lib

This commit is contained in:
lucio.lelii 2021-01-19 15:48:12 +01:00
parent 66869f23ba
commit 78c22c303b
18 changed files with 0 additions and 5283 deletions

View File

@ -1,799 +0,0 @@
package marytts.signalproc.display;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import javax.swing.Box;
import javax.swing.BoxLayout;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.ScrollPaneConstants;
import marytts.util.string.PrintfFormat;
/**
 * A 2D graph showing one or more functions y(x) with labelled, auto-scaled axes.
 * Supports several drawing styles per data series (line, dots, histogram),
 * x-axis zooming, position/range cursors drawn on the frame's glass pane,
 * and secondary data series overlaid on the primary one.
 * Local customisation of marytts' FunctionGraph.
 */
public class FunctionGraphCustom extends FunctionGraph
{
    private static final long serialVersionUID = 1L;

    /** Default size of the display, in pixels. */
    public static final int DEFAULT_WIDTH = 640;
    public static final int DEFAULT_HEIGHT = 480;

    // Graph styles for a data series:
    public static final int DRAW_LINE = 1;
    public static final int DRAW_DOTS = 2;
    public static final int DRAW_LINEWITHDOTS = 3;
    public static final int DRAW_HISTOGRAM = 4;

    // Dot shapes, used with DRAW_DOTS / DRAW_LINEWITHDOTS:
    public static final int DOT_FULLCIRCLE = 1;
    public static final int DOT_FULLSQUARE = 2;
    public static final int DOT_FULLDIAMOND = 3;
    public static final int DOT_EMPTYCIRCLE = 11;
    public static final int DOT_EMPTYSQUARE = 12;
    public static final int DOT_EMPTYDIAMOND = 13;

    // Pixels reserved around the drawing area for the axis labels:
    protected int paddingLeft = 40;
    protected int paddingRight = 10;
    protected int paddingTop = 10;
    protected int paddingBottom = 40;

    protected double x0;    // x value of the first data point
    protected double xStep; // x distance between two adjacent data points
    // All data series; the primary series is at index 0:
    protected List<double[]> dataseries = new ArrayList<double[]>();
    protected double ymin;
    protected double ymax;
    protected boolean showXAxis = true;
    protected boolean showYAxis = true;
    // Cached rendering of the graph; null means it must be recreated:
    protected BufferedImage graphImage = null;
    protected Color backgroundColor = Color.WHITE;
    protected Color axisColor = Color.BLACK;
    protected List<Color> graphColor = new ArrayList<Color>(); // one colour per series
    protected Color histogramBorderColor = Color.BLACK;
    protected List<Integer> graphStyle = new ArrayList<Integer>(); // one DRAW_* per series
    protected List<Integer> dotStyle = new ArrayList<Integer>(); // one DOT_* per series
    protected int dotSize = 6;
    protected int histogramWidth = 10;
    protected boolean autoYMinMax = true; // automatically determine ymin and ymax

    // data to be used for drawing cursor et al on the GlassPane:
    // x and y coordinates, in data space
    protected DoublePoint positionCursor = new DoublePoint();
    protected DoublePoint rangeCursor = new DoublePoint();
    protected List<CursorListener> cursorListeners = new ArrayList<CursorListener>();

    /**
     * Display a 2d graph showing y(x), with labelled scales.
     * This constructor is for subclasses only, which may need
     * to perform some operations before calling initialise().
     */
    protected FunctionGraphCustom()
    {
        super();
    }

    /**
     * Display a 2d graph showing y(x), with labelled scales, at the default size.
     * @param x0 x position of the first data point
     * @param xStep distance between data points on the x axis
     * @param y the function values
     */
    public FunctionGraphCustom(double x0, double xStep, double[] y) {
        this(DEFAULT_WIDTH, DEFAULT_HEIGHT, x0, xStep, y);
    }

    /**
     * Display a 2d graph showing y(x), with labelled scales.
     * @param width display width in pixels
     * @param height display height in pixels
     * @param x0 x position of the first data point
     * @param xStep distance between data points on the x axis
     * @param y the function values
     */
    public FunctionGraphCustom(int width, int height,
            double x0, double xStep, double[] y) {
        super();
        initialise(width, height, x0, xStep, y);
    }

    /**
     * Set up the component: preferred size, mouse handling for the position
     * (left button) and range (right button) cursors, the primary data series,
     * and its default style (blue line).
     * @param width display width in pixels
     * @param height display height in pixels
     * @param newX0 x position of the first data point
     * @param newXStep distance between data points on the x axis
     * @param data the function values of the primary series
     */
    public void initialise(int width, int height,
            double newX0, double newXStep, double[] data)
    {
        setPreferredSize(new Dimension(width, height));
        setOpaque(true);
        this.addMouseListener(new MouseListener() {
            public void mouseClicked(MouseEvent e) {
                if (e.getButton() == MouseEvent.BUTTON1) { // left mouse button
                    // set position cursor; if we are to the right of rangeCursor,
                    // delete rangeCursor.
                    positionCursor.x = imageX2X(e.getX()-paddingLeft);
                    positionCursor.y = imageY2Y(getHeight()-paddingBottom-e.getY());
                    if (!Double.isNaN(rangeCursor.x) && positionCursor.x > rangeCursor.x) {
                        rangeCursor.x = Double.NaN;
                    }
                } else if (e.getButton() == MouseEvent.BUTTON3) { // right mouse button
                    // set range cursor, but only if we are to the right of positionCursor
                    rangeCursor.x = imageX2X(e.getX()-paddingLeft);
                    rangeCursor.y = imageY2Y(getHeight()-paddingBottom-e.getY());
                    if (positionCursor.x > rangeCursor.x) {
                        rangeCursor.x = Double.NaN;
                    }
                }
                FunctionGraphCustom.this.notifyCursorListeners();
                FunctionGraphCustom.this.requestFocusInWindow();
            }
            public void mousePressed(MouseEvent e) {}
            public void mouseReleased(MouseEvent e) {}
            public void mouseEntered(MouseEvent e) {}
            public void mouseExited(MouseEvent e) {}
        });
        updateData(newX0, newXStep, data);
        // set styles for primary data series:
        graphColor.add(Color.BLUE);
        graphStyle.add(DRAW_LINE);
        dotStyle.add(DOT_FULLCIRCLE);
    }

    /**
     * Replace the previous data with the given new data.
     * Any secondary data series added using {@link #addDataSeries(double[], Color, int, int)} are removed.
     * @param newX0 x position of first data point
     * @param newXStep distance between data points on X axis
     * @param data all data points
     * @throws IllegalArgumentException if newXStep is not positive, or data is null or empty
     */
    public void updateData(double newX0, double newXStep, double[] data)
    {
        if (newXStep <= 0) {
            throw new IllegalArgumentException("newXStep must be >0");
        }
        if (data == null || data.length == 0) {
            throw new IllegalArgumentException("No data");
        }
        this.x0 = newX0;
        this.xStep = newXStep;
        // Defensive copy, so later changes to the caller's array cannot affect us:
        double[] series = new double[data.length];
        System.arraycopy(data, 0, series, 0, data.length);
        // Do not allow old secondary data sets with a new primary one:
        dataseries.clear();
        // Also remove the styles of the secondary data sets (index 0 = primary is kept):
        if (graphColor.size() > 1) graphColor.subList(1, graphColor.size()).clear();
        if (graphStyle.size() > 1) graphStyle.subList(1, graphStyle.size()).clear();
        if (dotStyle.size() > 1) dotStyle.subList(1, dotStyle.size()).clear();
        this.dataseries.add(0, series);
        if (autoYMinMax) {
            ymin = Double.NaN;
            ymax = Double.NaN;
            extendYMinMax(data);
        }
        // And invalidate any previous graph image:
        graphImage = null;
    }

    /**
     * Extend ymin/ymax so that they cover the given data, skipping NaN (missing)
     * values. If the resulting ymin is negative, the x axis will be painted inside
     * the plot, so much less bottom padding is needed.
     */
    private void extendYMinMax(double[] data)
    {
        for (int i=0; i<data.length; i++) {
            if (Double.isNaN(data[i])) // missing value -- skip
                continue;
            if (Double.isNaN(ymin)) {
                assert Double.isNaN(ymax);
                ymin = data[i];
                ymax = data[i];
                continue;
            }
            if (data[i] < ymin) ymin = data[i];
            else if (data[i] > ymax) ymax = data[i];
        }
        // If the x axis is painted in the middle (ymin << 0),
        // we need much less paddingBottom:
        if (ymin < 0) {
            paddingBottom = paddingTop;
        }
    }

    /**
     * Set colour, graph style and dot style of the primary data series (index 0).
     */
    public void setPrimaryDataSeriesStyle(Color newGraphColor, int newGraphStyle, int newDotStyle)
    {
        graphColor.set(0, newGraphColor);
        graphStyle.set(0, newGraphStyle);
        dotStyle.set(0, newDotStyle);
    }

    /**
     * Manually set the min and max values for the y axis; disables automatic scaling.
     * @param theYMin lower bound of the y axis
     * @param theYMax upper bound of the y axis
     */
    public void setYMinMax(double theYMin, double theYMax)
    {
        autoYMinMax = false;
        ymin = theYMin;
        ymax = theYMax;
        // If the x axis is painted in the middle (ymin << 0),
        // we need much less paddingBottom:
        if (ymin < 0) {
            paddingBottom = paddingTop;
        }
    }

    /**
     * Add a secondary data series to this graph.
     * @param data the function data, which must be of same length as the original data. {@link #updateData(double, double, double[])}
     * @param newGraphColor a colour
     * @param newGraphStyle the style for painting this data series. One of {@link #DRAW_LINE}, {@link #DRAW_DOTS}, {@value #DRAW_LINEWITHDOTS}, {@link #DRAW_HISTOGRAM}.
     * @param newDotStyle the shape of any dots to use (meaningful only with newGraphStyle == {@link #DRAW_DOTS} or {@link #DRAW_LINEWITHDOTS}).
     * One of {@link #DOT_EMPTYCIRCLE}, {@link #DOT_EMPTYDIAMOND}, {@link #DOT_EMPTYSQUARE}, {@link #DOT_FULLCIRCLE}, {@link #DOT_FULLDIAMOND}, {@link #DOT_FULLSQUARE}.
     * For other graph styles, this is ignored, and it is recommended to set it to -1 for clarity.
     * @throws NullPointerException if data is null
     * @throws IllegalArgumentException if data has a different length than the primary series
     */
    public void addDataSeries(double[] data, Color newGraphColor, int newGraphStyle, int newDotStyle)
    {
        if (data == null) throw new NullPointerException("Cannot add null data");
        if (dataseries.get(0).length != data.length)
            throw new IllegalArgumentException("Can only add data of the exact same length as the original data series; len(orig)="
                    +dataseries.get(0).length+", len(data)="+data.length);
        // Defensive copy:
        double[] series = new double[data.length];
        System.arraycopy(data, 0, series, 0, data.length);
        dataseries.add(series);
        graphColor.add(newGraphColor);
        graphStyle.add(newGraphStyle);
        dotStyle.add(newDotStyle);
        if (autoYMinMax) {
            extendYMinMax(data);
        }
        // And invalidate any previous graph image:
        graphImage = null;
    }

    /**
     * @return the current x zoom factor, i.e. pixels per data point of the primary series.
     */
    public double getZoomX()
    {
        double[] data = dataseries.get(0);
        double zoom = ((double)getPreferredSize().width-paddingLeft-paddingRight) / data.length;
        return zoom;
    }

    /**
     * Set the zoom of the X axis.
     * @param factor the zoom factor for X; 1 means that each data point corresponds to one pixel;
     * 0.5 means that 2 data points are mapped onto one pixel; etc.
     */
    public void setZoomX(double factor)
    {
        // Old visible rectangle:
        Rectangle r = getVisibleRect();
        int oldWidth = getPreferredSize().width;
        double[] data = dataseries.get(0);
        int newWidth = (int)(data.length*factor)+paddingLeft+paddingRight;
        if (isVisible()) {
            setVisible(false);
            setPreferredSize(new Dimension(newWidth, getPreferredSize().height));
            // Only scroll to center of what was previous visible if not at left end:
            if (r.x != 0) {
                Rectangle newVisibleRect = new Rectangle((r.x+r.width/2-paddingLeft)*newWidth/oldWidth-r.width/2+paddingLeft, r.y, r.width, r.height);
                scrollRectToVisible(newVisibleRect);
            }
            setVisible(true);
        } else {
            setPreferredSize(new Dimension(newWidth, getPreferredSize().height));
            createGraphImage();
        }
    }

    /**
     * (Re)create the cached off-screen image of the graph at the current
     * component size: background, all data series, and (optionally) both axes.
     */
    protected void createGraphImage()
    {
        graphImage = new BufferedImage(getWidth(), getHeight(), BufferedImage.TYPE_INT_RGB);
        // createGraphics() already returns a Graphics2D; no cast needed.
        Graphics2D g = graphImage.createGraphics();
        try {
            double width = getWidth();
            double height = getHeight();
            int image_fromX = 0;
            int image_toX = (int) width;
            g.setBackground(backgroundColor);
            g.clearRect(0, 0, (int) width, (int) height);
            g.setFont(new Font("Courier", Font.PLAIN, 10));
            // Now reduce the drawing area:
            int startX = paddingLeft;
            int startY = (int)height - paddingBottom;
            width -= paddingLeft + paddingRight;
            height -= paddingTop + paddingBottom;
            // Make sure we are not trying to draw the function outside its area:
            if (image_fromX < startX) image_fromX = startX;
            if (image_toX > startX + width) image_toX = (int) (startX + width);
            int image_y_origin;
            if (getYRange() == 0) image_y_origin = startY;
            else image_y_origin = startY - (int) ((-ymin/getYRange()) * height);
            int image_x_origin = startX + (int) ((-x0/getXRange()) * width);
            // Draw the function itself:
            if (getYRange() > 0) {
                for (int s=0; s<dataseries.size(); s++) {
                    drawData(g, image_fromX-startX, image_toX-startX, startX, image_y_origin, startY, (int) height,
                            dataseries.get(s), graphColor.get(s), graphStyle.get(s), dotStyle.get(s));
                }
            }
            // Draw the x axis, if requested:
            if (showXAxis) {
                if (startY >= image_y_origin && image_y_origin >= startY-height) {
                    drawXAxis(g, width, startX, startY, image_y_origin);
                } else { // draw x axis at the bottom, even if that is not 0:
                    drawXAxis(g, width, startX, startY, startY);
                }
            }
            // Draw the y axis, if requested:
            if (showYAxis) {
                if (image_fromX <= image_x_origin && image_x_origin <= image_toX) {
                    drawYAxis(g, height, startX, startY, image_x_origin);
                } else { // draw y axis at the left, even if that is not 0:
                    drawYAxis(g, height, startX, startY, startX);
                }
            }
        } finally {
            // Release the native resources held by the graphics context:
            g.dispose();
        }
    }

    /**
     * While painting the graph, draw the actual function data.
     * @param g the graphics2d object to paint in
     * @param image_fromX first visible X coordinate of the Graph display area (= after subtracting space reserved for Y axis)
     * @param image_toX last visible X coordinate of the Graph display area (= after subtracting space reserved for Y axis)
     * @param image_refX X coordinate of the origin, in the display area
     * @param image_refY Y coordinate of the origin, in the display area
     * @param startY the start position on the Y axis (= the lower bound of the drawing area)
     * @param image_height the height of the drawable region for the y values
     * @param data the function values of the series to draw
     * @param currentGraphColor colour for this series
     * @param currentGraphStyle one of DRAW_LINE, DRAW_DOTS, DRAW_LINEWITHDOTS, DRAW_HISTOGRAM
     * @param currentDotStyle one of the DOT_* constants; used only when dots are drawn
     */
    protected void drawData(Graphics2D g,
            int image_fromX, int image_toX,
            int image_refX, int image_refY,
            int startY, int image_height,
            double[] data, Color currentGraphColor, int currentGraphStyle, int currentDotStyle)
    {
        int index_fromX = imageX2indexX(image_fromX);
        if (index_fromX < 0) index_fromX = 0;
        // Draw a few points past the visible area so lines reach the border:
        int index_toX = imageX2indexX(image_toX);
        if (index_toX < data.length) index_toX += 20;
        if (index_toX > data.length) index_toX = data.length;
        // NOTE(review): xo/yo start at 0, so in DRAW_LINE mode the very first
        // segment is drawn from the origin to the first point -- presumably a
        // long-standing artifact; confirm before changing.
        double xo = 0.0;
        double yo = 0.0;
        double xp = 0.0;
        double yp = 0.0;
        g.setColor(currentGraphColor);
        for (int i = index_fromX; i < index_toX; i++) {
            if (!Double.isNaN(data[i])) {
                xp = indexX2imageX(i);
                yp = y2imageY(data[i]);
                if (currentGraphStyle == DRAW_LINE || currentGraphStyle == DRAW_LINEWITHDOTS) {
                    g.drawLine(image_refX+(int)xo, image_refY-(int)yo, image_refX+(int)xp, image_refY-(int)yp);
                }
                if (currentGraphStyle == DRAW_DOTS || currentGraphStyle == DRAW_LINEWITHDOTS) {
                    drawDot(g, image_refX+(int)xp, image_refY-(int)yp, currentDotStyle);
                }
                if (currentGraphStyle == DRAW_HISTOGRAM) {
                    int topY = image_refY;
                    if (yp>0) topY = image_refY-(int)yp;
                    int histHeight = (int) Math.abs(yp);
                    // cut to drawing area if x axis not at y==0:
                    if (topY+histHeight>startY) {
                        histHeight = startY-topY;
                    }
                    g.setColor(currentGraphColor);
                    g.fillRect(image_refX+(int)xp-histogramWidth/2, topY, histogramWidth, histHeight);
                    g.setColor(histogramBorderColor);
                    g.drawRect(image_refX+(int)xp-histogramWidth/2, topY, histogramWidth, histHeight);
                }
                xo = xp;
                yo = yp;
            }
        }
    }

    /**
     * Draw one dot of the given DOT_* shape, centered at (x, y), with side/diameter dotSize.
     */
    protected void drawDot(Graphics2D g, int x, int y, int currentDotStyle)
    {
        switch(currentDotStyle) {
        case DOT_FULLCIRCLE:
            g.fillOval(x-dotSize/2, y-dotSize/2, dotSize, dotSize);
            break;
        case DOT_FULLSQUARE:
            g.fillRect(x-dotSize/2, y-dotSize/2, dotSize, dotSize);
            break;
        case DOT_FULLDIAMOND:
            g.fillPolygon(new int[]{x-dotSize/2, x, x+dotSize/2, x},
                    new int[]{y, y-dotSize/2, y, y+dotSize/2}, 4);
            break;
        case DOT_EMPTYCIRCLE:
            g.drawOval(x-dotSize/2, y-dotSize/2, dotSize, dotSize);
            break;
        case DOT_EMPTYSQUARE:
            g.drawRect(x-dotSize/2, y-dotSize/2, dotSize, dotSize);
            break;
        case DOT_EMPTYDIAMOND:
            g.drawPolygon(new int[]{x-dotSize/2, x, x+dotSize/2, x},
                    new int[]{y, y-dotSize/2, y, y+dotSize/2}, 4);
            break;
        default:
            // unknown style -- draw nothing
            break;
        }
    }

    /**
     * Draw the y axis with tick marks and labels, choosing a "nice" unit distance
     * (a power of ten, possibly scaled by 2 or 5) so that labels are 50-100 pixels apart.
     */
    protected void drawYAxis(Graphics2D g, double height, int startX, int startY, int image_x_origin) {
        g.setColor(axisColor);
        double yRange = getYRange();
        g.drawLine(image_x_origin, startY, image_x_origin, startY-(int)height);
        // Do not try to draw units if yRange is 0:
        if (yRange == 0) return;
        // Units on the y axis:
        // major units with labels every 50-100 pixels
        int unitOrder = (int)Math.floor(Math.log(yRange/5)/Math.log(10));
        double unitDistance = Math.pow(10, unitOrder);
        double image_unitDistance = unitDistance/yRange * height;
        if (image_unitDistance < 20) {
            unitDistance *= 5;
        } else if (image_unitDistance < 50) {
            unitDistance *= 2;
        }
        // Start ticking at the first multiple of unitDistance at or above ymin:
        double unitStart = ymin;
        double modulo = ymin%unitDistance;
        if (modulo != 0) {
            if (modulo > 0)
                unitStart += unitDistance - modulo;
            else // < 0
                unitStart += Math.abs(modulo);
        }
        PrintfFormat labelFormat;
        if (unitOrder > 0) {
            labelFormat = new PrintfFormat("%.0f");
        } else {
            labelFormat = new PrintfFormat("%." + (-unitOrder) + "f");
        }
        for (double i=unitStart; i<ymax; i+= unitDistance) {
            double yunit = (i-ymin)/yRange * height;
            g.drawLine(image_x_origin+5, startY-(int)yunit, image_x_origin-5, startY-(int)yunit);
            // labels to the left of y axis:
            g.drawString(labelFormat.sprintf(i), image_x_origin-30, startY-(int)yunit+5);
        }
    }

    /**
     * Draw the x axis with tick marks and labels, choosing a "nice" unit distance
     * (a power of ten, possibly scaled by 2 or 5) so that labels are 50-100 pixels apart.
     */
    protected void drawXAxis(Graphics2D g, double width, int startX, int startY, int image_y_origin) {
        g.setColor(axisColor);
        double xRange = getXRange();
        g.drawLine(startX, image_y_origin, startX+(int)width, image_y_origin);
        // Units on the x axis:
        // major units with labels every 50-100 pixels
        int nUnits = (int) width / 50;
        if (nUnits == 0) nUnits = 1; // guard against division by zero for very narrow graphs
        int unitOrder = (int) Math.floor(Math.log(xRange/nUnits)/Math.log(10));
        double unitDistance = Math.pow(10, unitOrder);
        double image_unitDistance = unitDistance/xRange * width;
        if (image_unitDistance < 20) {
            unitDistance *= 5;
        } else if (image_unitDistance < 50) {
            unitDistance *= 2;
        }
        // Start ticking at the first multiple of unitDistance at or above x0:
        double unitStart = x0;
        double modulo = x0%unitDistance;
        if (modulo != 0) {
            if (modulo > 0)
                unitStart += unitDistance - modulo;
            else // < 0
                unitStart += Math.abs(modulo);
        }
        PrintfFormat labelFormat;
        if (unitOrder > 0) {
            labelFormat = new PrintfFormat("%.0f");
        } else {
            labelFormat = new PrintfFormat("%." + (-unitOrder) + "f");
        }
        for (double i=unitStart; i<x0+xRange; i+= unitDistance) {
            double xunit = (i-x0)/xRange * width;
            g.drawLine(startX+(int)xunit, image_y_origin+5, startX+(int)xunit, image_y_origin-5);
            // labels below x axis:
            g.drawString(labelFormat.sprintf(i), startX + (int)xunit-10, image_y_origin+20);
        }
    }

    /**
     * Paint the component by blitting the cached graph image, first recreating
     * it if it is missing or no longer matches the component size.
     */
    public void paintComponent(Graphics gr) {
        if (graphImage == null
                || getWidth() != graphImage.getWidth()
                || getHeight() != graphImage.getHeight()) {
            createGraphImage();
        }
        Graphics2D g = (Graphics2D) gr;
        g.drawImage(graphImage, null, null);
    }

    /** Convert a pixel x offset (inside the drawing area) into a data-point index. */
    protected int imageX2indexX(int imageX)
    {
        if (dataseries.isEmpty()) return 0;
        double[] data = dataseries.get(0);
        if (data == null) return 0;
        double xScaleFactor = ((double) getWidth()-paddingLeft-paddingRight)/data.length;
        return (int) (imageX / xScaleFactor);
    }

    /** Convert a pixel x offset (inside the drawing area) into a data-space x value. */
    protected double imageX2X(int imageX)
    {
        double[] data = dataseries.get(0);
        double xScaleFactor = ((double)getWidth()-paddingLeft-paddingRight)/(data.length*xStep);
        return x0 + imageX / xScaleFactor;
    }

    /** Convert a data-point index into a pixel x offset inside the drawing area. */
    protected int indexX2imageX(int indexX)
    {
        if (dataseries.isEmpty()) return 0;
        double[] data = dataseries.get(0);
        if (data == null) return 0;
        double xScaleFactor = ((double)getWidth()-paddingLeft-paddingRight)/data.length;
        return (int) (indexX * xScaleFactor);
    }

    /** Convert a data-space x value into a pixel x offset inside the drawing area. */
    protected int X2imageX(double x)
    {
        double[] data = dataseries.get(0);
        double xScaleFactor = ((double)getWidth()-paddingLeft-paddingRight)/(data.length*xStep);
        return (int) ((x - x0) * xScaleFactor);
    }

    /** Convert a data-space x value into a data-point index. */
    protected int X2indexX(double x)
    {
        return (int) ((x - x0) / xStep);
    }

    /** Convert a pixel y offset (from the x axis) into a data-space y value. */
    protected double imageY2Y(int imageY)
    {
        double yScaleFactor = ((double) getHeight()-paddingTop-paddingBottom)/getYRange();
        return imageY / yScaleFactor;
    }

    /** Convert a data-space y value into a pixel y offset from the x axis. */
    protected int y2imageY(double y)
    {
        double yScaleFactor = ((double) getHeight()-paddingTop-paddingBottom)/getYRange();
        return (int) (y * yScaleFactor);
    }

    /** @return ymax - ymin, or 0 if that difference is NaN. */
    protected double getYRange()
    {
        double yRange = ymax - ymin;
        if (Double.isNaN(yRange)) yRange = 0;
        return yRange;
    }

    /** @return the extent of the x axis in data space. */
    protected double getXRange()
    {
        double[] data = dataseries.get(0);
        double xRange = data.length * xStep;
        return xRange;
    }

    /** @return a glass-pane line for the position cursor, or null if it is unset. */
    public CursorDisplayer.CursorLine getPositionCursor()
    {
        if (Double.isNaN(positionCursor.x)) return null;
        return new CursorDisplayer.CursorLine(this, paddingLeft+X2imageX(positionCursor.x),
                paddingTop, getHeight()-paddingBottom);
    }

    /** @return a yellow glass-pane line for the range cursor, or null if it is unset. */
    public CursorDisplayer.CursorLine getRangeCursor()
    {
        if (Double.isNaN(rangeCursor.x)) return null;
        return new CursorDisplayer.CursorLine(this, paddingLeft+X2imageX(rangeCursor.x),
                paddingTop, getHeight()-paddingBottom, Color.YELLOW);
    }

    /** @return a glass-pane label showing the value at the position cursor, or null if it is unset. */
    public CursorDisplayer.Label getValueLabel()
    {
        if (Double.isNaN(positionCursor.x)) return null;
        int imageX = X2imageX(positionCursor.x) + 10;
        int imageY = paddingTop + 10;
        return new CursorDisplayer.Label(this, getLabel(positionCursor.x, positionCursor.y),
                imageX, imageY);
    }

    /** Register a listener to be notified whenever a cursor position changes. */
    public void addCursorListener(CursorListener l)
    {
        cursorListeners.add(l);
    }

    /** @return all currently registered cursor listeners. */
    public CursorListener[] getCursorListeners()
    {
        return cursorListeners.toArray(new CursorListener[0]);
    }

    /** Unregister a cursor listener; returns true if it was registered. */
    public boolean removeCursorListener(CursorListener l)
    {
        return cursorListeners.remove(l);
    }

    /** Notify every registered cursor listener of the current cursor positions. */
    protected void notifyCursorListeners()
    {
        for (CursorListener l : cursorListeners) {
            l.updateCursorPosition(new CursorEvent(this));
        }
    }

    /**
     * Used when keeping several FunctionGraphs' cursor positions in synchrony.
     * Register each other as cursor listeners before the glass pane; whichever gets
     * clicked causes the others to be updated. Make sure to add any peers _before_
     * any displaying cursor listeners, to make sure all are in line before being
     * displayed.
     */
    public void updateCursorPosition(CursorEvent e)
    {
        FunctionGraph source = e.getSource();
        positionCursor.x = source.positionCursor.x;
        rangeCursor.x = source.rangeCursor.x;
    }

    /** Show this graph in a new frame at the default size, with controls shown. */
    public JFrame showInJFrame(String title, boolean allowZoom, boolean exitOnClose)
    {
        return showInJFrame(title, DEFAULT_WIDTH, DEFAULT_HEIGHT + 50, allowZoom, true, exitOnClose);
    }

    /** Show this graph in a new frame at the default size. */
    public JFrame showInJFrame(String title, boolean allowZoom, boolean showControls, boolean exitOnClose)
    {
        return showInJFrame(title, DEFAULT_WIDTH, DEFAULT_HEIGHT + 50, allowZoom, showControls, exitOnClose);
    }

    /** Show this graph in a new frame of the given size, with controls shown. */
    public JFrame showInJFrame(String title, int width, int height, boolean allowZoom, boolean exitOnClose)
    {
        return showInJFrame(title, width, height, allowZoom, true, exitOnClose);
    }

    /**
     * Show this graph in a new frame: a scroll pane around the graph, a glass pane
     * displaying the cursors, and optionally a side panel with zoom buttons and
     * subclass-specific controls.
     * @param title the frame title
     * @param width frame width in pixels (widened if the zoom panel is shown)
     * @param height frame height in pixels
     * @param allowZoom whether to add the zoom in/out buttons
     * @param showControls whether to add the controls from {@link #getControls()}
     * @param exitOnClose whether closing the frame terminates the JVM
     * @return the visible frame
     */
    public JFrame showInJFrame(String title, int width, int height, boolean allowZoom, boolean showControls, boolean exitOnClose)
    {
        final JFrame main = new JFrame(title);
        int mainWidth = width;
        JScrollPane scroll = new JScrollPane(this);
        scroll.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_AS_NEEDED);
        main.getContentPane().add(scroll, BorderLayout.CENTER);
        // The glass pane draws the position/range cursors on top of the graph:
        final CursorDisplayer glass = new CursorDisplayer();
        main.setGlassPane(glass);
        glass.setVisible(true);
        glass.addCursorSource(this);
        this.addCursorListener(glass);
        if (allowZoom) {
            JPanel zoomPanel = new JPanel();
            zoomPanel.setLayout(new BoxLayout(zoomPanel, BoxLayout.Y_AXIS));
            main.getContentPane().add(zoomPanel, BorderLayout.WEST);
            zoomPanel.add(Box.createVerticalGlue());
            JButton zoomIn = new JButton("Zoom In");
            zoomIn.setAlignmentX(CENTER_ALIGNMENT);
            zoomIn.addActionListener(new ActionListener() {
                public void actionPerformed(ActionEvent evt) {
                    setZoomX(getZoomX()*2);
                    FunctionGraphCustom.this.requestFocus();
                }
            });
            zoomPanel.add(zoomIn);
            JButton zoomOut = new JButton("Zoom Out");
            zoomOut.setAlignmentX(CENTER_ALIGNMENT);
            zoomOut.addActionListener(new ActionListener() {
                public void actionPerformed(ActionEvent evt) {
                    setZoomX(getZoomX()*0.5);
                    FunctionGraphCustom.this.requestFocus();
                }
            });
            zoomPanel.add(zoomOut);
            if (showControls) {
                JPanel controls = getControls();
                if (controls != null) {
                    zoomPanel.add(Box.createVerticalGlue());
                    controls.setAlignmentX(CENTER_ALIGNMENT);
                    zoomPanel.add(controls);
                }
            }
            mainWidth += zoomPanel.getPreferredSize().width + 30;
            zoomPanel.add(Box.createVerticalGlue());
        }
        main.setSize(mainWidth, height);
        if (exitOnClose) {
            main.addWindowListener(new java.awt.event.WindowAdapter() {
                public void windowClosing(java.awt.event.WindowEvent evt) {
                    System.exit(0);
                }
            });
        }
        main.setVisible(true);
        this.requestFocus();
        return main;
    }

    /**
     * Subclasses may provide specific controls here.
     * @return a JPanel filled with the controls, or null if none are to be provided.
     */
    protected JPanel getControls()
    {
        return null;
    }

    /**
     * Build the "f(x)=y" label shown next to the position cursor, choosing the
     * number of decimals from the x/y ranges and the pixel resolution.
     */
    protected String getLabel(double x, double y)
    {
        // be about one order of magnitude less precise than there are pixels
        int pixelPrecisionX = 2;
        if (graphImage != null) {
            pixelPrecisionX = (int) (Math.log(graphImage.getWidth()/getXRange())/Math.log(10));
        }
        int precisionX = -(int)(Math.log(getXRange())/Math.log(10)) + pixelPrecisionX;
        if (precisionX < 0) precisionX = 0;
        // ignore imageY
        int precisionY = -(int)(Math.log(getYRange())/Math.log(10)) + 2;
        if (precisionY < 0) precisionY = 0;
        int indexX = X2indexX(x);
        double[] data = dataseries.get(0);
        return "f(" + new PrintfFormat("%."+precisionX+"f").sprintf(x)
                + ")=" + new PrintfFormat("%."+precisionY+"f").sprintf(data[indexX]);
    }

    /**
     * A mutable (x, y) point in data space; NaN coordinates mean "unset".
     * Kept as a non-static inner class for API compatibility with existing callers.
     */
    public class DoublePoint
    {
        public DoublePoint()
        {
            this(Double.NaN, Double.NaN);
        }
        public DoublePoint(double x, double y)
        {
            this.x = x;
            this.y = y;
        }
        double x;
        double y;
    }
}

View File

@ -1,717 +0,0 @@
package marytts.signalproc.display;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.ItemEvent;
import java.awt.event.ItemListener;
import java.awt.event.MouseEvent;
import java.awt.event.MouseListener;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Hashtable;
import java.util.List;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.swing.BoxLayout;
import javax.swing.JCheckBox;
import javax.swing.JComboBox;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JSlider;
import javax.swing.SwingUtilities;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import marytts.signalproc.analysis.CepstrumSpeechAnalyser;
import marytts.signalproc.analysis.FrameBasedAnalyser;
import marytts.signalproc.analysis.LpcAnalyser;
import marytts.signalproc.analysis.ShortTermLogSpectrumAnalyser;
import marytts.signalproc.filter.FIRFilter;
import marytts.signalproc.window.HammingWindow;
import marytts.signalproc.window.RectWindow;
import marytts.signalproc.window.Window;
import marytts.util.data.BufferedDoubleDataSource;
import marytts.util.math.ArrayUtils;
import marytts.util.math.FFT;
import marytts.util.math.MathUtils;
import marytts.util.string.PrintfFormat;
public class SpectrogramCustom extends FunctionGraphCustom
{
private static final long serialVersionUID = 1L;
// Default analysis parameters for the short-term spectrum computation:
public static final int DEFAULT_WINDOWSIZE = 65;
public static final int DEFAULT_WINDOW = Window.HAMMING;
public static final int DEFAULT_WINDOWSHIFT = 32;
public static final int DEFAULT_FFTSIZE = 256;
protected static final double PREEMPHASIS = 6.0; // dB per Octave
protected static final double DYNAMIC_RANGE = 40.0; // dB below global maximum to show
protected static final double FREQ_MAX = 8000.0; // Hz of upper limit frequency to show
// The raw audio samples being analysed, and their sampling rate in Hz:
protected double[] signal;
protected int samplingRate;
// Windowing configuration used by the spectrum analyser:
protected Window window;
protected int windowShift;
protected int fftSize;
// Auxiliary per-cursor graphs refreshed on mouse clicks (see initialiseDependentWindows):
protected GraphAtCursor[] graphsAtCursor = new GraphAtCursor[] {
new SpectrumAtCursor(),
new PhasogramAtCursor(),
new LPCAtCursor(),
new CepstrumAtCursor(),
};
// One log spectrum (double[]) per analysis frame:
public List<double[]> spectra;
// Global extrema over all spectra (after preemphasis), used for the grey-scale mapping:
protected double spectra_max = 0.;
protected double spectra_min = 0.;
protected double deltaF = 0.; // distance in Hz between two spectrum samples
public int spectra_indexmax = 0; // index in each spectrum corresponding to FREQ_MAX
/**
 * Create a spectrogram of the given signal using the default analysis
 * settings and the default display size.
 * @param signal the audio samples to analyse
 * @param samplingRate the sampling rate of the signal, in Hz
 */
public SpectrogramCustom(double[] signal, int samplingRate)
{
this(signal, samplingRate, DEFAULT_WIDTH, DEFAULT_HEIGHT);
}
/**
 * Create a spectrogram of the given signal using the default analysis
 * settings and the given display size.
 * @param signal the audio samples to analyse
 * @param samplingRate the sampling rate of the signal, in Hz
 * @param width display width in pixels
 * @param height display height in pixels
 */
public SpectrogramCustom(double[] signal, int samplingRate, int width, int height)
{
this(signal, samplingRate, Window.get(DEFAULT_WINDOW, DEFAULT_WINDOWSIZE), DEFAULT_WINDOWSHIFT, DEFAULT_FFTSIZE, width, height);
}
/**
 * Create a spectrogram of the given signal with fully specified analysis settings.
 * @param signal the audio samples to analyse
 * @param samplingRate the sampling rate of the signal, in Hz
 * @param window the analysis window applied to each frame
 * @param windowShift frame shift in samples
 * @param fftSize number of FFT points per frame
 * @param width display width in pixels
 * @param height display height in pixels
 */
public SpectrogramCustom(double[] signal, int samplingRate, Window window, int windowShift, int fftSize, int width, int height)
{
initialise(signal, samplingRate, window, windowShift, fftSize, width, height);
}
/**
 * Create a spectrogram directly from precomputed spectra instead of a signal.
 * NOTE(review): unlike the other constructors, this one bypasses
 * super.initialise()/updateData() (see the commented-out calls below), and the
 * windowShift parameter is only used by the commented-out call -- presumably
 * the caller sets up size and x axis elsewhere; confirm before reusing.
 * @param spectrum one precomputed spectrum (double[]) per analysis frame
 * @param samplingRate the sampling rate of the underlying signal, in Hz
 * @param windowShift frame shift in samples (currently unused, see note above)
 */
public SpectrogramCustom(double[][] spectrum, int samplingRate, int windowShift)
{
spectra = new ArrayList<double[]>();
for (int i=0;i<spectrum.length;i++){
spectra.add(spectrum[i]);
}
this.samplingRate = samplingRate;
this.fftSize = spectrum[0].length;
// spectra_indexmax = fftSize/2; // == spectra[i].length
super.dataseries=spectra;
// super.updateData(0, (double)windowShift/samplingRate, new double[spectra.size()]);
// correct y axis boundaries, for graph:
ymin = 0.;
ymax = fftSize;
repaint();
initialiseDependentWindows();
}
/**
 * Store the analysis configuration, initialise the underlying graph component,
 * compute all spectra, and hook up the per-cursor auxiliary graphs.
 * @param aSignal the audio samples to analyse
 * @param aSamplingRate sampling rate in Hz
 * @param aWindow analysis window applied to each frame
 * @param aWindowShift frame shift in samples
 * @param aFftSize number of FFT points per frame
 * @param width display width in pixels
 * @param height display height in pixels
 */
protected void initialise(double[] aSignal, int aSamplingRate, Window aWindow, int aWindowShift, int aFftSize, int width, int height)
{
this.signal = aSignal;
this.samplingRate = aSamplingRate;
this.window = aWindow;
this.windowShift = aWindowShift;
this.fftSize = aFftSize;
// Placeholder data; update() below replaces it with one entry per analysis frame:
super.initialise(width, height, 0, (double)aWindowShift/aSamplingRate, new double[10]);
update();
initialiseDependentWindows();
}
/**
 * Recompute all short-term log spectra of the current signal, apply the
 * preemphasis, track the global spectra_min/spectra_max, and refresh the graph.
 */
protected void update()
{
ShortTermLogSpectrumAnalyser spectrumAnalyser = new ShortTermLogSpectrumAnalyser
(new BufferedDoubleDataSource(signal), fftSize, window, windowShift, samplingRate);
spectra = new ArrayList<double[]>();
// Frequency resolution of the FFT:
deltaF = spectrumAnalyser.getFrequencyResolution();
long startTime = System.currentTimeMillis();
spectra_max = Double.NaN;
spectra_min = Double.NaN;
FrameBasedAnalyser.FrameAnalysisResult<double[]>[] results = spectrumAnalyser.analyseAllFrames();
for (int i=0; i<results.length; i++) {
double[] spectrum = (double[]) results[i].get();
spectra.add(spectrum);
// Still do the preemphasis inline:
for (int j=0; j<spectrum.length; j++) {
// Boost of PREEMPHASIS dB per octave; zero at 1 kHz by this formula:
double freqPreemphasis = PREEMPHASIS / Math.log(2) * Math.log((j+1)*deltaF/1000.);
spectrum[j] += freqPreemphasis;
if (Double.isNaN(spectra_min) || spectrum[j] < spectra_min) {
spectra_min = spectrum[j];
}
if (Double.isNaN(spectra_max) || spectrum[j] > spectra_max) {
spectra_max = spectrum[j];
}
}
}
long endTime = System.currentTimeMillis();
System.err.println("Computed " + spectra.size() + " spectra in " + (endTime-startTime) + " ms.");
// Only display frequencies up to FREQ_MAX, capped at the Nyquist bin:
spectra_indexmax = (int) (FREQ_MAX / deltaF);
if (spectra_indexmax > fftSize/2)
spectra_indexmax = fftSize/2; // == spectra[i].length
// Placeholder series: one entry per analysis frame, spaced by the frame shift in seconds:
super.updateData(0, (double)windowShift/samplingRate, new double[spectra.size()]);
// correct y axis boundaries, for graph:
ymin = 0.;
ymax = spectra_indexmax * deltaF;
repaint();
}
/**
 * Register a mouse listener that, on each click, refreshes every currently
 * shown per-cursor graph (spectrum, phasogram, LPC, cepstrum) at the clicked
 * time position.
 */
protected void initialiseDependentWindows()
{
addMouseListener(new MouseListener() {
public void mouseClicked(MouseEvent e) {
// Convert the click position into a time value in data space:
int imageX = e.getX()-paddingLeft;
double x = imageX2X(imageX);
for (int i=0; i<graphsAtCursor.length; i++) {
if (graphsAtCursor[i].show) {
graphsAtCursor[i].update(x);
}
}
}
public void mousePressed(MouseEvent e) {}
public void mouseReleased(MouseEvent e) {}
public void mouseEntered(MouseEvent e) {}
public void mouseExited(MouseEvent e) {}
});
}
/**
 * While painting the graph, draw the spectrogram: one vertical grey-scale
 * strip per visible analysis frame, taken from {@link #spectra}. The series
 * data passed in is only a placeholder and is ignored.
 * @param g the graphics2d object to paint in
 * @param image_fromX first visible X coordinate of the Graph display area (= after subtracting space reserved for Y axis)
 * @param image_toX last visible X coordinate of the Graph display area (= after subtracting space reserved for Y axis)
 * @param image_refX X coordinate of the origin, in the display area
 * @param image_refY Y coordinate of the origin, in the display area
 * @param startY the start position on the Y axis (= the lower bound of the drawing area); unused here
 * @param image_height the height of the drawable region for the y values
 * @param data placeholder series (one entry per frame); values are ignored
 * @param currentGraphColor ignored; colours come from the grey-scale mapping
 * @param currentGraphStyle ignored
 * @param currentDotStyle ignored
 */
@Override
protected void drawData(Graphics2D g,
int image_fromX, int image_toX,
int image_refX, int image_refY,
int startY, int image_height,
double[] data, Color currentGraphColor, int currentGraphStyle, int currentDotStyle)
{
int index_fromX = imageX2indexX(image_fromX);
int index_toX = imageX2indexX(image_toX);
//System.err.println("Drawing spectra from image " + image_fromX + " to " + image_toX);
for (int i=index_fromX; i<index_toX; i++) {
//System.err.println("Drawing spectrum " + i);
// Width of one frame on screen; at least one pixel:
int spectrumWidth = indexX2imageX(1);
if (spectrumWidth == 0) spectrumWidth = 1;
drawSpectrum(g, (double[])spectra.get(i), image_refX + indexX2imageX(i), spectrumWidth, image_refY, image_height);
}
}
/**
 * Draw one vertical spectrum slice as a stack of grayscale rectangles.
 * Energy within DYNAMIC_RANGE below the global maximum is mapped linearly
 * to gray levels (loud = black, quiet = white); everything else — including
 * NaN values and bins beyond the spectrum's length — is painted white.
 */
protected void drawSpectrum(Graphics2D g, double[] spectrum, int image_X, int image_width, int image_refY, int image_height)
{
    double binScale = (double) image_height / spectra_indexmax;
    int sliceWidth = (image_width < 2) ? 2 : image_width;
    // Round the bin height up so adjacent rectangles leave no gaps.
    int binHeight = (int) Math.ceil(binScale);
    if (binHeight < 2) {
        binHeight = 2;
    }
    for (int bin = 0; bin < spectra_indexmax; bin++) {
        boolean whiteout = bin >= spectrum.length
                || Double.isNaN(spectrum[bin])
                || spectrum[bin] < spectra_max - DYNAMIC_RANGE;
        int gray = whiteout ? 255 : (int) (255 * (spectra_max - spectrum[bin]) / DYNAMIC_RANGE);
        g.setColor(new Color(gray, gray, gray));
        g.fillRect(image_X, image_refY - (int) (bin * binScale), sliceWidth, binHeight);
    }
}
/**
 * Look up the spectrum slice covering time t.
 * @param t time in seconds
 * @return the spectrum at that time, or null if t lies outside the data range
 */
public double[] getSpectrumAtTime(double t)
{
    int sliceIndex = (int) ((t - x0) / xStep);
    boolean inRange = sliceIndex >= 0 && sliceIndex < spectra.size();
    return inRange ? (double[]) spectra.get(sliceIndex) : null;
}
/**
 * Build the status-line label "E(x,y)=value" for a cursor position, choosing
 * the display precision of x and y from the current axis ranges (two
 * significant digits beyond the range's order of magnitude).
 */
protected String getLabel(double x, double y)
{
    int precisionX = -(int) (Math.log(getXRange()) / Math.log(10)) + 2;
    if (precisionX < 0) {
        precisionX = 0;
    }
    int precisionY = -(int) (Math.log(getYRange()) / Math.log(10)) + 2;
    if (precisionY < 0) {
        precisionY = 0;
    }
    double[] spectrum = (double[]) spectra.get(X2indexX(x));
    double energy = spectrum[Y2indexY(y)];
    // Energy is always shown with one decimal place.
    StringBuilder label = new StringBuilder("E(");
    label.append(new PrintfFormat("%." + precisionX + "f").sprintf(x));
    label.append(",");
    label.append(new PrintfFormat("%." + precisionY + "f").sprintf(y));
    label.append(")=");
    label.append(new PrintfFormat("%.1f").sprintf(energy));
    return label.toString();
}
/**
 * Convert an image-space y pixel coordinate into a spectrum bin index.
 */
protected int imageY2indexY(int imageY)
{
    return Y2indexY(imageY2Y(imageY));
}
/**
 * Convert a data-space y value into a spectrum bin index.
 */
protected int Y2indexY(double y)
{
    // The linear mapping below relies on the y axis starting at zero;
    // otherwise it would have to be (y - ymin) / (ymax - ymin).
    assert ymin == 0;
    return (int) (spectra_indexmax * y / ymax);
}
/**
 * This variant exposes no control panel. (A previous version assembled a
 * vertical panel holding the per-cursor graph controls; that code path is
 * disabled — see getControls1() for the full control set.)
 * @return always null
 */
protected JPanel getControls()
{
    return null;
}
/**
 * Build the full control panel: an FFT-size slider (powers of two from
 * 2^5=32 to 2^13=8192), a window-type selector, and the controls of every
 * dependent cursor graph.
 * @return a vertically stacked panel with all spectrogram controls
 */
protected JPanel getControls1()
{
    JPanel controls = new JPanel();
    controls.setLayout(new BoxLayout(controls, BoxLayout.Y_AXIS));
    // FFT size slider:
    JLabel fftLabel = new JLabel("FFT size:");
    fftLabel.setAlignmentX(CENTER_ALIGNMENT);
    controls.add(fftLabel);
    int min = 5;
    int max = 13;
    // Current FFT size expressed as its power-of-two exponent.
    int deflt = (int) (Math.log(this.fftSize) / Math.log(2));
    JSlider fftSizeSlider = new JSlider(JSlider.VERTICAL, min, max, deflt);
    fftSizeSlider.setAlignmentX(CENTER_ALIGNMENT);
    fftSizeSlider.setMajorTickSpacing(1);
    fftSizeSlider.setPaintTicks(true);
    fftSizeSlider.setSnapToTicks(true);
    Hashtable<Integer, JLabel> labelTable = new Hashtable<Integer, JLabel>();
    for (int i = min; i <= max; i++) {
        int twoPowI = 1 << i; // 2^i, e.g. i==8 => twoPowI==256
        // Integer.valueOf instead of the deprecated new Integer(...) constructor.
        labelTable.put(Integer.valueOf(i), new JLabel(String.valueOf(twoPowI)));
    }
    fftSizeSlider.setLabelTable(labelTable);
    fftSizeSlider.setPaintLabels(true);
    fftSizeSlider.addChangeListener(new ChangeListener() {
        public void stateChanged(ChangeEvent ce)
        {
            JSlider source = (JSlider) ce.getSource();
            // Only react once the user releases the slider.
            if (!source.getValueIsAdjusting()) {
                int logfftSize = source.getValue();
                int newFftSize = 1 << logfftSize;
                if (newFftSize != SpectrogramCustom.this.fftSize) {
                    SpectrogramCustom.this.fftSize = newFftSize;
                    // Keep the analysis window at a quarter of the FFT size (+1).
                    SpectrogramCustom.this.window = Window.get(SpectrogramCustom.this.window.type(), newFftSize/4+1);
                    SpectrogramCustom.this.update();
                }
            }
        }
    });
    controls.add(fftSizeSlider);
    // Window type:
    JLabel windowTypeLabel = new JLabel("Window type:");
    windowTypeLabel.setAlignmentX(CENTER_ALIGNMENT);
    controls.add(windowTypeLabel);
    int[] windowTypes = Window.getAvailableTypes();
    Window[] windows = new Window[windowTypes.length];
    int selected = 0;
    for (int i = 0; i < windowTypes.length; i++) {
        windows[i] = Window.get(windowTypes[i], 1);
        if (windowTypes[i] == this.window.type()) selected = i;
    }
    JComboBox windowList = new JComboBox(windows);
    windowList.setAlignmentX(CENTER_ALIGNMENT);
    windowList.setSelectedIndex(selected);
    windowList.setMaximumSize(windowList.getPreferredSize());
    windowList.addActionListener(new ActionListener() {
        public void actionPerformed(ActionEvent e) {
            JComboBox cb = (JComboBox) e.getSource();
            int newWindowType = ((Window) cb.getSelectedItem()).type();
            if (newWindowType != SpectrogramCustom.this.window.type()) {
                // Swap the window type but keep the current window length.
                SpectrogramCustom.this.window = Window.get(newWindowType, SpectrogramCustom.this.window.getLength());
                // Spectrogram2.this.update();
            }
        }
    });
    controls.add(windowList);
    // Controls for graphs at cursor:
    for (int i = 0; i < graphsAtCursor.length; i++) {
        controls.add(graphsAtCursor[i].getControls());
    }
    return controls;
}
/**
 * Demo entry point: opens each audio file given on the command line.
 * The spectrogram display itself is currently disabled (see commented code);
 * each stream is opened and closed again so invalid files still fail fast.
 * @param args audio file names
 * @throws Exception if a file cannot be opened as an audio stream
 */
public static void main(String[] args) throws Exception
{
    for (int i = 0; i < args.length; i++) {
        // try-with-resources: the original leaked the opened AudioInputStream.
        try (AudioInputStream ais = AudioSystem.getAudioInputStream(new File(args[i]))) {
            // Spectrogram2 signalSpectrum = new Spectrogram2(ais);
            // signalSpectrum.showInJFrame(args[i], true, true);
        }
    }
}
/**
 * Determine the next free location for a dependent window and place it there.
 * Windows are stacked vertically, starting to the right of this component's
 * top-level window; the position counters are shared statically.
 * @param jf the dependent window to position
 */
protected void setDependentWindowLocation(JFrame jf)
{
    boolean firstDependentWindow = (nextDependentWindowX == 0 && nextDependentWindowY == 0);
    if (firstDependentWindow) {
        // Start the stack just right of our own top-level window.
        nextDependentWindowX = getTopLevelAncestor().getWidth();
    }
    // NOTE(review): setLocationRelativeTo is immediately overridden by the
    // setLocation call below; kept to preserve the original behavior —
    // confirm whether it can be removed.
    jf.setLocationRelativeTo(this);
    jf.setLocation(nextDependentWindowX, nextDependentWindowY);
    nextDependentWindowY += jf.getHeight();
}
// Screen position (pixels) where the next dependent window will be placed;
// static, so dependent windows from all instances stack in one column.
private static int nextDependentWindowX;
private static int nextDependentWindowY;
/**
 * Base class for the auxiliary graphs that follow the cursor position.
 * Subclasses recompute their data in {@link #update(double)} whenever the
 * user clicks a new position, and describe their checkbox/slider panel in
 * {@link #createControls()}.
 */
public abstract class GraphAtCursor
{
    private JPanel controls;
    protected FunctionGraph graph;
    // Whether this auxiliary graph is currently enabled via its checkbox.
    protected boolean show = false;

    /** Recompute and redisplay this graph for cursor position x (in seconds). */
    public abstract void update(double x);

    /** Lazily create and cache this graph's control panel. */
    public JPanel getControls()
    {
        if (controls == null) {
            controls = createControls();
        }
        return controls;
    }

    protected abstract JPanel createControls();

    /**
     * Display someGraph in its own frame: open a new dependent window on
     * first use; afterwards just retitle, re-show and repaint the old one.
     */
    protected void updateGraph(FunctionGraph someGraph, String title) {
        if (someGraph.getParent() != null) {
            JFrame frame = (JFrame) SwingUtilities.getWindowAncestor(someGraph);
            frame.setTitle(title);
            frame.setVisible(true); // just to be sure
            someGraph.repaint();
        } else {
            JFrame frame = someGraph.showInJFrame(title, 400, 250, false, false);
            setDependentWindowLocation(frame);
        }
    }
}
/**
 * Cursor-tracking spectrum display: shows the log power spectrum of a
 * 1024-sample Hamming window centered at the clicked position.
 */
public class SpectrumAtCursor extends GraphAtCursor
{
    /**
     * Recompute the spectrum for cursor time x (seconds); no-op for NaN.
     */
    public void update(double x)
    {
        if (Double.isNaN(x)) return;
        int centerIndex = (int) (x * samplingRate);
        assert centerIndex >= 0 && centerIndex < signal.length;
        int windowLength = 1024;
        int leftIndex = centerIndex - windowLength/2;
        if (leftIndex < 0) leftIndex = 0;
        double[] signalExcerpt = new HammingWindow(windowLength).apply(signal, leftIndex);
        double[] spectrum = FFT.computeLogPowerSpectrum(signalExcerpt);
        // Frequency step per spectrum bin, in Hz. Cast before dividing: the
        // original integer division samplingRate/windowLength truncated the
        // bin width and distorted the graph's frequency axis.
        double deltaF = (double) samplingRate / windowLength;
        if (graph == null) {
            graph = new FunctionGraph(300, 200, 0, deltaF, spectrum);
        } else {
            graph.updateData(0, deltaF, spectrum);
        }
        super.updateGraph(graph, "Spectrum at "+new PrintfFormat("%.3f").sprintf(x)+" s");
    }

    /** Control panel: a single checkbox toggling the spectrum window. */
    protected JPanel createControls()
    {
        JPanel controls = new JPanel();
        JCheckBox checkSpectrum = new JCheckBox("Show spectrum");
        checkSpectrum.setAlignmentX(CENTER_ALIGNMENT);
        checkSpectrum.setSelected(show);
        checkSpectrum.addItemListener(new ItemListener() {
            public void itemStateChanged(ItemEvent e) {
                if (e.getStateChange() == ItemEvent.DESELECTED) {
                    // Hide (but keep) the dependent window when unchecked.
                    show = false;
                    if (graph != null)
                        graph.getTopLevelAncestor().setVisible(false);
                } else if (e.getStateChange() == ItemEvent.SELECTED) {
                    show = true;
                    update(positionCursor.x);
                    if (graph != null) {
                        graph.getTopLevelAncestor().setVisible(true);
                    }
                }
            }
        });
        controls.add(checkSpectrum);
        return controls;
    }
}
/**
 * Cursor-tracking phasogram display: shows the phase structure of a short
 * signal excerpt (10 ms plus the phasogram's FFT size) around the clicked
 * position.
 */
public class PhasogramAtCursor extends GraphAtCursor
{
    /**
     * Recompute the phasogram for cursor time x (seconds); no-op for NaN.
     * On repeated calls the Phasogram's internal signal buffer is reused,
     * so only the excerpt data is copied in before refreshing the display.
     */
    public void update(double x)
    {
        if (Double.isNaN(x)) return;
        int centerIndex = (int) (x * samplingRate);
        assert centerIndex >= 0 && centerIndex < signal.length;
        // Want to show a phasogram of 10 ms centered around cursor position:
        int halfWindowLength = samplingRate / 200;
        double[] signalExcerpt;
        if (graph == null) {
            // First call: allocate a fresh excerpt buffer.
            signalExcerpt = new double[2*halfWindowLength+Phasogram.DEFAULT_FFTSIZE];
        } else {
            // Later calls: write directly into the existing Phasogram's buffer.
            assert graph instanceof Phasogram;
            signalExcerpt = ((Phasogram)graph).signal;
        }
        int leftIndex = centerIndex - halfWindowLength;
        if (leftIndex < 0) leftIndex = 0;
        int len = signalExcerpt.length;
        if (leftIndex + len >= signal.length)
            len = signal.length - leftIndex;
        System.arraycopy(signal, leftIndex, signalExcerpt, 0, len);
        //System.err.println("Copied excerpt from signal pos " + leftIndex + ", len " + len);
        if (len < signalExcerpt.length) {
            // Zero-pad when the excerpt reaches past the end of the signal.
            Arrays.fill(signalExcerpt, len, signalExcerpt.length, 0);
        }
        if (graph == null) {
            graph = new Phasogram(signalExcerpt, samplingRate, 300, 200);
        } else {
            ((Phasogram)graph).update();
        }
        super.updateGraph(graph, "Phasogram at "+new PrintfFormat("%.3f").sprintf(x)+" s");
    }

    /** Control panel: a single checkbox toggling the phasogram window. */
    protected JPanel createControls()
    {
        JPanel controls = new JPanel();
        JCheckBox checkPhasogram = new JCheckBox("Show phasogram");
        checkPhasogram.setAlignmentX(CENTER_ALIGNMENT);
        checkPhasogram.setSelected(show);
        checkPhasogram.addItemListener(new ItemListener() {
            public void itemStateChanged(ItemEvent e) {
                if (e.getStateChange() == ItemEvent.DESELECTED) {
                    // Hide (but keep) the dependent window when unchecked.
                    show = false;
                    if (graph != null)
                        graph.getTopLevelAncestor().setVisible(false);
                } else if (e.getStateChange() == ItemEvent.SELECTED) {
                    show= true;
                    update(positionCursor.x);
                    if (graph != null)
                        graph.getTopLevelAncestor().setVisible(true);
                }
            }
        });
        controls.add(checkPhasogram);
        return controls;
    }
}
/**
 * Cursor-tracking LPC display: shows the LPC spectral envelope of a
 * 1024-sample Hamming window at the clicked position, plus (in a second
 * dependent window) the LPC residue obtained by inverse filtering.
 */
public class LPCAtCursor extends GraphAtCursor
{
    // LPC analysis order; adjustable via the slider in createControls().
    protected int lpcOrder = 50;
    // Second dependent window showing the inverse-filtered residue signal.
    protected SignalGraph lpcResidueAtCursor = null;

    /**
     * Recompute LPC envelope and residue for cursor time x (seconds);
     * no-op for NaN.
     */
    public void update(double x)
    {
        if (Double.isNaN(x)) return;
        int centerIndex = (int) (x * samplingRate);
        assert centerIndex >= 0 && centerIndex < signal.length;
        int windowLength = 1024;
        int leftIndex = centerIndex - windowLength/2;
        if (leftIndex < 0) leftIndex = 0;
        double[] signalExcerpt = new HammingWindow(windowLength).apply(signal, leftIndex);
        LpcAnalyser.LpCoeffs lpc = LpcAnalyser.calcLPC(signalExcerpt, lpcOrder);
        double[] coeffs = lpc.getOneMinusA();
        double g_db = 2*MathUtils.db(lpc.getGain()); // *2 because g is signal, not energy
        // Zero-pad the coefficients to the FFT length before transforming.
        double[] fftCoeffs = new double[windowLength];
        System.arraycopy(coeffs, 0, fftCoeffs, 0, coeffs.length);
        double[] lpcSpectrum = FFT.computeLogPowerSpectrum(fftCoeffs);
        // Envelope = negated whitening-filter spectrum plus gain (in dB).
        for (int i=0; i<lpcSpectrum.length; i++) {
            lpcSpectrum[i] = -lpcSpectrum[i] + g_db;
        }
        // NOTE(review): samplingRate/windowLength below is integer division,
        // truncating the per-bin frequency step — confirm whether a double
        // division was intended.
        if (graph == null) {
            graph = new FunctionGraph(300, 200, 0, samplingRate/windowLength, lpcSpectrum);
        } else {
            graph.updateData(0, samplingRate/windowLength, lpcSpectrum);
        }
        updateGraph(graph, "LPC spectrum (order "+lpcOrder+") at "+new PrintfFormat("%.3f").sprintf(x)+" s");
        // And the residue: inverse-filter the signal with the LPC coefficients.
        FIRFilter whiteningFilter = new FIRFilter(coeffs);
        // Take lpcOrder extra leading samples so the filter is warmed up
        // before the region of interest.
        double[] signalExcerpt2 = new RectWindow(lpcOrder+windowLength).apply(signal, leftIndex-lpcOrder);
        double[] residue = whiteningFilter.apply(signalExcerpt2);
        double[] usableSignal = ArrayUtils.subarray(signalExcerpt2, lpcOrder, windowLength);
        double[] usableResidue = ArrayUtils.subarray(residue, lpcOrder, windowLength);
        // Prediction gain = signal energy over residue energy, in dB.
        double predictionGain = MathUtils.db(MathUtils.sum(MathUtils.multiply(usableSignal, usableSignal))
                / MathUtils.sum(MathUtils.multiply(usableResidue, usableResidue)));
        System.err.println("LPC prediction gain: " + predictionGain + " dB");
        if (lpcResidueAtCursor == null) {
            lpcResidueAtCursor = new SignalGraph(usableResidue, samplingRate, 300, 200);
        } else {
            lpcResidueAtCursor.update(usableResidue, samplingRate);
        }
        super.updateGraph(lpcResidueAtCursor, "LPC residue at "+new PrintfFormat("%.3f").sprintf(x)+" s");
    }

    /**
     * Control panel: a checkbox toggling both LPC windows, plus a slider
     * for the LPC order (1..100).
     */
    protected JPanel createControls()
    {
        JPanel controls = new JPanel();
        controls.setLayout(new BoxLayout(controls, BoxLayout.Y_AXIS));
        JCheckBox checkLPC = new JCheckBox("Show LPC");
        checkLPC.setAlignmentX(CENTER_ALIGNMENT);
        checkLPC.setSelected(show);
        checkLPC.addItemListener(new ItemListener() {
            public void itemStateChanged(ItemEvent e) {
                if (e.getStateChange() == ItemEvent.DESELECTED) {
                    // Hide (but keep) both dependent windows when unchecked.
                    show = false;
                    if (graph != null)
                        graph.getTopLevelAncestor().setVisible(false);
                    if (lpcResidueAtCursor != null)
                        lpcResidueAtCursor.getTopLevelAncestor().setVisible(false);
                } else if (e.getStateChange() == ItemEvent.SELECTED) {
                    show = true;
                    update(positionCursor.x);
                    if (graph != null)
                        graph.getTopLevelAncestor().setVisible(true);
                    if (lpcResidueAtCursor != null)
                        lpcResidueAtCursor.getTopLevelAncestor().setVisible(true);
                }
            }
        });
        controls.add(checkLPC);
        // LPC order slider:
        JLabel lpcLabel = new JLabel("LPC order:");
        lpcLabel.setAlignmentX(CENTER_ALIGNMENT);
        controls.add(lpcLabel);
        int min = 1;
        int max = 100;
        JSlider lpcSlider = new JSlider(JSlider.HORIZONTAL, min, max, lpcOrder);
        lpcSlider.setAlignmentX(CENTER_ALIGNMENT);
        lpcSlider.addChangeListener(new ChangeListener() {
            public void stateChanged(ChangeEvent ce)
            {
                JSlider source = (JSlider)ce.getSource();
                // Only react once the user releases the slider.
                if (!source.getValueIsAdjusting()) {
                    lpcOrder = (int)source.getValue();
                    System.err.println("Adjusted lpc order to " + lpcOrder);
                    if (show) update(positionCursor.x);
                }
            }
        });
        controls.add(lpcSlider);
        return controls;
    }
}
/**
 * Cursor-tracking cepstrum display: shows the real cepstrum of a Hamming
 * window at the clicked position, plus (in a second dependent window) the
 * spectral envelope obtained from a low-pass-filtered cepstrum.
 */
public class CepstrumAtCursor extends GraphAtCursor
{
    // Number of cepstral coefficients kept for the envelope; slider-adjustable.
    protected int cepstrumCutoff = 50;
    // Second dependent window showing the cepstrum-derived spectral envelope.
    protected FunctionGraph cepstrumSpectrumAtCursor = null;

    /**
     * Recompute cepstrum and cepstral envelope for cursor time x (seconds);
     * no-op for NaN.
     */
    public void update(double x)
    {
        if (Double.isNaN(x)) return;
        int centerIndex = (int) (x * samplingRate);
        assert centerIndex >= 0 && centerIndex < signal.length;
        int windowLength = 1024;
        int leftIndex = centerIndex - windowLength/2;
        if (leftIndex < 0) leftIndex = 0;
        // Create a zero-padded version of the signal excerpt:
        double[] signalExcerpt = new double[2*windowLength];
        new HammingWindow(windowLength).apply(signal, leftIndex, signalExcerpt, 0);
        double[] realCepstrum = CepstrumSpeechAnalyser.realCepstrum(signalExcerpt);
        // NOTE(review): samplingRate is passed as the graph's xStep here —
        // looks suspicious for a quefrency axis; confirm the intended scale.
        if (graph == null) {
            graph = new FunctionGraph(300, 200, 0, samplingRate, realCepstrum);
        } else {
            graph.updateData(0, samplingRate, realCepstrum);
        }
        super.updateGraph(graph, "Cepstrum at "+new PrintfFormat("%.3f").sprintf(x)+" s");
        // And the spectral envelope computed from a low-pass cut-off version of the cepstrum:
        double[] lowCepstrum = CepstrumSpeechAnalyser.filterLowPass(realCepstrum, cepstrumCutoff);
        double[] real = lowCepstrum;
        double[] imag = new double[real.length];
        FFT.transform(real, imag, false);
        // Keep only the first half of the (symmetric) transformed spectrum.
        double[] cepstrumSpectrum = ArrayUtils.subarray(real, 0, real.length/2);
        // NOTE(review): samplingRate/real.length below is integer division —
        // confirm whether a double division was intended for the bin width.
        if (cepstrumSpectrumAtCursor == null) {
            cepstrumSpectrumAtCursor = new FunctionGraph(300, 200, 0, samplingRate/real.length, cepstrumSpectrum);
        } else {
            cepstrumSpectrumAtCursor.updateData(0, samplingRate/real.length, cepstrumSpectrum);
        }
        super.updateGraph(cepstrumSpectrumAtCursor, "Cepstrum spectrum (cutoff "+cepstrumCutoff+") at "+new PrintfFormat("%.3f").sprintf(x)+" s");
    }

    /**
     * Control panel: a checkbox toggling both cepstrum windows, plus a
     * slider for the cepstrum cutoff (1..256).
     */
    protected JPanel createControls()
    {
        JPanel controls = new JPanel();
        controls.setLayout(new BoxLayout(controls, BoxLayout.Y_AXIS));
        JCheckBox checkCepstrum = new JCheckBox("Show Cepstrum");
        checkCepstrum.setAlignmentX(CENTER_ALIGNMENT);
        checkCepstrum.setSelected(show);
        checkCepstrum.addItemListener(new ItemListener() {
            public void itemStateChanged(ItemEvent e) {
                if (e.getStateChange() == ItemEvent.DESELECTED) {
                    // Hide (but keep) both dependent windows when unchecked.
                    show = false;
                    if (graph != null)
                        graph.getTopLevelAncestor().setVisible(false);
                    if (cepstrumSpectrumAtCursor != null)
                        cepstrumSpectrumAtCursor.getTopLevelAncestor().setVisible(false);
                } else if (e.getStateChange() == ItemEvent.SELECTED) {
                    show = true;
                    update(positionCursor.x);
                    if (graph != null)
                        graph.getTopLevelAncestor().setVisible(true);
                    if (cepstrumSpectrumAtCursor != null)
                        cepstrumSpectrumAtCursor.getTopLevelAncestor().setVisible(true);
                }
            }
        });
        controls.add(checkCepstrum);
        // Cepstrum cutoff slider:
        JLabel cepstrumLabel = new JLabel("Cepstrum cutoff:");
        cepstrumLabel.setAlignmentX(CENTER_ALIGNMENT);
        controls.add(cepstrumLabel);
        int min = 1;
        int max = 256;
        JSlider cepstrumSlider = new JSlider(JSlider.HORIZONTAL, min, max, cepstrumCutoff);
        cepstrumSlider.setAlignmentX(CENTER_ALIGNMENT);
        cepstrumSlider.addChangeListener(new ChangeListener() {
            public void stateChanged(ChangeEvent ce)
            {
                JSlider source = (JSlider)ce.getSource();
                // Only react once the user releases the slider.
                if (!source.getValueIsAdjusting()) {
                    cepstrumCutoff = (int)source.getValue();
                    System.err.println("Adjusted cepstrum cutoff to " + cepstrumCutoff);
                    if (show) update(positionCursor.x);
                }
            }
        });
        controls.add(cepstrumSlider);
        return controls;
    }
}
}

View File

@ -1,38 +0,0 @@
package org.gcube.dataanalysis.ecoengine.signals;
public class Delta {
public static void calcDelta(double A[][], int numCoeff) throws Exception {
int delta;
for (int j = 0; j < numCoeff; j++) {
delta = j + numCoeff;
completeDelta(A, j, delta);
}
}
public static void calcDoubleDelta(double A[][], int numCoeff) throws Exception {
int fine = numCoeff * 2;
for (int delta = numCoeff; delta < fine; delta++) {
int doppioDelta = delta + numCoeff;
completeDelta(A, delta, doppioDelta);
}
}
private static void completeDelta(double A[][], int j, int d) throws Exception {
if (A.length < 4) {
throw new Exception();
}
if (A.length > 2) {
A[0][d] = A[1][j] - A[0][j];
A[1][d] = (A[2][j] - A[0][j]) - (A[0][j] - A[1][j]) / 4;
}
for (int i = 2; i < A.length - 2; i++)
A[i][d] = ((2 * (A[i + 2][j] - A[i - 2][j])) + (A[i + 1][j] - A[i - 1][j])) / 8;
if (A.length > 3) {
A[A.length - 2][d] = (A[A.length - 1][j] - A[A.length - 3][j]) - (A[A.length - 1][j] - A[A.length - 2][j]);
A[A.length - 1][d] = A[A.length - 2][j] - A[A.length - 1][j];
}
}
}

View File

@ -1,408 +0,0 @@
package org.gcube.dataanalysis.ecoengine.signals;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class PeriodicityDetector {
private static Logger logger = LoggerFactory.getLogger(PeriodicityDetector.class);
/*
 * static int defaultSamplingRate = 1000;// Hz static float defaultSignalLengthTimeinSec = 5;// s static float defaultHiddenFrequency = 100f;// Hz static float defaultMinPossibleFreq = 0; // Hz static float defaultMaxPossibleFreq = 200; // Hz static float defaultSNratio = 2; static float defaultFreqError = 1f;
 */
// Defaults used by demo(): a 5 s, 8 kHz signal hiding a 2 Hz component.
static int defaultSamplingRate = 8000;// Hz
static float defaultSignalLengthTimeinSec = 5;// s
static float defaultHiddenFrequency = 2f;// Hz
static float defaultMinPossibleFreq = 0; // Hz
static float defaultMaxPossibleFreq = 1000; // Hz
static float defaultSNratio = 0;
static float defaultFreqError = 1f;
// Analysis parameters of the last run, set by adjustParameters()/detectFrequency().
public int currentSamplingRate;
public int currentWindowShiftSamples;
public int currentWindowAnalysisSamples;
// Band-limited spectrogram of the last analyzed signal.
public double[][] currentspectrum;
// Detected frequency (Hz) and its lower/upper confidence bounds.
public double meanF = 0;
public double lowermeanF = 0;
public double uppermeanF = 0;
// Corresponding period (s) and its confidence bounds.
public double meanPeriod = 0;
public double lowermeanPeriod = 0;
public double uppermeanPeriod = 0;
// Time span (s) and sample span of the longest stable periodic tract found.
public double startPeriodTime = 0;
public double endPeriodTime = 0;
public double startPeriodSampleIndex = 0;
public double endPeriodSampleIndex = 0;
// Average relative spectral power in the stable tract (see interpretation method).
public double periodicityStrength = 0;
// Effective frequency band used after clamping the caller-supplied bounds.
public double minFrequency;
public double maxFrequency;
/**
 * Map the last computed periodicity strength onto a verbal scale.
 * @return one of "High", "Moderate", "Weak", "Very Low", "None"
 */
public String getPeriodicityStregthInterpretation() {
    double strength = periodicityStrength;
    if (strength > 0.6)
        return "High";
    if (strength > 0.5)
        return "Moderate";
    if (strength > 0.3)
        return "Weak";
    // NOTE(review): at this point strength <= 0.3, so the original test
    // `>= 0.3` matches only strength == 0.3 exactly; values in (0, 0.3)
    // fall through to "None". Kept as-is — confirm the intended threshold.
    if (strength >= 0.3)
        return "Very Low";
    return "None";
}
/**
 * Map a power-spectrum peak strength onto a verbal scale. After the first
 * three checks powerStrength is at most 2, so "Very Low" covers [1.4, 2]
 * and anything below 1.4 yields "None".
 * @return one of "High", "Moderate", "Weak", "Very Low", "None"
 */
public String getPowerSpectrumStregthInterpretation(double powerStrength) {
    if (powerStrength > 3)
        return "High";
    if (powerStrength > 2.5)
        return "Moderate";
    if (powerStrength > 2)
        return "Weak";
    if (powerStrength >= 1.4)
        return "Very Low";
    return "None";
}
/**
 * Self-test: build a noisy sinusoid from the default parameters and log the
 * frequency detected in it together with its confidence interval.
 * @throws Exception propagated from the frequency detection
 */
public void demo() throws Exception {
    double[] noisySignal = produceNoisySignal(defaultSignalLengthTimeinSec, defaultSamplingRate, defaultHiddenFrequency, defaultSNratio);
    logger.debug("Signal samples: " + noisySignal.length);
    double detected = detectFrequency(noisySignal, defaultSamplingRate, defaultMinPossibleFreq, defaultMaxPossibleFreq, defaultFreqError, -1, true);
    logger.debug("Detected F:" + detected + " indecision [" + lowermeanF + " , " + uppermeanF + "]");
}
/** Command-line entry point: run the built-in demo. */
public static void main(String[] args) throws Exception {
    new PeriodicityDetector().demo();
}
/**
 * Generate a sinusoid of the given frequency and duration, then add uniform
 * random noise scaled by SNratio to every sample.
 * @param signalLengthTimeinSec signal duration in seconds (truncated to int)
 * @param samplingRate samples per second
 * @param frequency sine frequency in Hz
 * @param SNratio amplitude factor of the added uniform noise
 * @return the noisy signal (non-deterministic: uses Math.random())
 */
public double[] produceNoisySignal(float signalLengthTimeinSec, int samplingRate, float frequency, float SNratio) {
    int totalSamples = (int) signalLengthTimeinSec * samplingRate;
    // generate a signal with the above period
    double[] noisy = SignalConverter.generateSinSignal(totalSamples, 1f / samplingRate, frequency);
    // superimpose uniform random noise on the clean sinusoid
    for (int sample = 0; sample < noisy.length; sample++) {
        noisy[sample] += SNratio * Math.random();
    }
    return noisy;
}
/**
 * Detect the dominant frequency using generic defaults: unit sampling rate,
 * band [0, 1], frequency error tolerance 1, automatic FFT length.
 * @param signal the input samples
 * @param display whether to show the intermediate spectrograms
 * @return the detected frequency, or 0 if none was found
 * @throws Exception propagated from the full detection
 */
public double detectFrequency(double[] signal, boolean display) throws Exception {
    return detectFrequency(signal, 1, 0, 1, 1f, -1, display);
}
/**
 * Detect the dominant frequency with generic defaults and no display.
 * @param signal the input samples
 * @return the detected frequency, or 0 if none was found
 * @throws Exception propagated from the full detection
 */
public double detectFrequency(double[] signal) throws Exception {
    return detectFrequency(signal, false);
}
/**
 * Detect the dominant frequency of a signal via spectrogram analysis:
 * choose a power-of-two FFT window from the desired frequency error (or
 * from FFTnsamples), band-limit the spectrogram to [minPossibleFreq,
 * maxPossibleFreq], extract the per-frame peak frequencies, find the
 * longest stable tract of peaks, and average it.
 * Side effects: fills meanF/lowermeanF/uppermeanF, meanPeriod and bounds,
 * periodicityStrength, start/end period times and sample indices, and the
 * current* analysis parameters.
 *
 * @param signal the input samples
 * @param samplingRate samples per second
 * @param minPossibleFreq lower frequency bound (Hz); 0 is replaced by 0.1
 * @param maxPossibleFreq upper frequency bound (Hz); clamped below Nyquist
 * @param wantedFreqError desired frequency resolution (Hz); if > -1 it
 *        determines the FFT window length, otherwise FFTnsamples is used
 * @param FFTnsamples requested FFT length, used when wantedFreqError <= -1
 * @param display whether to show the intermediate spectrograms
 * @return the mean frequency of the longest stable tract, or 0 if invalid
 * @throws Exception propagated from the spectrogram computation
 */
public double detectFrequency(double[] signal, int samplingRate, float minPossibleFreq, float maxPossibleFreq, float wantedFreqError, int FFTnsamples, boolean display) throws Exception {
    // estimate the best samples based on the error we want
    int wLength = 0;
    long pow = 0;
    if (wantedFreqError > -1) {
        // window length = samplingRate / error, rounded to a power of two;
        // fall back to a signal-length-based guess for degenerate values
        pow = Math.round(Math.log((float) samplingRate / wantedFreqError) / Math.log(2));
        if (pow <= 1)
            pow = Math.round(Math.log((float) signal.length / (float) ("" + signal.length).length()) / Math.log(2));
        logger.debug("Suggested pow for window length=" + pow);
    }
    // adjust FFT Samples to be even
    else {
        if (FFTnsamples < 2)
            FFTnsamples = 2;
        else if (FFTnsamples > signal.length)
            FFTnsamples = signal.length;
        pow = Math.round(Math.log((float) FFTnsamples) / Math.log(2));
    }
    wLength = (int) Math.pow(2, pow);
    logger.debug("Suggested windows length (samples)=" + wLength);
    logger.debug("Suggested windows length (s)=" + ((float) wLength / (float) samplingRate) + " s");
    // NOTE(review): the 2^14 value below is a dead store, immediately
    // overwritten by wLength.
    int windowAnalysisSamples = (int) Math.pow(2, 14);// (int)
    windowAnalysisSamples = wLength;
    // half-window overlap between successive analysis frames
    int windowShiftSamples = (int) Math.round((float) windowAnalysisSamples / 2f);
    float windowShiftTime = (float) SignalConverter.sample2Time(windowShiftSamples, samplingRate);
    float error = ((float) samplingRate / (float) windowAnalysisSamples);
    logger.debug("Error in the Measure will be: " + error + " Hz");
    logger.debug("A priori Min Freq: " + minPossibleFreq + " s");
    logger.debug("A priori Max Freq: " + maxPossibleFreq + " s");
    // clamp the band to (0, Nyquist): 90% of half the sampling rate at most
    if (maxPossibleFreq >= samplingRate)
        maxPossibleFreq = (float) (samplingRate / 2f) - (0.1f * samplingRate / 2f);
    if (minPossibleFreq == 0)
        minPossibleFreq = 0.1f;
    minFrequency = minPossibleFreq;
    maxFrequency = maxPossibleFreq;
    // display the signal
    // if (display)
    // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
    this.currentSamplingRate = samplingRate;
    this.currentWindowShiftSamples = windowShiftSamples;
    this.currentWindowAnalysisSamples = windowAnalysisSamples;
    // trace spectrum
    double[][] spectrum = SignalConverter.spectrogram("spectrogram", signal, samplingRate, windowShiftSamples, windowAnalysisSamples, false);
    if (display)
        SignalConverter.displaySpectrogram(spectrum, signal, "complete spectrogram", samplingRate, windowShiftSamples, windowAnalysisSamples);
    // apply the bandpass filter
    spectrum = SignalConverter.cutSpectrum(spectrum, minPossibleFreq, maxPossibleFreq, windowAnalysisSamples, samplingRate);
    if (display)
        // display cut spectrum
        SignalConverter.displaySpectrogram(spectrum, signal, "clean spectrogram", samplingRate, windowShiftSamples, windowAnalysisSamples);
    // extract the maximum frequencies in each frame
    SignalConverter signalMaximumAnalyzer = new SignalConverter();
    double[] maxfrequencies = signalMaximumAnalyzer.takeMaxFrequenciesInSpectrogram(spectrum, samplingRate, windowAnalysisSamples, minPossibleFreq);
    double[] powers = signalMaximumAnalyzer.averagepower;
    currentspectrum = spectrum;
    // display the maximum freqs
    logger.debug("Number of frequency peaks " + maxfrequencies.length);
    // take the longest stable sequence of frequencies
    SignalConverter signalconverter = new SignalConverter();
    maxfrequencies = signalconverter.takeLongestStableTract(maxfrequencies, 0.01);
    if (maxfrequencies == null)
        return 0;
    // map the stable tract's frame indices back to times and sample indices
    this.startPeriodTime = SignalConverter.spectrogramTimeFromIndex(signalconverter.startStableTractIdx, windowShiftTime);
    this.endPeriodTime = SignalConverter.spectrogramTimeFromIndex(signalconverter.endStableTractIdx, windowShiftTime);
    this.startPeriodSampleIndex = SignalConverter.time2Sample(startPeriodTime, samplingRate);
    this.endPeriodSampleIndex = Math.min(SignalConverter.time2Sample(endPeriodTime, samplingRate), signal.length - 1);
    float power = 0;
    int counter = 0;
    // calculate the average spectrum relative amplitude in the most stable periodic tract
    for (int i = signalconverter.startStableTractIdx; i < signalconverter.endStableTractIdx; i++) {
        power = MathFunctions.incrementPerc(power, (float) powers[i], counter);
        counter++;
    }
    this.periodicityStrength = power;
    if (this.periodicityStrength == -0.0)
        this.periodicityStrength = 0;
    // reconstruct the F
    double meanF = MathFunctions.mean(maxfrequencies);
    // we consider a complete cycle
    double possibleperiod = 2d / meanF;
    logger.debug("TimeSeriesAnalysis->Frequency " + meanF);
    logger.debug("TimeSeriesAnalysis->Periodicity " + possibleperiod);
    double maxperiod = Math.min(signal.length, currentWindowAnalysisSamples);
    // reject out-of-band frequencies and periods longer than the window
    if ((meanF <= minPossibleFreq) || (meanF >= maxPossibleFreq) || (possibleperiod == 0) || (possibleperiod > (maxperiod))) {
        logger.debug("TimeSeriesAnalysis->Invalid periodicity " + (meanF <= minPossibleFreq) + " , " + (meanF >= maxPossibleFreq) + " , " + (possibleperiod == 0) + " , " + (possibleperiod > (maxperiod)));
        meanF = 0;
        this.meanF = 0;
        this.lowermeanF = 0;
        this.uppermeanF = 0;
        this.meanPeriod = 0;
        this.lowermeanPeriod = 0;
        this.uppermeanPeriod = 0;
        this.periodicityStrength = 0;
        this.startPeriodTime = 0;
        this.endPeriodTime = 0;
        this.startPeriodSampleIndex = 0;
        this.endPeriodSampleIndex = 0;
    } else {
        logger.debug("TimeSeriesAnalysis->periodicity is valid " + possibleperiod);
        this.meanF = meanF;
        // confidence interval = detected value +/- the frequency resolution
        this.lowermeanF = Math.max(meanF - error, minPossibleFreq);
        this.uppermeanF = Math.min(meanF + error, maxFrequency);
        this.meanPeriod = possibleperiod;
        this.lowermeanPeriod = 2d / lowermeanF;
        this.uppermeanPeriod = 2d / uppermeanF;
    }
    return meanF;
}
/**
 * Derive and store the spectrogram analysis parameters (sampling rate,
 * window length, window shift, effective frequency band) for the given
 * signal, without running the actual detection. Mirrors the setup phase
 * of detectFrequency.
 *
 * @param signal the input samples (used only for its length)
 * @param samplingRate samples per second
 * @param minPossibleFreq lower frequency bound (Hz); 0 is replaced by 0.1
 * @param maxPossibleFreq upper frequency bound (Hz); clamped below Nyquist
 * @param wantedFreqError desired frequency resolution (Hz); if > -1 it
 *        determines the FFT window length, otherwise FFTnsamples is used
 * @param FFTnsamples requested FFT length, used when wantedFreqError <= -1
 */
public void adjustParameters(double[] signal, int samplingRate, float minPossibleFreq, float maxPossibleFreq, float wantedFreqError, int FFTnsamples) {
    // Choose the window length as the power of two matching the requested
    // frequency resolution (error = samplingRate / windowLength).
    long pow = 0;
    if (wantedFreqError > -1) {
        pow = Math.round(Math.log((float) samplingRate / wantedFreqError) / Math.log(2));
        // Degenerate resolution: fall back to a signal-length-based guess.
        if (pow <= 1)
            pow = Math.round(Math.log((float) signal.length / (float) ("" + signal.length).length()) / Math.log(2));
        logger.debug("Suggested pow for window length=" + pow);
    }
    // No target error given: derive the length from the requested FFT size,
    // clamped to [2, signal.length] and rounded to a power of two.
    else {
        if (FFTnsamples < 2)
            FFTnsamples = 2;
        else if (FFTnsamples > signal.length)
            FFTnsamples = signal.length;
        pow = Math.round(Math.log((float) FFTnsamples) / Math.log(2));
    }
    int wLength = (int) Math.pow(2, pow);
    logger.debug("Suggested windows length (samples)=" + wLength);
    logger.debug("Suggested windows length (s)=" + ((float) wLength / (float) samplingRate) + " s");
    // (The original computed a dead 2^14 placeholder that was immediately
    // overwritten, and an unused window-shift time; both removed.)
    int windowAnalysisSamples = wLength;
    // Half-window overlap between successive analysis frames.
    int windowShiftSamples = (int) Math.round((float) windowAnalysisSamples / 2f);
    float error = ((float) samplingRate / (float) windowAnalysisSamples);
    logger.debug("Error in the Measure will be: " + error + " Hz");
    logger.debug("A priori Min Freq: " + minPossibleFreq + " s");
    logger.debug("A priori Max Freq: " + maxPossibleFreq + " s");
    // Clamp the band to (0, Nyquist): at most 90% of half the sampling rate.
    if (maxPossibleFreq >= samplingRate)
        maxPossibleFreq = (float) (samplingRate / 2f) - (0.1f * samplingRate / 2f);
    if (minPossibleFreq == 0)
        minPossibleFreq = 0.1f;
    minFrequency = minPossibleFreq;
    maxFrequency = maxPossibleFreq;
    this.currentSamplingRate = samplingRate;
    this.currentWindowShiftSamples = windowShiftSamples;
    this.currentWindowAnalysisSamples = windowAnalysisSamples;
}
public LinkedHashMap<String, String> detectAllFrequencies(double[] signal, int samplingRate, float minPossibleFreq, float maxPossibleFreq, float wantedFreqError, int FFTnsamples, float sensitivity, boolean display) throws Exception {
adjustParameters(signal, samplingRate, minPossibleFreq, maxPossibleFreq, wantedFreqError, FFTnsamples);
//evaluate the minimum frequency resolution
double frequencyRes = ((double)samplingRate/2d)/ (double)currentWindowAnalysisSamples;
logger.debug("Frequency Resolution: "+frequencyRes);
// trace spectrum
double[][] spectrum = SignalConverter.spectrogram("spectrogram", signal, samplingRate, currentWindowShiftSamples, currentWindowAnalysisSamples, false);
if (display)
SignalConverter.displaySpectrogram(spectrum, signal, "complete spectrogram", samplingRate, currentWindowShiftSamples, currentWindowAnalysisSamples);
// apply the bandpass filter
spectrum = SignalConverter.cutSpectrum(spectrum, minPossibleFreq, maxPossibleFreq, currentWindowAnalysisSamples, samplingRate);
if (display)
// display cut spectrum
SignalConverter.displaySpectrogram(spectrum, signal, "clean spectrogram", samplingRate, currentWindowShiftSamples, currentWindowAnalysisSamples);
float windowShiftTime = (float) SignalConverter.sample2Time(this.currentWindowShiftSamples, samplingRate);
float windowLengthTime = (float) SignalConverter.sample2Time(this.currentWindowAnalysisSamples, samplingRate);
float signalTime = (float) SignalConverter.sample2Time(signal.length, samplingRate);
currentspectrum = spectrum;
// extract the maximum frequencies in each frame
SignalConverter signalMaximumAnalyzer = new SignalConverter();
ArrayList<Double>[] maxfrequencies = signalMaximumAnalyzer.takePeaksInSpectrogramFrames(spectrum, samplingRate, currentWindowAnalysisSamples, minPossibleFreq);
LinkedHashMap<String, String> peaks = new LinkedHashMap<String, String>();
double maxperiod = (double) Math.min(signal.length, currentWindowAnalysisSamples) * (double) samplingRate;
double error= 1.96*frequencyRes;// ((float) samplingRate / (float) currentWindowAnalysisSamples);
for (int i = 0; i < maxfrequencies.length; i++) {
double startTime = SignalConverter.spectrogramTimeFromIndex(i, windowShiftTime);
double endTime = Math.min(startTime+windowLengthTime,signalTime);
int counter = 0;
int freqCounter = 0;
Double previousFreq=0d;
Double previousPeriod=-100d;
String prefix = "";
if (maxfrequencies.length>1)
prefix = " (Section " + (i + 1)+")";
for (Double peakFreq : maxfrequencies[i]) {
double period = 1d / peakFreq;
double power = signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter);
double periodResolution = sensitivity/samplingRate;
//the period distance has to be at least of 9 sample rates, the frequencies should not go under the resolution and over the borders
//the period should be included two times in the window
//the power of spectrum should be high enough
if ((Math.abs(previousPeriod-period)>(periodResolution))
&& (peakFreq-previousFreq>error)
&& (peakFreq >= minPossibleFreq)
&& (peakFreq <= maxPossibleFreq)
&& (period > 0)
&& (period < maxperiod*0.55f)
&& (!getPowerSpectrumStregthInterpretation(power).equalsIgnoreCase("None")))
{
logger.debug("DISCREPANCY WITH RESPECT TO THE PREVIOUS FREQ:"+(peakFreq-previousFreq));
logger.debug("RATIO WITH RESPECT TO THE PREVIOUS FREQ:"+((peakFreq-previousFreq)/error));
if (counter == 0) {
logger.debug("Section "+(i+1));
peaks.put("*StartTime_In_Spectrogram"+prefix, "" + startTime);
peaks.put("*EndTime_In_Spectrogram" + prefix, "" + endTime);
}
double lowermeanF = Math.max(peakFreq - error, minPossibleFreq);
double uppermeanF = Math.min(peakFreq + error, maxPossibleFreq);
double upperUncertPeriod = 0;
double lowerUncertPeriod = 0;
if (peakFreq-previousFreq>error){
upperUncertPeriod=MathFunctions.roundDecimal(1d / lowermeanF,2);
lowerUncertPeriod=MathFunctions.roundDecimal(1d / uppermeanF,2);
}
else
{
upperUncertPeriod=MathFunctions.roundDecimal(period+periodResolution/2,2);
lowerUncertPeriod=Math.max(1/samplingRate,MathFunctions.roundDecimal(period-periodResolution/2,2));
}
peaks.put("Period_"+(counter+1)+prefix, MathFunctions.roundDecimal(period,2)+" ~ "+"["+lowerUncertPeriod+";"+upperUncertPeriod+"]");
peaks.put("Frequency_"+(counter+1)+prefix, MathFunctions.roundDecimal(peakFreq,2)+" ~ "+"["+MathFunctions.roundDecimal(lowermeanF,2)+";"+MathFunctions.roundDecimal(uppermeanF,2)+"]");
peaks.put("Strength_of_Periodicity_"+(counter+1)+prefix, MathFunctions.roundDecimal(signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter),2)+" ("+getPowerSpectrumStregthInterpretation(signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter))+")");
int minFidx = SignalConverter.frequencyIndex(minPossibleFreq, currentWindowAnalysisSamples, samplingRate);
double spectrogramidx = SignalConverter.spectrumFreq2Idx(peakFreq.floatValue(), samplingRate, currentWindowAnalysisSamples)-minFidx;
logger.debug("SpectorgramIdx_"+(counter+1)+":" + spectrogramidx);
logger.debug("Strength_of_Periodicity_"+(counter+1)+":" + signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter));
logger.debug("Strength_of_Periodicity_Interpretation"+(counter+1)+":" + getPowerSpectrumStregthInterpretation(signalMaximumAnalyzer.currentSpikesPowerSpectra[i].get(freqCounter)));
logger.debug("Frequency_"+(counter+1)+":" + peakFreq);
logger.debug("UpperFrequencyConfidence_"+(counter+1)+":" + uppermeanF);
logger.debug("LowerFrequencyConfidence_"+(counter+1)+":" + lowermeanF);
logger.debug("Period"+":" + period);
logger.debug("UpperFrequencyPeriod_"+(counter+1)+":" + (1d / lowermeanF));
logger.debug("LowerFrequencyPeriod_"+(counter+1)+":"+ (1d / uppermeanF));
logger.debug("");
counter++;
previousFreq=peakFreq;
previousPeriod=period;
}
freqCounter++;
}
if (counter==0)
peaks.put("Periodicity_"+(counter+1)+prefix, "No periodicities found");
}
return peaks;
}
}

View File

@ -1,498 +0,0 @@
package org.gcube.dataanalysis.ecoengine.signals;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Arrays;
import javax.swing.JPanel;
import org.gcube.contentmanagement.graphtools.data.BigSamplesTable;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis;
import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.rapidminer.example.Attribute;
import com.rapidminer.example.Example;
import com.rapidminer.example.ExampleSet;
import com.rapidminer.example.table.MemoryExampleTable;
import marytts.signalproc.display.SpectrogramCustom;
import marytts.signalproc.window.Window;
/**
 * includes tools for basic signal transformations: delta + double delta center frequency cepstral coefficients calculation spectrum frequency cut transformation to and from Rapid Miner Example Set filterbanks fequency to mel frequency to index in fft sinusoid signal generation inverse mel log10 mel filterbanks sample to time and time to sample signal timeline generation index to time in spectrogram spectrogram calculation and display time to index in spectrogram
 *
 * @author coro
 *
 */
public class SignalConverter {
// NOTE(review): logger category is DataAnalysis, not SignalConverter — kept as-is to
// avoid changing the logging configuration key, but it looks like a copy-paste slip.
private static Logger logger = LoggerFactory.getLogger(DataAnalysis.class);
/**
 * Copies each feature row into a 3x-wide row and appends delta and double-delta
 * coefficients computed in place by the Delta helper.
 */
public static double[][] addDeltaDouble(double[][] features) throws Exception {
int vectorL = features[0].length;
double[][] delta = new double[features.length][features[0].length * 3];
for (int k = 0; k < features.length; k++) {
for (int g = 0; g < vectorL; g++) {
delta[k][g] = features[k][g];
}
}
Delta.calcDelta(delta, vectorL);
Delta.calcDoubleDelta(delta, vectorL);
return delta;
}
/**
 * Center frequency (Hz) of the i-th mel filter: interpolates linearly on the mel
 * scale between lowerFilterFreq and Nyquist, then maps back to Hz.
 */
public static double centerFreq(int i, double samplingRate, double lowerFilterFreq, int numMelFilters) {
double mel[] = new double[2];
mel[0] = freqToMel(lowerFilterFreq);
mel[1] = freqToMel(samplingRate / 2);
// take inverse mel of:
double temp = mel[0] + ((mel[1] - mel[0]) / (numMelFilters + 1)) * i;
return inverseMel(temp);
}
/**
 * Discrete cosine transform of the filterbank energies f, producing numCepstra
 * cepstral coefficients.
 */
public static double[] cepCoefficients(double f[], int numCepstra, int numFilters) {
double cepc[] = new double[numCepstra];
for (int i = 0; i < cepc.length; i++) {
for (int j = 1; j <= numFilters; j++) {
cepc[i] += f[j - 1] * Math.cos(Math.PI * i / numFilters * (j - 0.5));
}
}
return cepc;
}
/** Paints a Swing panel into an offscreen RGB image of the given size. */
public static BufferedImage createImage(JPanel panel, int w, int h) {
// int w = panel.getWidth();
// int h = panel.getHeight();
BufferedImage bi = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB);
Graphics2D g = bi.createGraphics();
panel.paint(g);
return bi;
}
/**
 * Extracts the [minFreq, maxfreq] band from every spectral frame.
 * Rows of the result have maxFrequencyIndex - minFrequencyIndex + 1 bins.
 */
public static double[][] cutSpectrum(double[][] spectrum, float minFreq, float maxfreq, int fftWindowSize, int samplingRate) {
int minFrequencyIndex = frequencyIndex(minFreq, fftWindowSize, samplingRate);
int maxFrequencyIndex = frequencyIndex(maxfreq, fftWindowSize, samplingRate);
double[][] cutSpectrum = new double[spectrum.length][maxFrequencyIndex - minFrequencyIndex + 1];
for (int i = 0; i < spectrum.length; i++) {
// FIX: copyOfRange's upper bound is exclusive; the original dropped the bin at
// maxFrequencyIndex and produced rows one element shorter than allocated above.
cutSpectrum[i] = Arrays.copyOfRange(spectrum[i], minFrequencyIndex, maxFrequencyIndex + 1);
}
return cutSpectrum;
}
/**
 * Writes an ExampleSet back into rebuiltSignal, using the label attribute as the
 * destination index. NaN values are replaced by fillerValueFormissingEntries when
 * a non-NaN filler is supplied (null filler means "keep NaN").
 */
public static void exampleSet2Signal(double[] rebuiltSignal, ExampleSet es, Double fillerValueFormissingEntries) {
MemoryExampleTable met = (MemoryExampleTable) es.getExampleTable();
int numCol = met.getAttributeCount();
int numRows = met.size();
Attribute labelAtt = met.getAttribute(numCol - 1);
for (int i = 0; i < numRows; i++) {
int index = (int) met.getDataRow(i).get(labelAtt);
String label = labelAtt.getMapping().mapIndex(index);
int id = Integer.parseInt(label);
Example e = es.getExample(i);
// System.out.println(es.getExample(i)+"->"+signal[i]);
for (Attribute a : e.getAttributes()) {
Double value = e.getValue(a);
// FIX: the original called fillerValueFormissingEntries.equals(..) without a null
// check, so the 2-arg overload (which passes null) threw NPE on the first NaN value.
if (value.isNaN() && fillerValueFormissingEntries != null && !fillerValueFormissingEntries.isNaN())
value = fillerValueFormissingEntries;
rebuiltSignal[id] = value;
}
}
}
/** Convenience overload: no filler, NaN entries are written through unchanged. */
public static void exampleSet2Signal(double[] rebuiltSignal, ExampleSet es) {
exampleSet2Signal(rebuiltSignal, es, null);
}
/**
 * FFT bin boundaries for numFequencies mel filters constrained to
 * [lowerFilterFreq, upperFilterFreq].
 */
public static int[] fftBinIndices(double samplingRate, int frameSize, int numMelFilters, int numFequencies, float lowerFilterFreq, float upperFilterFreq) {
int cbin[] = new int[numFequencies + 2];
logger.debug("New Filter banks: " + numFequencies);
cbin[0] = (int) Math.round(lowerFilterFreq / samplingRate * frameSize);
cbin[cbin.length - 1] = frequencyIndex(upperFilterFreq, frameSize, (float) samplingRate);
logger.debug("F0: " + lowerFilterFreq);
for (int i = 1; i <= numFequencies; i++) {
double fc = centerFreq(i, samplingRate, lowerFilterFreq, numMelFilters);
logger.debug("F" + (i) + ": " + fc);
cbin[i] = (int) Math.round(fc / samplingRate * frameSize);
}
logger.debug("F" + (cbin.length - 1) + ": " + upperFilterFreq);
return cbin;
}
/** FFT bin boundaries for the full filterbank, upper bound fixed at Nyquist bin. */
public static int[] fftBinIndices(double samplingRate, int frameSize, int numMelFilters, float lowerFilterFreq) {
int cbin[] = new int[numMelFilters + 2];
cbin[0] = (int) Math.round(lowerFilterFreq / samplingRate * frameSize);
cbin[cbin.length - 1] = (int) (frameSize / 2);
for (int i = 1; i <= numMelFilters; i++) {
double fc = centerFreq(i, samplingRate, lowerFilterFreq, numMelFilters);
cbin[i] = (int) Math.round(fc / samplingRate * frameSize);
}
return cbin;
}
/** Hz -> mel scale. */
public static double freqToMel(double freq) {
return 2595 * log10(1 + freq / 700);
}
/** Nearest FFT bin index for a frequency, given window size and sampling rate. */
public static int frequencyIndex(float frequency, int fftSize, float samplingRate) {
return Math.round(frequency * fftSize / samplingRate);
}
/** Generates a pure sinusoid sampled every timeShift seconds. */
public static double[] generateSinSignal(int signalLength, float timeShift, float frequency) {
// final float frequency = 0.3f;// 1f;
double samples[] = new double[signalLength];
float time = 0;
for (int i = 0; i < samples.length; i++) {
samples[i] = (float) Math.sin(2f * Math.PI * frequency * time);
// time += 1f / (float) samplingRate;
time += timeShift;
}
return samples;
}
/** mel -> Hz (inverse of freqToMel). */
public static double inverseMel(double x) {
double temp = Math.pow(10, x / 2595) - 1;
return 700 * (temp);
}
/** Base-10 logarithm. */
public static double log10(double value) {
return Math.log(value) / Math.log(10);
}
/**
 * Applies triangular mel filters (bounded by the bin indices in cbin) to the
 * power-spectrum bins and returns one energy per filter.
 */
public static double[] melFilter(double bin[], int cbin[], int numMelFilters) {
double temp[] = new double[numMelFilters + 2];
for (int k = 1; k <= numMelFilters; k++) {
double num1 = 0, num2 = 0;
// FIX: both weight ratios below were computed with integer division, truncating the
// triangular weights to 0 or 1; cast to double so the rising/falling slopes are fractional.
for (int i = cbin[k - 1]; i <= cbin[k]; i++) {
num1 += ((double) (i - cbin[k - 1] + 1) / (cbin[k] - cbin[k - 1] + 1)) * bin[i];
}
for (int i = cbin[k] + 1; i <= cbin[k + 1]; i++) {
num2 += (1 - ((double) (i - cbin[k]) / (cbin[k + 1] - cbin[k] + 1))) * bin[i];
}
temp[k] = num1 + num2;
}
double fbank[] = new double[numMelFilters];
for (int i = 0; i < numMelFilters; i++) {
fbank[i] = temp[i + 1];
}
return fbank;
}
/**
 * Number of mel filters whose center frequency stays below maxFilterFreq
 * (returns index-1 of the first filter exceeding the limit).
 */
public static int recalculateMaxMelFilters(double samplingRate, int numMelFilters, float lowerFilterFreq, float maxFilterFreq) {
int bestIndex = 1;
for (int i = 1; i <= numMelFilters; i++) {
double fc = centerFreq(i, samplingRate, lowerFilterFreq, numMelFilters);
logger.debug("fc " + fc);
if (fc > maxFilterFreq) {
bestIndex = i;
break;
}
}
return bestIndex - 1;
}
/** Sample index -> time in seconds. */
public static double sample2Time(int sample, int sampleRate) {
return (double) sample / (double) sampleRate;
}
/** Time line (seconds) for a signal of signalLength samples. */
public static double[] signalTimeLine(int signalLength, double samplingRate) {
double time[] = new double[signalLength];
Arrays.fill(time, Double.NaN);
for (int i = 0; i < signalLength; i++) {
time[i] = (double) i / (double) samplingRate;
}
logger.debug("time " + time[signalLength - 1] * samplingRate + " vs " + signalLength);
return time;
}
/** Linear time -> spectrogram frame coordinate (fractional). */
public static float spectrumTime(float linearTime, float windowShiftTime) {
return linearTime / windowShiftTime;
}
/** Wraps a signal into a one-column RapidMiner ExampleSet keyed by sample index. */
public static ExampleSet signal2ExampleSet(double[] signal) {
BigSamplesTable samples = new BigSamplesTable();
for (int k = 0; k < signal.length; k++) {
samples.addSampleRow("" + k, signal[k]);
}
logger.debug("Example Set Created");
return samples.generateExampleSet();
}
/**
 * Computes the spectrogram (Hamming window) and optionally shows it in a frame.
 * Returns one spectrum per frame.
 */
public static double[][] spectrogram(String name, double[] signal, int samplingRate, int windowshift, int frameslength, boolean display) throws Exception {
SpectrogramCustom spec = new SpectrogramCustom(signal, samplingRate, Window.get(Window.HAMMING, frameslength), windowshift, frameslength, 640, 480);
double[][] spectrum = spec.spectra.toArray(new double[spec.spectra.size()][]);
if (display) {
spec.showInJFrame(name, true, true);
/*
 * save spectrograms to files BufferedImage image = createImage(spec); ImageIO.write(ImageTools.toBufferedImage(image), "png", new File(name+".png"));
 */
// Thread.sleep(2000);
// createImage(spec);
}
return spectrum;
}
/** Displays a precomputed spectrogram, re-using SpectrogramCustom for rendering. */
public static void displaySpectrogram(double[][] spectrum, double[] signal, String name, int samplingRate, int windowshift, int frameslength) throws Exception {
SpectrogramCustom spec = new SpectrogramCustom(signal, samplingRate, Window.get(Window.HAMMING, frameslength), windowshift, frameslength, 640, 480);
spec.spectra = new ArrayList<double[]>();
for (int i = 0; i < spectrum.length; i++) {
spec.spectra.add(spectrum[i]);
}
spec.showInJFrame(name, true, true);
}
/** Spectrogram frame index -> time (seconds). */
public static float spectrogramTimeFromIndex(int index, float windowShiftTime) {
return index * windowShiftTime;
}
/** Time (seconds) -> spectrogram frame index. */
public static int spectrogramIndex(float linearTime, float windowShiftTime) {
return (int) (linearTime / windowShiftTime);
}
/** Time (seconds) -> sample index. */
public static int time2Sample(double time, int sampleRate) {
return (int) (time * sampleRate);
}
// Per-frame "peakness" of the spectrum, filled by takeMaxFrequenciesInSpectrogram.
public double[] averagepower;
/**
 * For each spectrogram frame, finds the dominant frequency after the first local
 * minimum (to skip the DC/low-frequency roll-off) and a relative power measure.
 */
public double[] takeMaxFrequenciesInSpectrogram(double[][] spectrogram, int samplingRate, int windowSamples, float minfreq) {
double[] maxs = new double[spectrogram.length];
averagepower = new double[spectrogram.length];
int j = 0;
if (TimeSeriesAnalysis.display){
for (int g=0;g<spectrogram.length;g++){
SignalProcessing.displaySignalWithGenericTime(spectrogram[g], 0, 1, "spectrum "+(g+1));
}
}
// a new max/min must differ by 5% to be accepted (hysteresis against noise)
double tolerance = 0.05;
for (double[] slice : spectrogram) {
int bestidx = 0;
double max = -Double.MAX_VALUE;
double min = Double.MAX_VALUE;
//take the best frequency over the first minimum in the spectrum
boolean overfirstmin = false;
for (int k = 1; k < slice.length; k++) {
double ele = slice[k];
if (!overfirstmin && (slice[k]>slice[k-1])){
logger.debug("First minimum in spectrum is at idx "+k);
overfirstmin=true;
}
if (overfirstmin) {
if (ele > (max + (Math.abs(max) * tolerance))) {
// logger.debug(">max up:"+ele +">" +(max + (Math.abs(max) * tolerance))+" at idx "+k);
max = ele;
bestidx = k;
}
if (ele < (min - (Math.abs(min) * tolerance))) {
min = ele;
}
}
}
// maxs[j] = spectrogram[j][bestidx];
// maxs[j]=bestidx;
// slices start at minfreq, so shift the best index by the minfreq bin offset
int minFidx = SignalConverter.frequencyIndex(minfreq, windowSamples, samplingRate);
// System.out.println("min f idx: "+minFidx);
maxs[j] = spectrumIdx2Frequency(minFidx + bestidx, samplingRate, windowSamples);
double mean = org.gcube.contentmanagement.graphtools.utils.MathFunctions.mean(slice);
logger.debug("max freq in spec: " + maxs[j]+" index "+minFidx + bestidx);
if (min == Double.MAX_VALUE) {
min = max;
}
if (max == -Double.MAX_VALUE) {
averagepower[j] = 0;
} else {
// normalize against the frame minimum, then measure peak prominence vs the mean
max = max - min;
mean = mean - min;
if (max == 0)
averagepower[j] = 0;
else
averagepower[j] = Math.abs((max - mean) / max);
}
logger.debug("max power : " + max + " min power: " + min + " mean " + mean + " power " + averagepower[j]);
j++;
}
return maxs;
}
// Per-frame spike powers, filled by takePeaksInSpectrogramFrames (parallel to its result).
ArrayList<Double>[] currentSpikesPowerSpectra;
/**
 * For each spectrogram frame, detects spectral peaks via derivative maxima and
 * records for each peak its frequency (result) and a power score relative to the
 * surrounding bins (currentSpikesPowerSpectra).
 */
public ArrayList<Double>[] takePeaksInSpectrogramFrames(double[][] spectrogram, int samplingRate, int windowSamples, float minfreq) {
ArrayList<Double>[] maxs = new ArrayList[spectrogram.length];
ArrayList<Double>[] powers = new ArrayList[spectrogram.length];
if (TimeSeriesAnalysis.display){
for (int g=0;g<spectrogram.length;g++){
SignalProcessing.displaySignalWithGenericTime(spectrogram[g], 0, 1, "spectrum "+(g+1));
}
}
int minFidx = SignalConverter.frequencyIndex(minfreq, windowSamples, samplingRate);
for (int j=0;j<spectrogram.length;j++) {
double[] slice = spectrogram[j];
double maxAmp = Operations.getMax(slice);
double minAmp = Operations.getMin(slice);
//old code: once we used the first element of the FFT as reference, but it is unrealiable
// NOTE(review): refAmplitude is computed but never used below — kept for reference.
double refAmplitude = 0;
if (maxAmp!=slice[0])
refAmplitude = (slice[0]-minAmp);//(maxAmp-minAmp)/2d;
else
refAmplitude = MathFunctions.mean(slice)-minAmp;
ArrayList<Double> maxFreqs = new ArrayList<Double>();
ArrayList<Double> localpowers = new ArrayList<Double>();
double [] derivSlice = MathFunctions.derivative(slice);
boolean [] spikes = MathFunctions.findMaxima(derivSlice,0.001);
for (int i=0;i<spikes.length;i++){
if (spikes[i]){
// logger.debug("Spike at "+i);
maxFreqs.add((double)spectrumIdx2Frequency(minFidx + i, samplingRate, windowSamples));
//make the min correspond to y=0
//take few samples around the spike and evaluate the amplitude with respect to the samples around
int round =Math.max(slice.length/10,1);
//take samples to the left
double roundmean = 0;
for (int g=1;g<=round;g++){
if (i-g>=0){
roundmean = roundmean+slice[i-g]-minAmp;
}
}
//take samples to the right
for (int g=1;g<=round;g++){
if (i+g<slice.length){
roundmean = roundmean+slice[i+g]-minAmp;
}
}
//take mean value
roundmean = roundmean/(2d*(double)round);
//calculate the power as the ration between the spike and the surrounding points
double power = (slice[i]-minAmp)/(roundmean);
localpowers.add(power);
}
}
powers[j]=localpowers;
maxs[j]=maxFreqs;
}
currentSpikesPowerSpectra=powers;
return maxs;
}
// Index bounds (into the input signal) of the last tract found by takeLongestStableTract.
public int startStableTractIdx = -1;
public int endStableTractIdx = -1;
/**
 * Returns the longest contiguous slice of the signal in which consecutive samples
 * differ by at most valuedifftoleranceperc (relative to the larger of the two).
 * Side effect: start/endStableTractIdx record the slice bounds.
 */
public double[] takeLongestStableTract(double[] signal, double valuedifftoleranceperc) {
ArrayList<int[]> pairs = new ArrayList<int[]>();
int idx1 = -1;
int[] pair = null;
// analyze the signal
for (int i = 1; i < signal.length; i++) {
// if there is not current range create it
if (idx1 == -1) {
idx1 = 1;
pair = new int[2];
pair[0] = i - 1;
pair[1] = i - 1;
}
// if the current sample is similar to the previous, enlarge the range
if (Math.abs(signal[i] - signal[i - 1]) / Math.max(signal[i], signal[i - 1]) <= valuedifftoleranceperc)
pair[1] = i;
// otherwise add the couple and reset
else {
idx1 = -1;
pairs.add(pair);
}
}
// if the last couple was reset, add the last interval
if (idx1 > -1)
pairs.add(pair);
// find the longest pair
int best = 0;
int maxsize = 0;
int k = 0;
for (int[] setcouple : pairs) {
int diff = setcouple[1] - setcouple[0];
if (diff > maxsize) {
maxsize = diff;
best = k;
}
k++;
}
// take the longest range
if (pairs.size() == 0) {
pairs.add(new int[] { 0, 1 });
}
int[] bestcouple = pairs.get(best);
// take the related slice of signal
if (bestcouple[1]==bestcouple[0])
bestcouple[1]=bestcouple[0]+1;
double[] subsignal = new double[bestcouple[1] - bestcouple[0]];
logger.debug("Longest range: from " + bestcouple[0] + " to " + bestcouple[1]);
startStableTractIdx = bestcouple[0];
endStableTractIdx = bestcouple[1];
int l = 0;
for (int i = bestcouple[0]; i < bestcouple[1]; i++) {
subsignal[l] = signal[i];
l++;
}
return subsignal;
}
/** Spectrum bin index -> frequency (Hz); inverse of spectrumFreq2Idx. */
public static float spectrumIdx2Frequency(int idx, int samplingRate, int windowsSizeSamples) {
return ((float) idx * samplingRate) / (1f*(float) (windowsSizeSamples - 1));
}
/** Frequency (Hz) -> spectrum bin index; inverse of spectrumIdx2Frequency. */
public static int spectrumFreq2Idx(float freq, int samplingRate, int windowsSizeSamples) {
return Math.round((windowsSizeSamples - 1) * 1f *freq / samplingRate);
}
}

View File

@ -1,488 +0,0 @@
package org.gcube.dataanalysis.ecoengine.signals;
import java.awt.Image;
import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import org.gcube.contentmanagement.graphtools.data.BigSamplesTable;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.NumericSeriesGraph;
import org.gcube.contentmanagement.graphtools.plotting.graphs.SpectrumPlot2;
import org.gcube.contentmanagement.graphtools.plotting.graphs.TimeSeriesGraph;
import org.gcube.contentmanagement.lexicalmatcher.utils.MathFunctions;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.jfree.chart.JFreeChart;
import org.jfree.data.time.FixedMillisecond;
import org.jfree.data.time.TimeSeriesCollection;
import org.jfree.data.xy.XYSeriesCollection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.rapidminer.example.ExampleSet;
import com.rapidminer.operator.preprocessing.sampling.AbsoluteSampling;
import com.rapidminer.operator.preprocessing.series.filter.SeriesMissingValueReplenishment;
import com.rapidminer.tools.OperatorService;
import marytts.signalproc.display.SpectrogramCustom;
import marytts.signalproc.window.Window;
public class SignalProcessing {
private static Logger logger = LoggerFactory.getLogger(SignalProcessing.class);
/**
 * Converts raw spectral frames into MFCC-style features: for every frame the
 * spectrum is passed through a mel filterbank restricted to
 * [minCutFequency, maxCutFrequency], turned into cepstral coefficients, and
 * finally widened with delta and double-delta coefficients (rows become 3x wide).
 */
public static double[][] applyFilterBank(double[][] feature, int numCepstra, int numMelFilters, int samplingRate, int frameLength, float minCutFequency, float maxCutFrequency) throws Exception {
// recalculate Mel filters on the basis of the maxFrequency
int recalcMelFilters = SignalConverter.recalculateMaxMelFilters(samplingRate, numMelFilters, minCutFequency, maxCutFrequency);
double[][] mels = new double[feature.length][numCepstra];
int i = 0;
// per frame: fft-bin boundaries -> mel filter energies -> cepstral coefficients
for (double[] bin : feature) {
int cbin[] = SignalConverter.fftBinIndices(samplingRate, frameLength, numMelFilters, recalcMelFilters, minCutFequency, maxCutFrequency);
double f[] = SignalConverter.melFilter(bin, cbin, recalcMelFilters);
double cepstra[] = SignalConverter.cepCoefficients(f, numCepstra, recalcMelFilters);
mels[i] = cepstra;
i++;
}
// copy the cepstra into a 3x-wide buffer; Delta fills the extra columns in place
double[][] deltamels = new double[feature.length][numCepstra * 3];
for (int k = 0; k < feature.length; k++) {
for (int g = 0; g < mels[0].length; g++) {
deltamels[k][g] = mels[k][g];
}
}
Delta.calcDelta(deltamels, numCepstra);
Delta.calcDoubleDelta(deltamels, numCepstra);
return deltamels;
}
/**
 * Computes the spectrogram of every signal in the list and merges them with
 * sumSpectra. All signals are assumed to have the same length.
 */
public static double[][] calculateSumSpectrum(List<double[]> signals, int windowShiftSamples, int frameLength, int samplingRate) throws Exception {
    int sampleCount = signals.get(0).length;
    logger.debug("TRIALS LENGHT " + sampleCount);
    List<double[][]> spectrograms = new ArrayList<double[][]>();
    logger.debug("Getting Spectra");
    // one spectrogram per input signal
    int signalIndex = 0;
    for (double[] signal : signals) {
        double[][] spectrogram = SignalConverter.spectrogram("Spectrogram", signal, samplingRate, windowShiftSamples, frameLength, false);
        logger.debug("Signal Number " + (signalIndex + 1) + " spectrum lenght " + ((spectrogram.length * windowShiftSamples) / samplingRate));
        spectrograms.add(spectrogram);
        signalIndex++;
    }
    logger.debug("Summing Spectra");
    // merge all spectrograms into one
    double[][] merged = SignalProcessing.sumSpectra(spectrograms);
    spectrograms = null;
    return merged;
}
// concatenates several spectra
/**
 * Joins equally-sized spectra side by side: the result has the same number of
 * rows and nSpectra * columns, with the k-th spectrum occupying columns
 * [k*cols, (k+1)*cols).
 */
public static double[][] concatenateSpectra(List<double[][]> spectra) {
    double[][] template = spectra.get(0);
    int rows = template.length;
    int cols = template[0].length;
    double[][] joined = new double[rows][cols * spectra.size()];
    int block = 0;
    for (double[][] spectrum : spectra) {
        int base = block * cols;
        for (int r = 0; r < rows; r++) {
            for (int c = 0; c < cols; c++) {
                joined[r][base + c] = spectrum[r][c];
            }
        }
        block++;
    }
    return joined;
}
/**
 * Opens a chart of the signal against a synthetic time line that starts at t0
 * and advances by timeshift per sample.
 */
public static void displaySignalWithGenericTime(double[] signal, float t0, float timeshift, String name) {
    org.jfree.data.xy.XYSeries series = new org.jfree.data.xy.XYSeries(name);
    float t = t0;
    for (double sample : signal) {
        series.add(t, sample);
        t = t + timeshift;
    }
    NumericSeriesGraph graph = new NumericSeriesGraph(name);
    graph.render(new XYSeriesCollection(series));
}
/**
 * Renders the signal against a synthetic time line into a 680x420 image, or
 * returns null when the signal is too long to plot (> 20000 points).
 */
public static Image renderSignalWithGenericTime(double[] signal, float t0, float timeshift, String name) {
    if (signal.length > 20000) {
        logger.debug("Too many points to display: " + signal.length);
        return null;
    }
    org.jfree.data.xy.XYSeries series = new org.jfree.data.xy.XYSeries(name);
    float t = t0;
    for (double sample : signal) {
        series.add(t, sample);
        t = t + timeshift;
    }
    JFreeChart chart = NumericSeriesGraph.createStaticChart(new XYSeriesCollection(series));
    return ImageTools.toImage(chart.createBufferedImage(680, 420));
}
/**
 * Renders the signal against an explicit per-sample time line into a 680x420
 * image, or returns null when the signal is too long (> 20000 points).
 */
public static Image renderSignalWithGenericTime(double[] signal, double[] timeline, String name) {
    if (signal.length > 20000) {
        logger.debug("Too many points to display: " + signal.length);
        return null;
    }
    org.jfree.data.xy.XYSeries series = new org.jfree.data.xy.XYSeries(name);
    for (int k = 0; k < signal.length; k++) {
        series.add(timeline[k], signal[k]);
    }
    JFreeChart chart = NumericSeriesGraph.createStaticChart(new XYSeriesCollection(series));
    return ImageTools.toImage(chart.createBufferedImage(680, 420));
}
/**
 * Renders the signal as a dated time series into a 680x420 image. Returns null
 * when the signal is too long (> 20000 points); samples whose timestamp is
 * already present in the series are skipped.
 */
public static Image renderSignalWithTime(double[] signal, Date[] dates, String name, String format) {
    if (signal.length > 20000) {
        logger.debug("Too many points to display: " + signal.length);
        return null;
    }
    org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name);
    for (int k = 0; k < signal.length; k++) {
        try {
            series.add(new FixedMillisecond(dates[k]), signal[k]);
        } catch (Exception duplicate) {
            // TimeSeries rejects duplicate periods; best-effort skip
            logger.debug("Skipping value yet present: " + dates[k]);
        }
    }
    TimeSeriesCollection dataset = new TimeSeriesCollection();
    dataset.addSeries(series);
    JFreeChart chart = TimeSeriesGraph.createStaticChart(dataset, format, name);
    return ImageTools.toImage(chart.createBufferedImage(680, 420));
}
/**
 * Renders several signals laid end to end on one shared dates[] timeline into a
 * single 680x420 chart image. Returns null if any signal exceeds 20000 points;
 * duplicate timestamps within a series are skipped.
 */
public static Image renderSignalsWithTime(List<double[]> signals, Date[] dates, List<String> names, String format) {
    TimeSeriesCollection dataset = new TimeSeriesCollection();
    int nsignals = signals.size();
    // Offset of the current signal inside the shared dates[] timeline.
    // FIX: the original set offset = signals.get(j-1).length, i.e. only the
    // immediately-preceding signal's length, which misaligns the third and
    // following series; the offset must accumulate over all previous signals.
    int offset = 0;
    for (int j = 0; j < nsignals; j++) {
        double[] signal = signals.get(j);
        String name = names.get(j);
        org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name);
        if (signal.length > 20000) {
            logger.debug("Too many points to display: " + signal.length);
            return null;
        }
        for (int i = offset; i < offset + signal.length; i++) {
            try {
                FixedMillisecond ms = new FixedMillisecond(dates[i]);
                series.add(ms, signal[i - offset]);
            } catch (Exception e) {
                logger.debug("Skipping value yet present: " + dates[i]);
            }
        }
        dataset.addSeries(series);
        offset += signal.length;
    }
    JFreeChart chart = TimeSeriesGraph.createStaticChart(dataset, format);
    Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
    return image;
}
/**
 * Renders the spectrogram of a signal (Hamming window) into a 640x480 image.
 * NOTE(review): the timeline parameter is not used by this renderer — kept for
 * interface compatibility.
 */
public static Image renderSignalSpectrogram(double[] signal, double[] timeline, int samplingRate, int frameslength, int windowshift) {
    SpectrogramCustom panel = new SpectrogramCustom(signal, samplingRate, Window.get(Window.HAMMING, frameslength), windowshift, frameslength, 640, 480);
    double[][] frames = panel.spectra.toArray(new double[panel.spectra.size()][]);
    // stretch horizontally so all frames fit the fixed 640px canvas
    panel.setZoomX(640d / (double) frames.length);
    BufferedImage rendered = SignalConverter.createImage(panel, 640, 480);
    return ImageTools.toImage(rendered);
}
/**
 * Renders an already-computed spectrogram matrix into a 640x480 image by
 * zooming a SpectrumPlot2 panel to the target canvas size.
 */
public static Image renderSignalSpectrogram2(double[][] spectrogram) {
    SpectrumPlot2 plot = new SpectrumPlot2(spectrogram);
    logger.debug("Spectrum W:" + plot.width);
    logger.debug("Spectrum H:" + plot.height);
    // scale both axes to the fixed canvas
    plot.hzoomSet(640f / (float) plot.width);
    plot.vzoomSet(480f / (float) plot.height);
    BufferedImage rendered = SignalConverter.createImage(plot, 640, 480);
    return ImageTools.toImage(rendered);
}
/**
 * Opens a chart of the signal as a dated time series. Does nothing when the
 * signal is too long (> 20000 points); duplicate timestamps are skipped.
 */
public static void displaySignalWithTime(double[] signal, Date[] dates, String name, String format) {
    if (signal.length > 20000) {
        logger.debug("Too many points to display: " + signal.length);
        return;
    }
    org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name);
    for (int k = 0; k < signal.length; k++) {
        try {
            series.add(new FixedMillisecond(dates[k]), signal[k]);
        } catch (Exception duplicate) {
            logger.debug("Skipping value yet present: " + dates[k]);
        }
    }
    TimeSeriesCollection dataset = new TimeSeriesCollection();
    dataset.addSeries(series);
    TimeSeriesGraph tsg = new TimeSeriesGraph(name);
    tsg.timeseriesformat = format;
    tsg.render(dataset);
}
/**
 * Opens one chart containing several signals laid end to end on one shared
 * dates[] timeline. Returns without rendering if any signal exceeds 20000
 * points; duplicate timestamps within a series are skipped.
 */
public static void displaySignalsWithTime(List<double[]> signals, Date[] dates, List<String> names, String format) {
    TimeSeriesCollection dataset = new TimeSeriesCollection();
    int nsignals = signals.size();
    // Offset of the current signal inside the shared dates[] timeline.
    // FIX: the original set offset = signals.get(j-1).length, i.e. only the
    // immediately-preceding signal's length, which misaligns the third and
    // following series; the offset must accumulate over all previous signals.
    int offset = 0;
    for (int j = 0; j < nsignals; j++) {
        double[] signal = signals.get(j);
        String name = names.get(j);
        org.jfree.data.time.TimeSeries series = new org.jfree.data.time.TimeSeries(name);
        if (signal.length > 20000) {
            logger.debug("Too many points to display: " + signal.length);
            return ;
        }
        for (int i = offset; i < offset + signal.length; i++) {
            try {
                FixedMillisecond ms = new FixedMillisecond(dates[i]);
                series.add(ms, signal[i - offset]);
            } catch (Exception e) {
                logger.debug("Skipping value yet present: " + dates[i]);
            }
        }
        dataset.addSeries(series);
        offset += signal.length;
    }
    TimeSeriesGraph tsg = new TimeSeriesGraph("Multiple Time Series");
    tsg.timeseriesformat = format;
    tsg.render(dataset);
}
/**
 * Down-samples the signal to numElements points using the RapidMiner
 * "AbsoluteSampling" operator. Positions not covered by the sampled example set
 * remain NaN in the returned array (same length as the input).
 */
public static double[] downSample(double[] signal, int numElements, AlgorithmConfiguration config) throws Exception {
config.initRapidMiner();
// result buffer: entries not written back by the sampler stay NaN
double[] rebuiltSignal = new double[signal.length];
Arrays.fill(rebuiltSignal, Double.NaN);
// wrap the signal in an ExampleSet, one row per sample, keyed by sample index
BigSamplesTable samples = new BigSamplesTable();
for (int k = 0; k < signal.length; k++) {
samples.addSampleRow("" + k, signal[k]);
}
ExampleSet es = samples.generateExampleSet();
logger.debug("Example Set Created");
AbsoluteSampling sampler = (AbsoluteSampling) OperatorService.createOperator("AbsoluteSampling");
sampler.setParameter("sample_size", "" + numElements);
sampler.setParameter("local_random_seed", "-1");
es = sampler.apply(es);
logger.debug("Finished");
// scatter the sampled rows back into the buffer at their original indices
SignalConverter.exampleSet2Signal(rebuiltSignal, es);
return rebuiltSignal;
}
/**
 * Reads one signal per CSV line from the given file, computes each signal's
 * spectrogram and merges them all with sumSpectra.
 * (Cleanup: removed the unused locals signalLenght and sumspectrograms.)
 */
public static double[][] extractSumSpectrum(String file, int windowShiftSamples, int frameLength, int samplingRate) throws Exception {
    List<double[]> signals = SignalProcessing.readSignalsFromCSV(file, ",");
    List<double[][]> spectrograms = new ArrayList<double[][]>();
    logger.debug("Getting Spectra");
    int j = 0;
    // get all spectrograms
    for (double[] signal : signals) {
        logger.debug("Signal Number " + (j + 1));
        double[][] spectro = SignalConverter.spectrogram("Spectrogram", signal, samplingRate, windowShiftSamples, frameLength, false);
        spectrograms.add(spectro);
        j++;
    }
    logger.debug("Summing Spectra");
    // merge all spectrograms
    double[][] sumSpectro = SignalProcessing.sumSpectra(spectrograms);
    spectrograms = null;
    return sumSpectro;
}
/**
 * Fills/resamples a timeline expressed in milliseconds (via fillTimeSeries,
 * using the timeline as both values and times) and converts each filled value
 * to a Date.
 */
public static Date[] fillTimeLine(double[] timemilliseconds, double samplingRate, AlgorithmConfiguration config) throws Exception {
    double[] filled = fillTimeSeries(timemilliseconds, timemilliseconds, samplingRate, config);
    Date[] dates = new Date[filled.length];
    int k = 0;
    for (double ms : filled) {
        dates[k] = new Date((long) ms);
        k++;
    }
    return dates;
}
/**
 * Fills missing (NaN) entries of the signal using the RapidMiner
 * "SeriesMissingValueReplenishment" operator; any entry still missing after the
 * operator is written back as 0 (the filler passed to exampleSet2Signal).
 */
public static double[] fillSignal(double[] signal) throws Exception {
ExampleSet es = SignalConverter.signal2ExampleSet(signal);
SeriesMissingValueReplenishment sampler = (SeriesMissingValueReplenishment) OperatorService.createOperator("SeriesMissingValueReplenishment");
sampler.setParameter("attribute_name", "att0");
// "3" selects the operator's replenishment strategy — value meaning defined by RapidMiner
sampler.setParameter("replacement", "3");
es = sampler.apply(es);
logger.debug("Finished");
double[] rebuiltSignal = new double[signal.length];
SignalConverter.exampleSet2Signal(rebuiltSignal, es, 0d);
return rebuiltSignal;
}
/**
 * Re-grids an irregular time series onto a uniform grid at the given sampling
 * rate and fills the holes with RapidMiner's SeriesMissingValueReplenishment.
 * Returns the input unchanged when it already has the target length.
 */
public static double[] fillTimeSeries(double[] values, double[] timeseconds, double samplingRate, AlgorithmConfiguration config) throws Exception {
    double t0 = timeseconds[0];
    double t1 = timeseconds[timeseconds.length - 1];
    int signalength = Math.abs((int) ((t1 - t0) * samplingRate) + 1);
    logger.debug("SignalProcessing->Old Time Series had: " + values.length + " samples. New Time Series will have: " + signalength + " samples");
    if (values.length == signalength)
        return values;
    config.initRapidMiner();
    double signal[] = new double[signalength];
    Arrays.fill(signal, Double.NaN);
    // scatter the known values onto the uniform grid
    for (int i = 0; i < values.length; i++) {
        // FIX: the original tested values[i] != Double.NaN, which is ALWAYS true
        // (NaN compares unequal to everything, including itself); use isNaN so
        // missing inputs are genuinely skipped and left for the replenisher.
        if (!Double.isNaN(values[i])) {
            int index = Math.abs((int) ((timeseconds[i] - t0) * samplingRate));
            signal[index] = values[i];
        }
    }
    double[] rebuiltSignal = new double[signal.length];
    BigSamplesTable samples = new BigSamplesTable();
    for (int k = 0; k < signal.length; k++) {
        samples.addSampleRow("" + k, signal[k]);
    }
    ExampleSet es = samples.generateExampleSet();
    logger.debug("Example Set Created");
    SeriesMissingValueReplenishment sampler = (SeriesMissingValueReplenishment) OperatorService.createOperator("SeriesMissingValueReplenishment");
    sampler.setParameter("attribute_name", "att0");
    sampler.setParameter("replacement", "3");
    es = sampler.apply(es);
    logger.debug("Finished");
    SignalConverter.exampleSet2Signal(rebuiltSignal, es);
    return rebuiltSignal;
}
/**
 * Computes the spectrogram of each input signal and merges them into a single
 * aggregated spectrum via {@link #sumSpectra(List)}. All signals are assumed to
 * produce spectrograms of the same dimensions.
 *
 * @param samplingRate samples per second of the signals
 * @param windowshift  hop size (samples) between analysis frames
 * @param frameslength analysis frame length (samples)
 * @param display      whether the spectrogram rendering is shown
 * @throws Exception if spectrogram computation fails
 */
public static double[][] multiSignalAnalysis(List<double[]> signals, int samplingRate, int windowshift, int frameslength, boolean display) throws Exception {
    List<double[][]> allSpectra = new ArrayList<double[][]>();
    for (double[] currentSignal : signals) {
        allSpectra.add(SignalConverter.spectrogram("Spectrogram", currentSignal, samplingRate, windowshift, frameslength, display));
    }
    return sumSpectra(allSpectra);
}
/**
 * Reads one signal per line from a delimited text file.
 *
 * @param file      path of the file to read
 * @param delimiter value separator (regular expression, as used by String.split)
 * @return one double[] per non-missing line, in file order
 * @throws Exception on I/O errors or unparsable numeric tokens
 */
public static List<double[]> readSignalsFromCSV(String file, String delimiter) throws Exception {
    List<double[]> signals = new ArrayList<double[]>();
    // FIX: try-with-resources guarantees the reader is closed even when a
    // parse error is thrown mid-file (the original leaked it on exception).
    try (BufferedReader br = new BufferedReader(new FileReader(new File(file)))) {
        String line = br.readLine();
        while (line != null) {
            signals.add(readSignalFromCSVLine(line, delimiter));
            line = br.readLine();
        }
    }
    return signals;
}
/**
 * Parses a single delimited line into an array of doubles.
 *
 * @param line      text to parse, e.g. "1.5,2.0,-3.25"
 * @param delimiter value separator (regular expression, as used by String.split)
 * @return the parsed values, in order of appearance
 * @throws Exception declared for caller compatibility; parsing failures surface
 *                   as NumberFormatException
 */
public static double[] readSignalFromCSVLine(String line, String delimiter) throws Exception {
    String[] tokens = line.split(delimiter);
    double[] parsed = new double[tokens.length];
    int pos = 0;
    for (String token : tokens) {
        parsed[pos++] = Double.parseDouble(token);
    }
    return parsed;
}
/**
 * Merges several equally-sized spectra into one. Despite the name, each cell is
 * combined with MathFunctions.incrementAvg, so the result appears to be an
 * incremental average of the inputs rather than a plain sum.
 *
 * @param spectra non-empty list of matrices, all with the dimensions of the first
 * @return the merged matrix
 */
public static double[][] sumSpectra(List<double[][]> spectra) {
    int rows = spectra.get(0).length;
    int cols = spectra.get(0)[0].length;
    double[][] merged = new double[rows][cols];
    int count = 0;
    for (double[][] spectrum : spectra) {
        for (int r = 0; r < rows; r++) {
            for (int c = 0; c < cols; c++) {
                merged[r][c] = MathFunctions.incrementAvg(merged[r][c], spectrum[r][c], count);
            }
        }
        count++;
    }
    return merged;
}
/**
 * Cuts a window of frames out of a spectrogram.
 *
 * @param spectrum            spectrogram, one row per analysis frame
 * @param numOfCentralSeconds duration (seconds) of the cut to extract
 * @param windowShiftTime     hop time (seconds) between consecutive frames
 * @param sampleRate          unused by this method (kept for signature compatibility)
 * @return the extracted frames, (endIndex - startIndex + 1) rows
 *
 * NOTE(review): the start of the cut is computed as maxTime / (2 * numOfCentralSeconds),
 * which is not the series' midpoint; a truly central cut would start near
 * (maxTime - numOfCentralSeconds) / 2. Confirm whether this formula is intended.
 * NOTE(review): endIndex is not clamped to spectrum.length - 1; a large
 * numOfCentralSeconds could overrun the array — verify against callers.
 */
public static double[][] takeCentralSpectrum(double[][] spectrum, float numOfCentralSeconds, float windowShiftTime, int sampleRate) {
    // total duration covered by the spectrogram: frames * hop time
    float maxTime = ((float) spectrum.length * (float) windowShiftTime);
    float centralTime = (maxTime / (2f * numOfCentralSeconds));
    logger.debug("Max Time in the Spectrum " + maxTime + " Central time " + centralTime);
    // convert the time window back to frame indices
    int startIndex = (int) (centralTime / windowShiftTime);
    int endIndex = (int) ((centralTime + numOfCentralSeconds) / windowShiftTime);
    logger.debug("Recalculated lenght " + maxTime + " sec");
    logger.debug("Lenght " + spectrum.length);
    logger.debug("Start " + startIndex + " End " + endIndex + " max " + spectrum.length + " Cut lenght " + (endIndex - startIndex + 1) * windowShiftTime);
    double[][] cutSpectrum = new double[endIndex - startIndex + 1][spectrum[0].length];
    for (int i = startIndex; i <= endIndex; i++) {
        // rows are shared (no deep copy): the cut aliases the original frame arrays
    cutSpectrum[i - startIndex] = spectrum[i];
    }
    return cutSpectrum;
}
/**
 * Small manual smoke test: fills the NaN gaps of a short demo signal and prints
 * the result.
 */
public static void main(String[] args) throws Exception {
    AlgorithmConfiguration conf = new AlgorithmConfiguration();
    conf.setConfigPath("./cfg/");
    conf.initRapidMiner();
    double[] signal = new double[] { Double.NaN, 1, 2, 3, 4, 5, Double.NaN };
    double[] resignal = fillSignal(signal);
    // FIX: println(double[]) printed the array's identity hash ("[D@..."),
    // not its contents; Arrays.toString shows the actual values.
    System.out.println(Arrays.toString(resignal));
}
}

View File

@ -1,242 +0,0 @@
package org.gcube.dataanalysis.ecoengine.signals;
import java.text.SimpleDateFormat;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import org.gcube.contentmanagement.graphtools.utils.DateGuesser;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A (possibly non-uniform) time series held as three parallel arrays:
 * values, timestamps and textual time labels. Supports sorting by time,
 * minimum-gap detection, and resampling to a uniform signal via
 * SignalProcessing.
 */
public class TimeSeries {

    private static Logger logger = LoggerFactory.getLogger(TimeSeries.class);

    // parallel arrays: values[i] was observed at times[i] with label timeLabels[i]
    private double[] values;
    private Date[] times;
    // snapshot of the timestamps in insertion order; used by sort() to remap values
    private Date[] unsortedtimes;
    private String[] timeLabels;
    // smallest positive gap (ms) between consecutive timestamps; -1 until computed
    private long minimumtimegap = -1;
    // date pattern detected from the input labels (see buildFromSignal)
    private String timepattern;
    AlgorithmConfiguration config;

    /** Allocates an empty series of the given length, to be filled via addElement. */
    public TimeSeries(int timeLength, AlgorithmConfiguration config) {
        values = new double[timeLength];
        times = new Date[timeLength];
        unsortedtimes = new Date[timeLength];
        timeLabels = new String[timeLength];
        this.config = config;
    }

    /** Wraps pre-built arrays (time is kept by reference, plus a copy for sorting). */
    public TimeSeries(double[] values, Date[] time, String[] timeLabels, AlgorithmConfiguration config) {
        this.values = values;
        this.times = time;
        this.unsortedtimes = Arrays.copyOf(time, time.length);
        this.timeLabels = timeLabels;
        this.config = config;
    }

    public void setValues(double[] values) {
        this.values = values;
    }

    public void setTime(Date[] time) {
        this.times = time;
    }

    public void setTimeLabels(String[] timeLabels) {
        this.timeLabels = timeLabels;
    }

    public double[] getValues() {
        return values;
    }

    public String[] getLabels() {
        return timeLabels;
    }

    public Date[] getTime() {
        return times;
    }

    /**
     * Returns the timeline extended by furtherPointsinTime extra dates, spaced by
     * the minimum time gap after the last timestamp.
     *
     * NOTE(review): relies on minimumtimegap having been computed; if
     * getMimimumTimeGapInMillisecs() was never called, minimumtimegap is -1 and
     * the appended dates run backwards — confirm callers always compute it first.
     */
    public Date[] extendTime(int furtherPointsinTime){
        Date[] time = new Date[times.length+furtherPointsinTime];
        for (int i=0;i<times.length;i++){
            time[i]=times[i];
        }
        long lastDate = times[times.length-1].getTime();
        for (int i=times.length;i<(times.length+furtherPointsinTime);i++){
            time[i]=new Date(lastDate+(i+1-times.length)*minimumtimegap);
        }
        return time;
    }

    /** Timestamps as epoch milliseconds, in the current order of {@code times}. */
    public double[] getMillisecondsTimeline() {
        double[] secondstimes = new double[times.length];
        for (int i = 0; i < times.length; i++) {
            long t = times[i].getTime();
            secondstimes[i] = (double) t;
        }
        return secondstimes;
    }

    /**
     * Computes (once, then caches) the smallest strictly positive gap in
     * milliseconds between consecutive timestamps.
     */
    public long getMimimumTimeGapInMillisecs() {
        if (minimumtimegap > -1)
            return minimumtimegap;
        long mintime = Long.MAX_VALUE;
        for (int i = 1; i < times.length; i++) {
            long t0 = times[i - 1].getTime();
            long t1 = times[i].getTime();
            long timediff = Math.abs(t1 - t0);
            // zero gaps (duplicate timestamps) are ignored
            if (timediff < mintime && timediff > 0)
                mintime = timediff;
        }
        minimumtimegap = mintime;
        return mintime;
    }

    /** Stores one observation at position {@code index} of the parallel arrays. */
    public void addElement(double value, Date date, String label, int index) {
        values[index] = value;
        times[index] = date;
        unsortedtimes[index] = date;
        timeLabels[index] = label;
    }

    /**
     * Sorts the series chronologically, remapping values and labels to follow
     * their timestamps.
     *
     * NOTE(review): the remapping uses indexOf on the unsorted timestamps, so
     * duplicate Date values all resolve to the first occurrence — entries with
     * identical timestamps would get duplicated values. Confirm inputs have
     * distinct timestamps.
     */
    public void sort() {
        Arrays.sort(times);
        double[] tempvalues = new double[values.length];
        String[] temptimeLabels = new String[timeLabels.length];
        List<Date> unsortedTimesList = Arrays.asList(unsortedtimes);
        int i = 0;
        for (Date time : times) {
            int index = unsortedTimesList.indexOf(time);
            tempvalues[i] = values[index];
            temptimeLabels[i] = timeLabels[index];
            i++;
        }
        values = null;
        timeLabels = null;
        values = tempvalues;
        timeLabels = temptimeLabels;
        unsortedtimes = Arrays.copyOf(times, times.length);
    }

    public double getValue(int index) {
        return values[index];
    }

    public Date getDate(int index) {
        return times[index];
    }

    public String getTimeLabel(int index) {
        return timeLabels[index];
    }

    /**
     * Builds a sorted TimeSeries from (time, quantity) tuples. The time pattern
     * is guessed from the first row and reused for the whole series; rows that
     * fail to parse fall back to DateGuesser.convertDate.
     */
    // each element in the list is Time,Quantity
    public static TimeSeries buildFromSignal(List<Tuple<String>> lines, AlgorithmConfiguration config) throws Exception {
        TimeSeries ts = new TimeSeries(lines.size(), config);
        int counter = 0;
        String timepattern = null;
        SimpleDateFormat sdf = null;
        for (Tuple<String> line : lines) {
            String timel = line.getElements().get(0);
            timel = timel.replace("time:", "");
            Double quantity = Double.parseDouble(line.getElements().get(1));
            Date time = null;
            if (counter == 0) {
                // detect the date format once, on the first row
                timepattern = DateGuesser.getPattern(timel);
                ts.setTimepattern(timepattern);
                logger.debug("Time pattern: " + timepattern);
                sdf = new SimpleDateFormat(timepattern, Locale.ENGLISH);
            }
            try{
                time = (Date) sdf.parse(timel);
            }catch(Exception e){
                // tolerate rows that deviate from the detected pattern
                logger.debug("Error in parsing...adjusting "+timel);
                time = DateGuesser.convertDate(timel).getTime();
                logger.debug("Error in parsing...adjusting "+timel+" in "+time);
            }
            if (counter == 0) {
                logger.debug("Date detection: input " + timel + " output " + time);
            }
            ts.addElement(quantity, time, timel, counter);
            counter++;
        }
        ts.sort();
        return ts;
    }

    /**
     * Resamples the series to a uniform grid. If samplingrate <= 0 it is derived
     * as 1 / minimumtimegap (samples per millisecond). After resampling, labels
     * are regenerated with a fixed "dd/MM/yyyy HH:mm" format.
     */
    public void convertToUniformSignal(double samplingrate) throws Exception {
        if (samplingrate <= 0) {
            if (minimumtimegap < 0)
                getMimimumTimeGapInMillisecs();
            if (minimumtimegap > 0)
                samplingrate = 1d / (double) minimumtimegap;
        }
        logger.debug("TimeSeries->Samplig rate: " + samplingrate + " minimum gap in time: " + minimumtimegap);
        if (samplingrate == 0)
            return;
        double[] timeline = getMillisecondsTimeline();
        logger.debug("TimeSeries->filling gaps");
        double[] newvalues = SignalProcessing.fillTimeSeries(values, timeline, samplingrate, config);
        if (newvalues.length != values.length) {
            // the series was actually resampled: rebuild the timeline as well
            logger.debug("TimeSeries->filling also time values");
            Date[] newtimeline = SignalProcessing.fillTimeLine(timeline, samplingrate, config);
            values = null;
            times = null;
            unsortedtimes = null;
            values = newvalues;
            times = newtimeline;
            unsortedtimes = newtimeline;
            timeLabels = new String[times.length];
        }
        logger.debug("TimeSeries->Returning values");
        timeLabels = new String[times.length];
        SimpleDateFormat sdf = new SimpleDateFormat("dd/MM/yyyy HH:mm", Locale.ROOT);
        for (int i = 0; i < times.length; i++) {
            timeLabels[i] = sdf.format(times[i]);
        }
    }

    /** Divides every value by the series maximum (as computed by Operations.getMax). */
    public void normalize() throws Exception {
        double max = Operations.getMax(values);
        for (int i = 0; i < values.length; i++) {
            values[i] = values[i] / max;
        }
    }

    public String getTimepattern() {
        return timepattern;
    }

    public void setTimepattern(String timepattern) {
        this.timepattern = timepattern;
    }
}

View File

@ -1,259 +0,0 @@
package org.gcube.dataanalysis.ecoengine.signals.ssa;
import java.util.ArrayList;
import java.util.List;
import org.jfree.chart.ChartPanel;
import Jama.Matrix;
/**
 * Data holder for one Singular Spectrum Analysis (Caterpillar-SSA) run:
 * the input series, the intermediate matrices of the decomposition, the
 * reconstructed/forecast outputs, and bookkeeping for the chart display.
 * Plain mutable bean; populated step by step by SingularSpectrumAnalysis.
 */
public class SSADataset {
    private List<Double> timeSeries; //the original time series
    private int L; //length of window
    private double inclosureMatrix [][]; //matrix attachment (L x K embedding/trajectory matrix)
    private Matrix X []; //Basic Matrix singular decomposition
    private Matrix groupX []; //the resulting matrix for each of the groups
    private Matrix V []; //the main components of singular decomposition
    private List<Double> reconstructionList; // series rebuilt from the selected components
    private List<Double> forecastList; // reconstruction extended by the forecast points
    private double[] reconstructedSignal;
    private double[] forecastSignal;
    private List <Double> SMA; //moving averages
    private List <Double> cov; //averaging the diagonal covariance
    private List <Double> eigenValueList;//eigenvalues
    private List <Double> lgEigenValue; //log of the eigenvalues
    private List <Double> sqrtEigenValue;//roots of eigenvalues
    private List eigenVectors; //eigenvectors
    private List <Double> percentList; //percentage weight of each eigenvalue
    private List<Double> accruePercentList; //accrued interest eigenvalues (cumulative percentages)
    private double percThreshold=1; // min eigenvalue percentage for a component to be kept

    /*
     * for a cascading display InternalFrame
     */
    private int nextFrameX;
    private int nextFrameY;
    private int frameDistance;
    private int eigenFuncPage;
    private int mainCompPage;
    private List<ChartPanel> eigenVecListCharts;
    private List<ChartPanel> mainCompListCharts;

    /** Creates an empty dataset with the minimum window length (L = 2). */
    public SSADataset() {
        timeSeries = new ArrayList<Double>();
        L = 2;
    }

    public List getEigenVectors() {
        return eigenVectors;
    }

    public void setEigenVectors(List eigenVectors) {
        this.eigenVectors = eigenVectors;
    }

    public Matrix[] getV() {
        return V;
    }

    public void setV(Matrix[] V) {
        this.V = V;
    }

    public List<Double> getTimeSeries() {
        return timeSeries;
    }

    public void setTimeSeries(List<Double> timeSeries) {
        this.timeSeries = timeSeries;
    }

    public int getL() {
        return L;
    }

    public void setL(int L) {
        this.L = L;
    }

    public double[][] getInclosureMatrix() {
        return inclosureMatrix;
    }

    public void setInclosureMatrix(double matrix[][]) {
        inclosureMatrix = matrix;
    }

    public Matrix[] getX() {
        return X;
    }

    public void setX(Matrix X[]) {
        this.X = X;
    }

    public List<Double> getReconstructionList() {
        return reconstructionList;
    }

    public void setReconstructionList(List<Double> reconstructionList) {
        this.reconstructionList = reconstructionList;
    }

    public List<Double> getSMA() {
        return SMA;
    }

    public void setSMA(List<Double> SMA) {
        this.SMA = SMA;
    }

    public List<Double> getCov() {
        return cov;
    }

    public void setCov(List<Double> cov) {
        this.cov = cov;
    }

    public void setLgEigenValue(List<Double> lgEigenValue) {
        this.lgEigenValue = lgEigenValue;
    }

    public List<Double> getLgEigenValue() {
        return lgEigenValue;
    }

    public void setSqrtEigenValue(List<Double> sqrtEigenValue) {
        this.sqrtEigenValue = sqrtEigenValue;
    }

    public List<Double> getSqrtEigenValue() {
        return sqrtEigenValue;
    }

    public List<Double> getEigenValueList() {
        return eigenValueList;
    }

    public void setEigenValueList(List<Double> eigenValueList) {
        this.eigenValueList = eigenValueList;
    }

    public List<Double> getAccruePercentList() {
        return accruePercentList;
    }

    public void setAccruePercentList(List<Double> accruePercentList) {
        this.accruePercentList = accruePercentList;
    }

    public List<Double> getPercentList() {
        return percentList;
    }

    public void setPercentList(List<Double> percentList) {
        this.percentList = percentList;
    }

    public void setFrameDistance(int frameDistance) {
        this.frameDistance = frameDistance;
    }

    public void setNextFrameX(int nextFrameX) {
        this.nextFrameX = nextFrameX;
    }

    public void setNextFrameY(int nextFrameY) {
        this.nextFrameY = nextFrameY;
    }

    public int getFrameDistance() {
        return frameDistance;
    }

    public int getNextFrameX() {
        return nextFrameX;
    }

    public int getNextFrameY() {
        return nextFrameY;
    }

    public int getEigenFuncPage() {
        return eigenFuncPage;
    }

    public void setEigenFuncPage(int eigenFuncPage) {
        this.eigenFuncPage = eigenFuncPage;
    }

    public List<ChartPanel> getEigenVecListCharts() {
        return eigenVecListCharts;
    }

    public void setEigenVecListCharts(List<ChartPanel> eigenVecListCharts) {
        this.eigenVecListCharts = eigenVecListCharts;
    }

    public List<ChartPanel> getMainCompListCharts() {
        return mainCompListCharts;
    }

    public void setMainCompListCharts(List<ChartPanel> mainCompListCharts) {
        this.mainCompListCharts = mainCompListCharts;
    }

    public int getMainCompPage() {
        return mainCompPage;
    }

    public void setMainCompPage(int mainCompPage) {
        this.mainCompPage = mainCompPage;
    }

    public Matrix[] getGroupX() {
        return groupX;
    }

    public void setGroupX(Matrix[] groupX) {
        this.groupX = groupX;
    }

    public double getPercThreshold() {
        return percThreshold;
    }

    public void setPercThreshold(double percThreshold) {
        this.percThreshold = percThreshold;
    }

    public List<Double> getForecastList() {
        return forecastList;
    }

    public void setForecastList(List<Double> forecastList) {
        this.forecastList = forecastList;
    }

    public double[] getReconstructedSignal() {
        return reconstructedSignal;
    }

    public void setReconstructedSignal(double[] reconstructedSignal) {
        this.reconstructedSignal = reconstructedSignal;
    }

    public double[] getForecastSignal() {
        return forecastSignal;
    }

    public void setForecastSignal(double[] forecastSignal) {
        this.forecastSignal = forecastSignal;
    }
}

View File

@ -1,30 +0,0 @@
package org.gcube.dataanalysis.ecoengine.signals.ssa;
import java.util.List;
/**
 * Wraps one group of selected SSA eigen-component entries (typically
 * SSAUnselectList instances) used by the grouping stage of the analysis.
 */
public class SSAGroupList {

    // elements of the group; rendered via each element's toString()
    private List groups;

    public SSAGroupList(List groups) {
        this.groups = groups;
    }

    /**
     * Comma-separated rendering of the group elements, no trailing separator.
     * FIX: uses a StringBuilder instead of the original O(n^2) string
     * concatenation inside the loop; the produced text is identical.
     */
    public String toString() {
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < groups.size(); i++) {
            if (i > 0) {
                sb.append(",");
            }
            sb.append(groups.get(i).toString());
        }
        return sb.toString();
    }

    public List getGroups() {
        return groups;
    }
}

View File

@ -1,27 +0,0 @@
package org.gcube.dataanalysis.ecoengine.signals.ssa;
import java.math.BigDecimal;
import java.math.RoundingMode;
/**
 * One selectable SSA eigen-component: its zero-based index and the percentage
 * weight of the corresponding eigenvalue. Rendered as "index+1(percent%)" with
 * the percentage rounded to 4 decimal places (HALF_EVEN).
 */
public class SSAUnselectList{

    private int index;
    private double percent;

    public SSAUnselectList(int index, double percent) {
        this.index = index;
        this.percent = percent;
    }

    public int getIndex() {
        return index;
    }

    /** e.g. index 0, percent 50.0 renders as "1(50.0%)". */
    public String toString() {
        // round to 4 decimals with banker's rounding, matching the display convention
        double rounded = new BigDecimal(percent).setScale(4, RoundingMode.HALF_EVEN).doubleValue();
        return "" + (index + 1) + "(" + rounded + "%)";
    }
}

View File

@ -1,70 +0,0 @@
package org.gcube.dataanalysis.ecoengine.signals.ssa;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Orchestrates a full Caterpillar-SSA pass over a time series: embedding,
 * singular decomposition, component selection by eigenvalue percentage,
 * reconstruction (diagonal averaging) and forecasting.
 */
public class SSAWorkflow {

    private static Logger logger = LoggerFactory.getLogger(SSAWorkflow.class);

    /**
     * Runs the complete SSA pipeline.
     *
     * @param timeseries                     input series
     * @param analysisWindowLength           embedding window length L
     * @param eigenValuesPercentageThreshold minimum eigenvalue percentage for a
     *                                       component to be kept in the reconstruction
     * @param nPointsToForecast              number of points to forecast past the series end
     * @param reportReconstructedSignal      if true, the forecast recursion is seeded with
     *                                       the reconstructed series instead of the original
     * @return the populated dataset; see notes below on which fields hold what
     *
     * NOTE(review): both setReconstructedSignal and setForecastSignal receive the
     * same array built from getForecastList() (length = input + nPointsToForecast);
     * the pure reconstruction is only available via data.getReconstructionList().
     */
    public static SSADataset applyCompleteWorkflow(List<Double> timeseries, int analysisWindowLength, float eigenValuesPercentageThreshold, int nPointsToForecast, boolean reportReconstructedSignal){
        SSADataset data = new SSADataset();
        data.setTimeSeries(timeseries);
        data.setL(analysisWindowLength);
        data.setPercThreshold(eigenValuesPercentageThreshold);
        // step 1: Embedding of time series in a LxK matrix
        // L = the length of the window
        // K = timeseries.size() - L + 1 the number of vectors of attachments
        SingularSpectrumAnalysis.inclosure(data);
        // apply SVD and get a number of eigenvectors equal to the rank of the
        // embedding matrix
        System.gc();
        SingularSpectrumAnalysis.singularDecomposition(data);
        // calculate averages for each frame of the time series
        System.gc();
        SingularSpectrumAnalysis.setMovingAverage(data);
        // Diagonal averaging of the covariance matrix
        System.gc();
        SingularSpectrumAnalysis.averagedCovariance(data);
        // store the logs and the sqrts of the eigenvalues
        System.gc();
        SingularSpectrumAnalysis.functionEigenValue(data);
        // build groups of indices: keep every component whose eigenvalue weight
        // exceeds the percentage threshold
        List<SSAGroupList> groupsModel = new ArrayList<SSAGroupList>();
        List<SSAUnselectList> groups = new ArrayList<SSAUnselectList>();
        logger.debug("Listing All the Eigenvalues");
        for (int i = 0; i < data.getPercentList().size(); i++) {
            double currentperc = data.getPercentList().get(i);
            logger.debug("Eigenvalue: Number: "+i+" Percentage: "+currentperc);
            if (currentperc>eigenValuesPercentageThreshold)
                groups.add(new SSAUnselectList(i, currentperc));
        }
        groupsModel.add(new SSAGroupList(groups));
        //build a matrix which is the sum of the groups matrices
        SingularSpectrumAnalysis.grouping(groupsModel, data);
        // restoration of the time series (the diagonal averaging)
        SingularSpectrumAnalysis.diagonalAveraging(data);
        double[] signal = new double[data.getTimeSeries().size()];
        for(int i = 0; i < data.getTimeSeries().size(); i++) signal[i] = data.getTimeSeries().get(i);
        SingularSpectrumAnalysis.forecast(data,nPointsToForecast,reportReconstructedSignal);
        double[] rsignal = new double[data.getForecastList().size()];
        for(int i = 0; i < data.getForecastList().size(); i++) rsignal[i] = data.getForecastList().get(i);
        data.setReconstructedSignal(rsignal);
        data.setForecastSignal(rsignal);
        // SignalProcessing.displaySignalWithGenericTime(signal, 0, 1, "signal");
        // SignalProcessing.displaySignalWithGenericTime(rsignal, 0, 1, "reconstructed signal");
        logger.debug("SSA workflow DONE");
        return data;
    }
}

View File

@ -1,418 +0,0 @@
package org.gcube.dataanalysis.ecoengine.signals.ssa;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import Jama.EigenvalueDecomposition;
import Jama.Matrix;
/**
 * Core numerical steps of Caterpillar-SSA: embedding, singular value
 * decomposition of the trajectory matrix, grouping, diagonal averaging
 * (reconstruction) and linear-recurrence forecasting. All methods read from
 * and write back into a shared SSADataset.
 */
public class SingularSpectrumAnalysis {

    private static Logger logger = LoggerFactory.getLogger(SingularSpectrumAnalysis.class);

    /**
     * translation of the original time series into a sequence of multidimensional
     * vectors (the L x K trajectory/embedding matrix, stored column-wise)
     *
     * @param data data for analysis
     */
    public static void inclosure(SSADataset data) {
        int L = data.getL(); //the length of the window
        int K = data.getTimeSeries().size() - L + 1; //the number of vectors of attachments
        double inclosureMatrix[][] = new double[L][K]; //trajectory matrix
        //form attachment vectors: column i-1 holds series elements [i-1 .. i+L-2]
        for (int i = 1; i <= K; i++) {
            int num = 0;
            for (int j = i - 1; j <= i + L - 2; j++) {
                inclosureMatrix[num][i - 1] = data.getTimeSeries().get(j);
                num++;
            }
        }
        data.setInclosureMatrix(inclosureMatrix);
    }

    /**
     * singular value decomposition of the trajectory matrix: computes the
     * eigenvalues of X * X^T (sorted descending), their percentage weights,
     * the eigenvector series, and the elementary matrices X[i].
     *
     * @param data data for analysis
     */
    public static void singularDecomposition(SSADataset data) {
        double inclosureMatrix[][] = data.getInclosureMatrix();
        double transp[][] = transpositionMatrix(inclosureMatrix);
        Matrix S = new Matrix(inclosureMatrix).times(new Matrix(transp));
        //int d = new Matrix(inclosureMatrix).rank(); //rank of matrix attachment
        EigenvalueDecomposition decomposition = new EigenvalueDecomposition(S);
        Matrix eigenvalue = decomposition.getD(); //matrix with eigenvalues
        Matrix eigenvec = decomposition.getV(); //the matrix of eigenvectors
        List<Double> eigenvalueList = new ArrayList<Double>();
        //form the set of eigenvalues on the diagonal
        for (int i = 0; i < eigenvalue.getRowDimension(); i++) {
            for (int j = 0; j < eigenvalue.getRowDimension(); j++) {
                if (i == j) {
                    eigenvalueList.add(eigenvalue.get(i, j));
                }
            }
        }
        Comparator comparator = Collections.reverseOrder();
        /*
         * eigenvalues must be in descending order, so
         * we sort them in reverse order (they initially come in ascending
         * order)
         */
        Collections.sort(eigenvalueList, comparator);
        data.setEigenValueList(eigenvalueList);
        double sumValueList = 0;
        List<Double> percentList;
        List<Double> accruePercentList;
        for (int i = 0; i < data.getEigenValueList().size(); i++) {
            sumValueList = sumValueList + data.getEigenValueList().get(i);
        }
        //percentage weight of each eigenvalue, plus the cumulative percentages
        percentList = new ArrayList<Double>();
        accruePercentList = new ArrayList<Double>();
        double accruePercent = 0;
        for (int i = 0; i < data.getEigenValueList().size(); i++) {
            percentList.add(data.getEigenValueList().get(i) / sumValueList * 100);
            accruePercent += percentList.get(i);
            accruePercentList.add(accruePercent);
        }
        data.setAccruePercentList(accruePercentList);
        data.setPercentList(percentList);
        int size = eigenvec.getColumnDimension();
        Matrix V[] = new Matrix[size];
        Matrix U[] = new Matrix[size];
        Matrix X[] = new Matrix[size]; //Elementary matrices of the singular value decomposition
        ArrayList listSeries = new ArrayList();
        for (int j = 0; j < eigenvec.getColumnDimension(); j++) {
            double uVec[][] = new double[size][1];
            ArrayList series = new ArrayList();
            for (int k = 0; k < eigenvec.getRowDimension(); k++) {
                /*
                 * vectors must match their own eigenvalue number, so
                 * start with the last eigenvector (Jama returns them in
                 * ascending eigenvalue order)
                 */
                uVec[k][0] = eigenvec.get(k, eigenvec.getColumnDimension() - j - 1);
                series.add(uVec[k][0]);
            }
            listSeries.add(series);
            U[j] = new Matrix(uVec);
            V[j] = new Matrix(transp).times(U[j]);
        }
        data.setEigenVectors(listSeries);
        // normalize V[i] by sqrt(lambda_i)
        for (int i = 0; i < V.length; i++) {
            for (int j = 0; j < V[i].getRowDimension(); j++) {
                for (int k = 0; k < V[i].getColumnDimension(); k++) {
                    double val = V[i].get(j, k) / Math.sqrt(eigenvalueList.get(i));
                    V[i].set(j, k, val);
                }
            }
        }
        data.setV(V);
        // X[i] = sqrt(lambda_i) * U[i] * V[i]^T
        for (int i = 0; i < X.length; i++) {
            X[i] = U[i].times(V[i].transpose());
            for (int j = 0; j < X[i].getRowDimension(); j++) {
                for (int k = 0; k < X[i].getColumnDimension(); k++) {
                    double val = X[i].get(j, k) * Math.sqrt(eigenvalueList.get(i));
                    X[i].set(j, k, val);
                }
            }
        }
        data.setX(X);
    }

    /**
     * restoration of the time series (grouping stage): for each group, sums
     * the elementary matrices X[i] of the component indices it contains.
     *
     * @param model group list (each entry holds SSAUnselectList elements)
     * @param data data for analysis
     */
    public static void grouping(List<SSAGroupList> model, SSADataset data) {
        Matrix grouX[] = new Matrix[model.size()];
        for (int i = 0; i < model.size(); i++) {
            SSAGroupList obj = (SSAGroupList) model.get(i);
            for (int j = 0; j < obj.getGroups().size(); j++) {
                SSAUnselectList unselect = (SSAUnselectList) obj.getGroups().get(j);
                if (j == 0) {
                    grouX[i] = data.getX()[unselect.getIndex()];
                } else {
                    grouX[i] = grouX[i].plus(data.getX()[unselect.getIndex()]);
                }
            }
        }
        data.setGroupX(grouX);
    }

    /**
     * restoration of the time series (the diagonal averaging stage): each group
     * matrix is averaged along its anti-diagonals to yield a series of length
     * rows + cols - 1, and the per-group series are summed into the final
     * reconstruction.
     *
     * @param data data for analysis
     */
    public static void diagonalAveraging(SSADataset data) {
        int L;
        int K;
        int N;
        List<List> list = new ArrayList<List>();
        for (int i = 0; i < data.getGroupX().length; i++) {
            // L = min(rows, cols), K = max(rows, cols) of the group matrix
            if (data.getGroupX()[i].getRowDimension() < data.getGroupX()[i].getColumnDimension()) {
                L = data.getGroupX()[i].getRowDimension();
                K = data.getGroupX()[i].getColumnDimension();
            } else {
                K = data.getGroupX()[i].getRowDimension();
                L = data.getGroupX()[i].getColumnDimension();
            }
            N = data.getGroupX()[i].getRowDimension() + data.getGroupX()[i].getColumnDimension() - 1;
            List series = new ArrayList();
            double element;
            // three regimes of the anti-diagonal average: growing head,
            // full-length middle, shrinking tail
            for (int k = 0; k <= N - 1; k++) {
                element = 0;
                if (k >= 0 && k < L - 1) {
                    for (int m = 0; m <= k; m++) {
                        if (data.getGroupX()[i].getRowDimension() <= data.getGroupX()[i].getColumnDimension()) {
                            element += data.getGroupX()[i].get(m, k - m);
                        } else if (data.getGroupX()[i].getRowDimension() > data.getGroupX()[i].getColumnDimension()) {
                            element += data.getGroupX()[i].get(k - m, m);
                        }
                    }
                    element = element * (1.0 / (k + 1));
                    series.add(element);
                }
                if (k >= L - 1 && k < K - 1) {
                    for (int m = 0; m <= L - 2; m++) {
                        if (data.getGroupX()[i].getRowDimension() <= data.getGroupX()[i].getColumnDimension()) {
                            element += data.getGroupX()[i].get(m, k - m);
                        } else if (data.getGroupX()[i].getRowDimension() > data.getGroupX()[i].getColumnDimension()) {
                            element += data.getGroupX()[i].get(k - m, m);
                        }
                    }
                    element = element * (1.0 / L);
                    series.add(element);
                }
                if (k >= K - 1 && k < N) {
                    for (int m = k - K + 1; m <= N - K; m++) {
                        if (data.getGroupX()[i].getRowDimension() <= data.getGroupX()[i].getColumnDimension()) {
                            element += data.getGroupX()[i].get(m, k - m);
                        } else if (data.getGroupX()[i].getRowDimension() > data.getGroupX()[i].getColumnDimension()) {
                            element += data.getGroupX()[i].get(k - m, m);
                        }
                    }
                    element = element * (1.0 / (N - k));
                    series.add(element);
                }
            }
            list.add(series);
        }
        double sum;
        //sum the per-group series to obtain the reconstructed series
        List<Double> reconstructionList = new ArrayList<Double>();
        for (int j = 0; j < list.get(0).size(); j++) {
            sum = 0;
            for (int i = 0; i < list.size(); i++) {
                sum += (Double) list.get(i).get(j);
            }
            reconstructionList.add(sum);
        }
        //added by Gianpaolo Coro
        /*
        double reconstructionratio = 1;
        double ratiosum = 0;
        int tssize = data.getTimeSeries().size();
        for (int j = 0; j < tssize ; j++) {
        double ratio = data.getTimeSeries().get(j)/reconstructionList.get(j);
        ratiosum=ratiosum+ratio;
        }
        reconstructionratio = ratiosum/tssize;
        System.out.println("Reconstruction ratio: "+reconstructionratio);
        for (int j = 0; j < tssize ; j++) {
        reconstructionList.set(j,reconstructionratio*reconstructionList.get(j));
        }
        */
        data.setReconstructionList(reconstructionList);
    }

    /**
     * the transpose of a matrix
     *
     * @param matrix the original matrix
     * @return the resulting matrix
     */
    private static double[][] transpositionMatrix(double matrix[][]) {
        logger.debug("SSA->Building a matrix with dimensions: "+matrix[0].length+" X "+matrix.length);
        double transpMatrix[][] = new double[matrix[0].length][matrix.length];
        for (int i = 0; i < matrix.length; i++) {
            for (int j = 0; j < matrix[i].length; j++) {
                transpMatrix[j][i] = matrix[i][j];
            }
        }
        return transpMatrix;
    }

    /**
     * formation of moving averages
     *
     * NOTE(review): setSMA is invoked inside the loop; harmless since the same
     * list instance is re-set each time, but it only needs to be called once.
     *
     * @param data data for analysis
     */
    public static void setMovingAverage(SSADataset data) {
        List<Double> SMA = new ArrayList<Double>();
        int m = data.getTimeSeries().size() - data.getL() + 1; //averaging period
        for (int i = 0; i < data.getL(); i++) {
            double sum = 0;
            double avg = 0;
            for (int j = i; j < m + i; j++) {
                sum += data.getTimeSeries().get(j);
            }
            avg = sum / m;
            SMA.add(avg);
            data.setSMA(SMA);
        }
    }

    /**
     * averaging of the covariance matrix along its anti-diagonals
     * (only even anti-diagonal indices are retained)
     * @param data data for analysis
     */
    public static void averagedCovariance(SSADataset data) {
        double avg;
        double K = data.getTimeSeries().size() - data.getL() + 1; //the number of vectors of attachments
        List<Double> covarianceList = new ArrayList<Double>();
        double transp[][] = transpositionMatrix(data.getInclosureMatrix());
        Matrix S = new Matrix(data.getInclosureMatrix()).times(new Matrix(transp));
        S = S.times(1.0 / K); //covariance matrix
        int size = S.getColumnDimension();
        int N = size + size - 1;
        int n;
        for (int k = 0; k < N; k++) {
            if ((k % 2) == 0) {
                if (k >= 0 && k < size) {
                    avg = 0;
                    n = 0;
                    for (int m = 0; m <= k; m++) {
                        avg += S.get(m, size - 1 - (k - m));
                        n++;
                    }
                    avg = avg / (n);
                    covarianceList.add(avg);
                }
                if (k >= size && k < N) {
                    avg = 0;
                    n = 0;
                    for (int m = k - size + 1; m <= N - size; m++) {
                        avg += S.get(m, size - 1 - (k - m));
                        n++;
                    }
                    avg = avg / (n);
                    covarianceList.add(avg);
                }
            }
        }
        data.setCov(covarianceList);
    }

    /**
     * formation of the eigenvalue functions: natural log and square root
     * of each eigenvalue
     * @param data data for analysis
     */
    public static void functionEigenValue(SSADataset data) {
        List<Double> lgList = new ArrayList<Double>();
        List<Double> sqrtList = new ArrayList<Double>();
        for (int i = 0; i < data.getEigenValueList().size(); i++) {
            lgList.add((Double) Math.log(data.getEigenValueList().get(i)));
            sqrtList.add(Math.sqrt(data.getEigenValueList().get(i)));
        }
        data.setLgEigenValue(lgList);
        data.setSqrtEigenValue(sqrtList);
    }

    /**
     * Forecasts nPointsToForecast further points via the SSA linear recurrence:
     * the eigenvectors whose eigenvalue weight exceeds the dataset's percentage
     * threshold define recurrence coefficients R, and new points are generated
     * as linear combinations of the previous L-1 points. The result stored in
     * the forecast list is the seed series (original or reconstructed,
     * depending on reconstructedSignal) followed by the forecast points.
     *
     * author Gianpaolo Coro
     * @param data
     */
    public static void forecast(SSADataset data, int nPointsToForecast, boolean reconstructedSignal){
        if (nPointsToForecast==0){
            data.setForecastList(data.getReconstructionList());
            return;
        }
        // List eigenvectors = data.getEigenVectors().subList(0, 11);
        int nTotalEigenV = data.getPercentList().size();
        int bestEigenVectors = nTotalEigenV;
        //find the best number of eigenvectors to use for the forecast
        for (int i=0;i<nTotalEigenV;i++){
            double currentperc = data.getPercentList().get(i);
            if (currentperc<data.getPercThreshold()){
                bestEigenVectors=i+1;
                break;
            }
        }
        List eigenvectors = data.getEigenVectors().subList(0, bestEigenVectors);
        int L = data.getL();
        int lastcoordinate = L-1;
        logger.debug("SSA: value for L: "+L);
        int nEigenVectors = eigenvectors.size();
        logger.debug("Number of Selected Eigenvectors For Reconstruction: "+nEigenVectors);
        // p = last coordinate of each selected eigenvector
        double[] p = new double[nEigenVectors];
        for (int i = 0;i<nEigenVectors;i++){
            p[i] = (Double)((List)eigenvectors.get(i)).get(lastcoordinate);
        }
        // P = first L-1 coordinates of each selected eigenvector
        double[][] P = new double[nEigenVectors][L-1];
        for (int i = 0;i<nEigenVectors;i++){
            List<Double> evec = (List)eigenvectors.get(i);
            for (int j =0;j<(L-1);j++)
                P[i][j] = evec.get(j);
        }
        // ni^2 = squared norm of the last coordinates (verticality coefficient)
        double ni_sqr = 0d;
        for (int i = 0;i<nEigenVectors;i++){
            ni_sqr = ni_sqr+(p[i]*p[i]);
        }
        // R = recurrence coefficients of the SSA forecasting formula
        double [] R = new double[L-1];
        for (int j=0;j<L-1;j++){
            double rj = 0d;
            for (int i=0;i<nEigenVectors;i++){
                rj = rj+(p[i]*P[i][j]);
            }
            // R[i] = (1d/(1d-ni_sqr))*ri;
            R[j] = rj/(1-ni_sqr);
        }
        int M = nPointsToForecast;
        List<Double> y = new ArrayList<Double>();
        int signalSize = data.getTimeSeries().size();
        for (int j =0 ;j<(signalSize+M);j++){
            if (j<signalSize){
                // seed region: copy the reconstructed or original series
                if (reconstructedSignal)
                    y.add(j,data.getReconstructionList().get(j));
                else
                    y.add(j,data.getTimeSeries().get(j));
            }
            else
            {
                // forecast region: linear combination of the previous L-1 points
                double sumprec = 0;
                for (int g=0;g<L-1;g++){
                    double ag = R[L-2-g];
                    double yj_g = y.get(j-g-1);
                    sumprec=sumprec+ag*yj_g;
                }
                y.add(j, sumprec);
                // System.out.println("Forecast: "+y.get(j));
            }
        }
        logger.debug("Length of the original signal: "+signalSize+" Length of the reconstructed signal: "+y.size());
        data.setForecastList(y);
    }
}

View File

@ -1,342 +0,0 @@
package org.gcube.dataanalysis.ecoengine.transducers;
import java.awt.Image;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.contentmanagement.graphtools.utils.DateGuesser;
import org.gcube.contentmanagement.graphtools.utils.MathFunctions;
import org.gcube.dataanalysis.ecoengine.datatypes.ColumnType;
import org.gcube.dataanalysis.ecoengine.datatypes.DatabaseType;
import org.gcube.dataanalysis.ecoengine.datatypes.InputTable;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.TableTemplates;
import org.gcube.dataanalysis.ecoengine.interfaces.DataAnalysis;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.SignalProcessing;
import org.gcube.dataanalysis.ecoengine.signals.TimeSeries;
import org.gcube.dataanalysis.ecoengine.signals.ssa.SSADataset;
import org.gcube.dataanalysis.ecoengine.signals.ssa.SSAWorkflow;
import org.gcube.dataanalysis.ecoengine.utils.AggregationFunctions;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseFactory;
import org.gcube.dataanalysis.ecoengine.utils.DatabaseUtils;
import org.gcube.dataanalysis.ecoengine.utils.Operations;
import org.gcube.dataanalysis.ecoengine.utils.Tuple;
import org.hibernate.SessionFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Signal-processing transducer for a non-uniform time series stored in a database table.
 * The process aggregates values per time instant, uniformly resamples the series,
 * detects periodicities via FFT spectral analysis, and forecasts the series with
 * Caterpillar-SSA. Outputs are a set of charts, a map of detected properties and a
 * CSV report file.
 */
public class TimeSeriesAnalysis extends StandardLocalExternalAlgorithm {

	// Input-parameter names; these are also runtime map keys, so the values
	// (including the "ValueColum"/"TimeColum" spellings) must not be changed.
	private static String timeSeriesTable = "TimeSeriesTable";
	private static String valuesColumn = "ValueColum";
	private static String timeColumn = "TimeColum";
	private static String fftwindowsamples = "FFT_Window_Samples";
	private static String aggregationFunction = "AggregationFunction";
	private static String sensitivityParam = "Sensitivity";
	private static String SSAAnalysisWindowSamples = "SSA_Window_in_Samples";
	private static String SSAEigenvaluesThreshold = "SSA_EigenvaluesThreshold";
	private static String SSAPointsToForecast = "SSA_Points_to_Forecast";

	// Rendered charts; any of these stays null when rendering was not possible
	// (e.g. too many points), and getOutput() skips null images.
	private Image signalImg = null;
	private Image uniformSignalImg = null;
	private Image uniformSignalSamplesImg = null;
	private Image spectrogramImg = null;
	private Image forecastsignalImg = null;
	private Image eigenValuesImg = null;
	// CSV report with sampled values, time line, forecast and eigenvalues.
	private File outputfilename = null;
	// When true, charts are additionally shown in interactive windows (local debugging).
	public static boolean display = false;
	// Hard limit on the number of distinct time points accepted as input.
	private static int maxpoints = 10000;

	/** Sensitivity of the periodicity detector to frequency components. */
	public enum Sensitivity {
		LOW, NORMAL, HIGH
	}

	// FIX: the original used LoggerFactory.getLogger(DataAnalysis.class), which
	// misattributed every log line of this algorithm to the DataAnalysis category.
	private static Logger logger = LoggerFactory.getLogger(TimeSeriesAnalysis.class);

	@Override
	public void init() throws Exception {
		// No initialization needed: all resources are created and released in process().
	}

	@Override
	public String getDescription() {
		return "An algorithms applying signal processing to a non uniform time series. A maximum of " + maxpoints + " distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram.";
	}

	/**
	 * Runs the full analysis pipeline: extraction, uniform sampling, spectral
	 * analysis, SSA forecast, chart rendering and CSV report production.
	 *
	 * @throws Exception if the table holds no series, the series is too long,
	 *                   or any processing step fails (original cause preserved)
	 */
	@Override
	protected void process() throws Exception {
		SessionFactory dbconnection = null;
		status = 0;
		try {
			dbconnection = DatabaseUtils.initDBSession(config);
			String tablename = config.getParam(timeSeriesTable);
			String valuescolum = config.getParam(valuesColumn);
			String timecolumn = config.getParam(timeColumn);
			String aggregationFunc = config.getParam(aggregationFunction);
			String fftwindowsamplesS = config.getParam(fftwindowsamples);
			int windowLength = Integer.parseInt(config.getParam(SSAAnalysisWindowSamples));
			float eigenvaluespercthr = Float.parseFloat(config.getParam(SSAEigenvaluesThreshold));
			int pointsToReconstruct = Integer.parseInt(config.getParam(SSAPointsToForecast));
			Sensitivity sensitivityP = Sensitivity.LOW;
			// Best-effort parse: fall back to LOW when the parameter is missing or invalid.
			try {
				sensitivityP = Sensitivity.valueOf(config.getParam(sensitivityParam));
			} catch (Exception e) {
				// deliberately ignored: LOW is the documented default
			}
			// Map the symbolic sensitivity onto the numeric threshold used by the detector.
			float sensitivity = 9;
			switch (sensitivityP) {
			case LOW:
				sensitivity = 9;
				break;
			case NORMAL:
				sensitivity = 5;
				break;
			case HIGH:
				sensitivity = 1;
				break;
			}
			int fftWindowSamplesDouble = 1;
			// Defaults for optional parameters.
			if (timecolumn == null)
				timecolumn = "time";
			if (aggregationFunc == null)
				aggregationFunc = "SUM";
			if (fftwindowsamplesS != null) {
				try {
					fftWindowSamplesDouble = Integer.parseInt(fftwindowsamplesS);
				} catch (Exception e) {
					// deliberately ignored: keep default of 1 on malformed input
				}
			}
			logger.debug("TimeSeriesAnalysis->Table Name: " + tablename);
			logger.debug("TimeSeriesAnalysis->Time Column: " + timecolumn);
			logger.debug("TimeSeriesAnalysis->Values Column: " + valuescolum);
			logger.debug("TimeSeriesAnalysis->Aggregation: " + aggregationFunc);
			logger.debug("TimeSeriesAnalysis->FFT Window Samples: " + fftWindowSamplesDouble);
			logger.debug("TimeSeriesAnalysis->SSA Window Samples: " + windowLength);
			logger.debug("TimeSeriesAnalysis->SSA Eigenvalues threshold: " + eigenvaluespercthr);
			logger.debug("TimeSeriesAnalysis->SSA Points to Reconstruct: " + pointsToReconstruct);
			logger.debug("TimeSeriesAnalysis->Extracting Points...");
			// Aggregate values per time instant with the chosen SQL function.
			String query = "select * from (select " + aggregationFunc + "( CAST ( " + valuescolum + " as real))," + timecolumn + " from " + tablename + " group by " + timecolumn + ") as a";
			logger.debug("TimeSeriesAnalysis->Query to execute: " + query);
			List<Object> results = DatabaseFactory.executeSQLQuery(query, dbconnection);
			status = 10;
			if (results == null || results.size() == 0)
				throw new Exception("Error in retrieving values from the table: no time series found");
			else if (results.size() > maxpoints)
				throw new Exception("Too long Time Series: a maximum of distinct " + maxpoints + " in time is allowed");
			logger.debug("TimeSeriesAnalysis->Points Extracted!");
			// build signal
			logger.debug("TimeSeriesAnalysis->Building signal");
			List<Tuple<String>> signal = new ArrayList<Tuple<String>>();
			int sizesignal = 0;
			for (Object row : results) {
				Object[] srow = (Object[]) row;
				String value = "" + srow[0];
				String time = "" + srow[1];
				signal.add(new Tuple<String>(time, value));
				sizesignal++;
			}
			status = 20;
			logger.debug("TimeSeriesAnalysis->Signal built with success. Size: " + sizesignal);
			logger.debug("TimeSeriesAnalysis->Building Time Series");
			TimeSeries ts = TimeSeries.buildFromSignal(signal, config);
			String timepattern = ts.getTimepattern();
			String chartpattern = "MM-dd-yy";
			// Switch to a seconds-resolution chart pattern when time stamps are sub-day.
			if (timepattern.equals("s") || (DateGuesser.isJavaDateOrigin(ts.getTime()[0]) && DateGuesser.isJavaDateOrigin(ts.getTime()[ts.getTime().length - 1]))) {
				logger.debug("TimeSeriesAnalysis->Changing chart pattern to Seconds!");
				chartpattern = "HH:mm:ss:SS";
			} else
				logger.debug("TimeSeriesAnalysis->Chart pattern remains " + chartpattern);
			logger.debug("TimeSeriesAnalysis->Uniformly sampling the signal");
			if (display)
				SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Time Series", chartpattern);
			signalImg = SignalProcessing.renderSignalWithTime(ts.getValues(), ts.getTime(), "Original Time Series", chartpattern);
			int originalSignalLength = ts.getValues().length;
			ts.convertToUniformSignal(0);
			logger.debug("TimeSeriesAnalysis->Uniform sampling finished");
			status = 30;
			// spectrum and signal processing
			logger.debug("TimeSeriesAnalysis->Detecting periodicities");
			PeriodicityDetector pd = new PeriodicityDetector();
			LinkedHashMap<String, String> frequencies = pd.detectAllFrequencies(ts.getValues(), 1, 0.01f, 0.5f, -1, fftWindowSamplesDouble, sensitivity, display);
			outputParameters.put("Original Time Series Length", "" + originalSignalLength);
			outputParameters.put("Uniformly Samples Time Series Length", "" + ts.getValues().length);
			outputParameters.put("Spectral Analysis Window Length", "" + pd.currentWindowAnalysisSamples);
			outputParameters.put("Spectral Analysis Window Shift", "" + pd.currentWindowShiftSamples);
			outputParameters.put("Spectral Analysis Sampling Rate", "" + MathFunctions.roundDecimal(pd.currentSamplingRate, 2));
			outputParameters.put("Spectrogram Sections", "" + pd.currentspectrum.length);
			outputParameters.put("Range of frequencies (in samples^-1) represented in the Spectrogram:", "[" + MathFunctions.roundDecimal(pd.minFrequency, 2) + " ; " + MathFunctions.roundDecimal(pd.maxFrequency, 2) + "]");
			outputParameters.put("Unit of Measure of Frequency", "samples^-1");
			outputParameters.put("Unit of Measure of Time", "samples");
			for (String freqPar : frequencies.keySet()) {
				outputParameters.put(freqPar, frequencies.get(freqPar));
			}
			logger.debug("TimeSeriesAnalysis->Periodicity Detected!");
			status = 60;
			System.gc();
			logger.debug("TimeSeriesAnalysis->Executing SSA analysis");
			List<Double> values = new ArrayList<Double>();
			for (double v : ts.getValues()) {
				values.add(v);
			}
			Date[] newtimes = ts.extendTime(pointsToReconstruct);
			SSADataset ssa = null;
			// SSA requires the analysis window to be strictly shorter than the signal.
			if (windowLength < ts.getValues().length)
				ssa = SSAWorkflow.applyCompleteWorkflow(values, windowLength, eigenvaluespercthr, pointsToReconstruct, false);
			else {
				logger.debug("TimeSeriesAnalysis->SSA analysis impossible to complete");
				outputParameters.put("SSA Note:", "The window length is higher than the signal length. Please reduce the value to less than the signal length.");
				return;
			}
			logger.debug("TimeSeriesAnalysis->SSA analysis completed");
			status = 70;
			logger.debug("TimeSeriesAnalysis->Rendering Images");
			uniformSignalImg = SignalProcessing.renderSignalWithTime(ts.getValues(), ts.getTime(), "Uniformly Sampled Time Series", chartpattern);
			if (uniformSignalImg == null)
				outputParameters.put("Note:", "The charts for uniformly sampled and forecasted signals contain too many points and will not be displayed. The values will be only reported in the output file.");
			else
				outputParameters.put("Note:", "Details about the values are reported in the output file.");
			uniformSignalSamplesImg = SignalProcessing.renderSignalWithGenericTime(ts.getValues(), 0, 1, "Uniformly Sampled Time Series in Samples");
			spectrogramImg = SignalProcessing.renderSignalSpectrogram2(pd.currentspectrum);
			// The forecast chart shows the original series plus only the forecasted tail.
			int timeseriesV = ts.getValues().length;
			double[] forecastedpiece = Arrays.copyOfRange(ssa.getForecastSignal(), timeseriesV, timeseriesV + pointsToReconstruct);
			List<String> tsnames = new ArrayList<String>();
			tsnames.add("Original Time Series");
			tsnames.add("Forecasted Time Series");
			List<double[]> signals = new ArrayList<double[]>();
			signals.add(ts.getValues());
			signals.add(forecastedpiece);
			forecastsignalImg = SignalProcessing.renderSignalsWithTime(signals, newtimes, tsnames, chartpattern);
			if (display) {
				SignalProcessing.displaySignalsWithTime(signals, newtimes, tsnames, chartpattern);
			}
			double[] eigenValues = new double[ssa.getPercentList().size()];
			for (int i = 0; i < eigenValues.length; i++) {
				eigenValues[i] = ssa.getPercentList().get(i);
			}
			eigenValuesImg = SignalProcessing.renderSignalWithGenericTime(eigenValues, 0f, 1, "SSA Eigenvalues");
			logger.debug("TimeSeriesAnalysis->Images Rendered");
			System.gc();
			logger.debug("TimeSeriesAnalysis->Producing Files");
			outputfilename = new File(config.getPersistencePath(), valuescolum + "_SignalProcessing.csv");
			// FIX: try-with-resources guarantees the writer is closed even if a write fails
			// (the original leaked the stream on any exception between open and close).
			try (BufferedWriter bw = new BufferedWriter(new FileWriter(outputfilename))) {
				bw.write("Uniformly Sampled Time Series,Time Line,Forecasted Time Series,SSA Eigenvalues\n");
				// The four columns have different lengths; pad the shorter ones with blanks.
				int[] lengthsVector = { ts.getValues().length, newtimes.length, ssa.getForecastSignal().length, eigenValues.length };
				int maxLen = Operations.getMax(lengthsVector);
				for (int i = 0; i < maxLen; i++) {
					if (i < ts.getValues().length)
						bw.write("" + ts.getValues()[i] + ",");
					else
						bw.write(",");
					if (i < newtimes.length)
						bw.write("" + newtimes[i] + ",");
					else
						bw.write(",");
					if (i < ssa.getForecastSignal().length)
						bw.write("" + ssa.getForecastSignal()[i] + ",");
					else
						bw.write(",");
					if (i < eigenValues.length)
						bw.write("" + eigenValues[i] + ",");
					else
						bw.write(",");
					bw.write("\n");
				}
			}
			logger.debug("TimeSeriesAnalysis->Files Produced");
			if (display) {
				SignalProcessing.displaySignalWithTime(ts.getValues(), ts.getTime(), "Uniformly Sampled Time Series", chartpattern);
				SignalProcessing.displaySignalWithGenericTime(ts.getValues(), 0, 1, "Uniformly Sampled Time Series in Samples");
				SignalProcessing.displaySignalWithTime(ssa.getForecastSignal(), newtimes, "Forecasted Time Series", chartpattern);
				SignalProcessing.displaySignalWithGenericTime(eigenValues, 0f, 1, "SSA Eigenvalues");
			}
			logger.debug("TimeSeriesAnalysis->" + outputParameters);
			logger.debug("TimeSeriesAnalysis->Computation has finished");
		} catch (Throwable e) {
			// FIX: log through the logger and preserve the original cause; the original
			// printed the stack trace and rethrew only the message, losing the cause chain.
			logger.error("TimeSeriesAnalysis->Error during computation", e);
			throw new Exception(e.getLocalizedMessage(), e);
		} finally {
			if (dbconnection != null)
				dbconnection.close();
		}
	}

	/** Declares the algorithm's input parameters (table, columns, FFT/SSA settings). */
	@Override
	protected void setInputParameters() {
		// the time series table
		List<TableTemplates> templates = new ArrayList<TableTemplates>();
		templates.add(TableTemplates.TIMESERIES);
		InputTable p = new InputTable(templates, timeSeriesTable, "The table containing the time series", "timeseries");
		inputs.add(p);
		ColumnType p1 = new ColumnType(timeSeriesTable, valuesColumn, "The column containing the values of the time series", "values", false);
		inputs.add(p1);
		addIntegerInput(fftwindowsamples, "The number of samples N on which the Fourier Transform (FFT) will be extracted. It should be a power of two and less than the signal length, otherwise it will be automatically recalculated. The FFT will be calculated every N/2 samples, taking N samples each time. The spectrogram will display the FFT on the slices of N samples.", "12");
		addEnumerateInput(AggregationFunctions.values(), aggregationFunction, "Function to apply to samples with the same time instant", AggregationFunctions.SUM.name());
		addEnumerateInput(Sensitivity.values(), sensitivityParam, "Sensitivity to the frequency components. High sensitivity will report all the frequency components, low sensitivity will report only the most distant ones.", Sensitivity.LOW.name());
		addIntegerInput(SSAAnalysisWindowSamples, "The number of samples in the produced uniformly sampled signal, to use in the SSA algorithm. Must be strictly less than the Time Series length. This number should identify a portion of the signal long enough to make the system guess the nature of the trend", "20");
		addDoubleInput(SSAEigenvaluesThreshold, "The threshold under which an SSA eigenvalue will be ignored, along with its eigenvector, for the reconstruction of the signal", "0.7");
		addIntegerInput(SSAPointsToForecast, "The number of points to forecast over the original length of the time series", "10");
		DatabaseType.addDefaultDBPars(inputs);
	}

	/**
	 * Packs the detected properties, rendered charts and the CSV report into
	 * a single map-typed statistical output.
	 */
	@Override
	public StatisticalType getOutput() {
		LinkedHashMap<String, StatisticalType> outMap = PrimitiveType.stringMap2StatisticalMap(outputParameters);
		LinkedHashMap<String, Image> producedImages = new LinkedHashMap<String, Image>();
		if (signalImg != null)
			producedImages.put("Original Time Series", signalImg);
		if (uniformSignalImg != null)
			producedImages.put("Uniformly Sampled Time Series", uniformSignalImg);
		if (uniformSignalSamplesImg != null)
			producedImages.put("Uniformly Sampled Time Series in Samples", uniformSignalSamplesImg);
		if (forecastsignalImg != null)
			producedImages.put("Forecasted Time Series", forecastsignalImg);
		if (spectrogramImg != null)
			producedImages.put("Spectrogram of the Uniformly Sampled Time Series", spectrogramImg);
		if (eigenValuesImg != null)
			producedImages.put("SSA Eigenvalues", eigenValuesImg);
		PrimitiveType images = new PrimitiveType(HashMap.class.getName(), producedImages, PrimitiveTypes.IMAGES, "Time Series Report", "Charts reporting the Time Series Analysis");
		outMap.put("Images", images);
		if (outputfilename != null) {
			PrimitiveType file = new PrimitiveType(File.class.getName(), outputfilename, PrimitiveTypes.FILE, "AnalysisReport", "AnalysisReport");
			outMap.put("Analysis Report", file);
		}
		PrimitiveType p = new PrimitiveType(LinkedHashMap.class.getName(), outMap, PrimitiveTypes.MAP, "Output", "");
		return p;
	}

	@Override
	public void shutdown() {
		// Nothing to release: the DB session is closed in process()'s finally block.
	}
}

View File

@ -1,252 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.transducers.charts.QuantitiesAttributesChartsTransducerer;
import org.gcube.dataanalysis.ecoengine.transducers.charts.TimeSeriesChartsTransducerer;
import org.junit.Test;
/**
 * Manual/integration tests for the charts transducerers. Each test builds a
 * database-backed configuration pointing at a test table and runs either the
 * quantities/attributes or the time-series charts transducerer.
 *
 * Refactored to remove the copy-pasted configuration and run code: the shared
 * setup now lives in {@link #baseConfig}, and the execution boilerplate in the
 * two run helpers. Public test/entry-point signatures are unchanged.
 */
public class ChartsTests {

	/**
	 * Builds the common test configuration (logger, paths, DB connection, scope)
	 * with the given table/attributes/quantities parameters.
	 *
	 * @param inputTable name of the input table
	 * @param attributes '#'-separated attribute columns, or null to omit the parameter
	 * @param quantities '#'-separated quantity columns
	 */
	private static AlgorithmConfiguration baseConfig(String inputTable, String attributes, String quantities) {
		AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setConfigPath("./cfg/");
		config.setPersistencePath("./");
		config.setParam("DatabaseUserName", "utente");
		config.setParam("DatabasePassword", "d4science");
		config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		config.setParam("InputTable", inputTable);
		if (attributes != null)
			config.setParam("Attributes", attributes);
		config.setParam("Quantities", quantities);
		config.setGcubeScope("/gcube/devsec/devVRE");
		return config;
	}

	/** Runs the quantities/attributes charts transducerer (with display enabled) and prints its output. */
	private static void runQuantitiesCharts(AlgorithmConfiguration config) throws Exception {
		QuantitiesAttributesChartsTransducerer cscreator = new QuantitiesAttributesChartsTransducerer();
		cscreator.displaycharts = true;
		cscreator.setConfiguration(config);
		cscreator.compute();
		System.out.println("DONE! " + cscreator.getOutput());
	}

	/** Runs the time-series charts transducerer, optionally displaying the charts, and prints its output. */
	private static void runTimeSeriesCharts(AlgorithmConfiguration config, boolean displayCharts) throws Exception {
		TimeSeriesChartsTransducerer cscreator = new TimeSeriesChartsTransducerer();
		if (displayCharts)
			cscreator.displaycharts = true;
		cscreator.setConfiguration(config);
		cscreator.compute();
		System.out.println("DONE! " + cscreator.getOutput());
	}

	@Test
	public void testSmallGeneric() throws Exception {
		runQuantitiesCharts(baseConfig("hspen_mini", "speciesid#lifestage#faoareas", "depthmax#speccode"));
	}

	@Test
	public void testLargeGeneric() throws Exception {
		runQuantitiesCharts(baseConfig("generic_id6ef3e4fa_6a06_4df1_9445_553f2e918102", "long#lat", "long"));
	}

	@Test
	public void testStrangeGeneric() throws Exception {
		runQuantitiesCharts(baseConfig("generic_id0746f5ab_fb3e_4848_97cd_43f46ae57ac1", "time#quantity", "quantity"));
	}

	@Test
	public void testLonLatDataset() throws Exception {
		runQuantitiesCharts(baseConfig("csq_84e9302c161243a3b29f3eff9c392d3e", "field1", "field2"));
	}

	public static void main(String[] args) throws Exception {
		runQuantitiesCharts(baseConfig("generic_id0db6e87b_abd6_4dfc_aa05_208eab3df212", "decimallongitude#decimallatitude", "chlorophyll"));
	}

	public static void mainTimeSeriesMedium(String[] args) throws Exception {
		AlgorithmConfiguration config = baseConfig("timeseries_id4dd368bf_63fb_4d19_8e31_20ced63a477d", "country#area", "quantity");
		config.setParam("Time", "time");
		runTimeSeriesCharts(config, false);
	}

	public static void main1(String[] args) throws Exception {
		// No Attributes parameter for this dataset (was commented out in the original).
		AlgorithmConfiguration config = baseConfig("generic_idc3f49110_995b_45cd_9846_240f25c136be", null, "maxdepth");
		config.setParam("Time", "eventdate");
		runTimeSeriesCharts(config, false);
	}

	@Test
	public void testTimeSeriesSuperposed() throws Exception {
		AlgorithmConfiguration config = baseConfig("generic_id3249137c_1559_438c_857b_29942cb13118", "latdecdeg#longdecdeg", "specific_a");
		config.setParam("Time", "begperiod");
		runTimeSeriesCharts(config, true);
	}

	@Test
	public void testTimeSeriesLongFAO() throws Exception {
		AlgorithmConfiguration config = baseConfig("generic_id0746f5ab_fb3e_4848_97cd_43f46ae57ac1", "catch#country#suggested_country", "quantity");
		config.setParam("Time", "time");
		runTimeSeriesCharts(config, true);
	}
}

View File

@ -1,55 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.signalprocessing;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis;
/**
 * Manual driver that runs the TIME_SERIES_PROCESSING transducer over the IOTC
 * "Effort" time-series table and prints the resulting statistical output.
 */
public class IOTCAnalyse {

	/**
	 * Entry point: builds the configuration, obtains the first matching
	 * transducer, runs it through the regression harness and prints its output.
	 */
	public static void main(String[] args) throws Exception {
		System.out.println("*******START TEST******");
		List<ComputationalAgent> agents = TransducerersFactory.getTransducerers(configAlgorithm());
		ComputationalAgent processor = agents.get(0);
		processor.init();
		Regressor.process(processor);
		StatisticalType output = processor.getOutput();
		System.out.println("Output:" + output);
		agents = null;
		System.out.println("*******END TEST******");
	}

	/**
	 * Builds the algorithm configuration: test database connection, the IOTC
	 * effort table, FFT/SSA parameters and the gCube scope. Also switches the
	 * analysis into interactive display mode.
	 */
	public static AlgorithmConfiguration configAlgorithm() {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		AnalysisLogger.setLogger("./cfg/" + AlgorithmConfiguration.defaultLoggerFile);
		config.setAgent("TIME_SERIES_PROCESSING");
		config.setConfigPath("./cfg/");
		config.setPersistencePath("./");
		config.setParam("DatabaseUserName", "utente");
		config.setParam("DatabasePassword", "d4science");
		config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		config.setParam("TimeSeriesTable", "timeseries_id9ac52133_3d3b_418e_8d70_c61844623e81");
		config.setParam("ValueColum", "Effort");
		config.setParam("FFT_Window_Samples", "128");
		config.setParam("SSA_Window_in_Samples", "36");
		config.setParam("SSA_EigenvaluesThreshold", "0.07");
		config.setParam("SSA_Points_to_Forecast", "12");
		config.setParam("AggregationFunction", "SUM");
		config.setParam("Sensitivity", "LOW");
		config.setGcubeScope("/gcube");
		TimeSeriesAnalysis.display = true;
		return config;
	}
}

View File

@ -1,121 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.signalprocessing;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis;
/**
 * Manual driver that runs the TIME_SERIES_PROCESSING transducer over columns
 * of an IOTC vessels table (fishing hours, latitude, longitude).
 *
 * Refactored to remove the triplicated configuration code: the three public
 * factory methods (signatures unchanged) now delegate to a single private
 * helper parameterized by the values that differed between them.
 */
public class TestIOTCSignals {

	// Configurations to run; by default only the longitude analysis.
	static AlgorithmConfiguration[] configs = { IOTClongitudeConfig() };

	public static void main(String[] args) throws Exception {
		int wLength = (int) Math.pow(2, 1);
		System.out.println("L:" + wLength);
		for (int i = 0; i < configs.length; i++) {
			TimeSeriesAnalysis.display = true;
			System.out.println("*****************TEST " + i + " *****************");
			List<ComputationalAgent> trans = null;
			trans = TransducerersFactory.getTransducerers(configs[i]);
			trans.get(0).init();
			Regressor.process(trans.get(0));
			StatisticalType st = trans.get(0).getOutput();
			AnalysisLogger.getLogger().debug("ST:" + st);
			trans = null;
			System.out.println("*****************END TEST*****************");
		}
	}

	/**
	 * Builds the shared IOTC configuration. Only the analyzed column and the
	 * FFT/SSA window sizes vary between the public factory methods.
	 *
	 * Note: the logger is initialized from the first setConfigPath value
	 * ("./cfg/") before the path is reset to "./cfg", exactly as in the
	 * original per-method code.
	 */
	private static AlgorithmConfiguration iotcConfig(String valueColumn, String fftWindowSamples, String ssaWindowSamples, String pointsToForecast) {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setAgent("TIME_SERIES_PROCESSING");
		config.setConfigPath("./cfg/");
		config.setPersistencePath("./");
		config.setParam("DatabaseUserName", "utente");
		config.setParam("DatabasePassword", "d4science");
		config.setParam("DatabaseURL", "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb");
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		// vessels (table name deliberately kept with its trailing space, as in the original)
		config.setParam("TimeSeriesTable", "timeseries_idb73029b9_226e_4d0f_b828_24854d0b7b44 ");
		config.setParam("ValueColum", valueColumn);
		config.setParam("FFT_Window_Samples", fftWindowSamples);
		config.setParam("SSA_Window_in_Samples", ssaWindowSamples);
		config.setParam("SSA_EigenvaluesThreshold", "0.07");
		config.setParam("SSA_Points_to_Forecast", pointsToForecast);
		AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
		config.setGcubeScope("/gcube");
		config.setConfigPath("./cfg");
		return config;
	}

	/** Configuration analyzing the fishing_hours column. */
	public static AlgorithmConfiguration IOTCSignalConfig() {
		return iotcConfig("fishing_hours", "128", "80", "24");
	}

	/** Configuration analyzing the latitude column. */
	public static AlgorithmConfiguration IOTCLatitudeConfig() {
		return iotcConfig("latitude", "256", "200", "12");
	}

	/** Configuration analyzing the longitude column. */
	public static AlgorithmConfiguration IOTClongitudeConfig() {
		return iotcConfig("longitude", "256", "200", "12");
	}
}

View File

@ -1,98 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.signalprocessing;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.signals.PeriodicityDetector;
import org.gcube.dataanalysis.ecoengine.signals.ssa.SSAWorkflow;
import org.gcube.dataanalysis.ecoengine.utils.Transformations;
/**
 * Exercises the complete SSA (Singular Spectrum Analysis) workflow over
 * several reference time series: a saw-tooth series read from file, a clean
 * sine, a noisy sine, and a large aggregated CSV series.
 */
public class TestSSA {

	/**
	 * Runs {@link SSAWorkflow#applyCompleteWorkflow} on each reference series.
	 *
	 * @param args unused
	 * @throws Exception if a series file is missing or unparsable
	 */
	public static void main(String[] args) throws Exception {
		int windowLength = 20;
		float eigenvaluespercthr = 0.7f;
		int pointsToReconstruct = 100;
		SSAWorkflow.applyCompleteWorkflow(sawTimeSeries(), windowLength, eigenvaluespercthr, pointsToReconstruct, false);
		SSAWorkflow.applyCompleteWorkflow(sinTimeSeries(), windowLength, eigenvaluespercthr, pointsToReconstruct, false);
		SSAWorkflow.applyCompleteWorkflow(noisyTimeSeries(), windowLength, eigenvaluespercthr, pointsToReconstruct, false);
		// the large series only forecasts 10 points
		SSAWorkflow.applyCompleteWorkflow(largeTimeSeries(), windowLength, eigenvaluespercthr, 10, false);
	}

	/**
	 * Reads one double per line from the local file {@code timeseries}.
	 *
	 * @return the parsed series, in file order
	 * @throws Exception if the file is missing or a line is not a number
	 */
	public static List<Double> sawTimeSeries() throws Exception {
		String file = "timeseries";
		List<Double> timeseries = new ArrayList<Double>();
		// try-with-resources: the original version leaked the reader when parsing failed
		try (BufferedReader br = new BufferedReader(new FileReader(new File(file)))) {
			String line;
			while ((line = br.readLine()) != null) {
				timeseries.add(Double.parseDouble(line));
			}
		}
		return timeseries;
	}

	/**
	 * @return a synthetic clean sine signal (noise amplitude 0)
	 * @throws Exception propagated from the signal generator
	 */
	public static List<Double> sinTimeSeries() throws Exception {
		return boxSignal(new PeriodicityDetector().produceNoisySignal(120, 1, 0.1f, 0));
	}

	/**
	 * @return a synthetic sine signal with noise amplitude 1.2
	 * @throws Exception propagated from the signal generator
	 */
	public static List<Double> noisyTimeSeries() throws Exception {
		return boxSignal(new PeriodicityDetector().produceNoisySignal(120, 1, 0.1f, 1.2f));
	}

	// Boxes a primitive signal into the List<Double> shape SSAWorkflow expects.
	private static List<Double> boxSignal(double[] signal) {
		List<Double> timeseries = new ArrayList<Double>(signal.length);
		for (int i = 0; i < signal.length; i++) {
			timeseries.add(signal[i]);
		}
		return timeseries;
	}

	/**
	 * Reads {@code LargeTS.csv}, summing column 5 grouped by the key in
	 * column 3 (insertion order preserved), and returns the aggregated values.
	 *
	 * @return the aggregated series, one value per distinct key
	 * @throws Exception if the file is missing or a value is not a number
	 */
	public static List<Double> largeTimeSeries() throws Exception {
		String file = "LargeTS.csv";
		List<Double> timeseries = new ArrayList<Double>();
		LinkedHashMap<String, String> values = new LinkedHashMap<String, String>();
		// try-with-resources: the original version leaked the reader on parse errors
		try (BufferedReader br = new BufferedReader(new FileReader(new File(file)))) {
			String line = br.readLine(); // skip the CSV header row
			while ((line = br.readLine()) != null) {
				List<String> row = Transformations.parseCVSString(line, ",");
				if (values.get(row.get(3)) == null)
					values.put(row.get(3), row.get(5));
				else {
					double val = Double.parseDouble(values.get(row.get(3)));
					val = val + Double.parseDouble(row.get(5));
					values.put(row.get(3), "" + val);
				}
			}
		}
		for (String val : values.values()) {
			timeseries.add(Double.parseDouble(val));
		}
		return timeseries;
	}
}

View File

@ -1,421 +0,0 @@
package org.gcube.dataanalysis.ecoengine.test.signalprocessing;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.transducers.TimeSeriesAnalysis;
/**
 * Manual test driver for the TIME_SERIES_PROCESSING transducer.
 *
 * Each {@code *Config()} factory builds an {@link AlgorithmConfiguration}
 * pointing at a different test table/column; the {@code configs} array
 * selects which ones {@link #main} runs. The common database prologue and
 * SSA/logger epilogue are factored into private helpers so each factory
 * only states what differs.
 */
public class TestSimpleSignal {

	// static AlgorithmConfiguration[] configs = {periodicSignalConfig(), russianSignalConfig(),simpleSignalConfig(), occurrencePointsSignalConfig(),hugeSignalConfig()};
	//static AlgorithmConfiguration[] configs = {periodicSignalConfig(), russianSignalConfig(),simpleSignalConfig()};
	// static AlgorithmConfiguration[] configs = {NAFOSignalConfig()};
	// static AlgorithmConfiguration[] configs = {largeCustomSignalConfig()};
	// static AlgorithmConfiguration[] configs = {temperatureSignalConfig()};
	static AlgorithmConfiguration[] configs = {periodicSignalConfig()};
	// static AlgorithmConfiguration[] configs = {simpleSignalConfig()};
	// static AlgorithmConfiguration[] configs = {sawSignalConfig()};
	// static AlgorithmConfiguration[] configs = {temperatureSignalConfig()};
	// static AlgorithmConfiguration[] configs = {temperatureBariSignalConfig()};
	// static AlgorithmConfiguration[] configs = {russianSignalConfig()};
	// static AlgorithmConfiguration[] configs = {largeCustomSignalConfig()};
	// static AlgorithmConfiguration[] configs = {occurrencePointsSignalConfig()};
	// static AlgorithmConfiguration[] configs = {hugeSignalConfig()};
	// static AlgorithmConfiguration[] configs = {IOTCSSignalConfig()};
	// static AlgorithmConfiguration[] configs = {temperatureShortSignalConfig()};

	/** JDBC URL of the shared remote statistical-manager test database. */
	private static final String SM_TESTDB_URL = "jdbc:postgresql://statistical-manager.d.d4science.research-infrastructures.eu/testdb";

	/**
	 * Runs every configuration in {@link #configs} through the transducer
	 * factory, initialising and processing each one in turn.
	 *
	 * @param args unused
	 * @throws Exception propagated from transducer init/processing
	 */
	public static void main(String[] args) throws Exception {
		TimeSeriesAnalysis.display=true;
		int wLength = (int) Math.pow(2, 1);
		System.out.println("L:"+wLength);
		for (int i = 0; i < configs.length; i++) {
			System.out.println("*****************TEST "+i+" *****************");
			List<ComputationalAgent> trans = null;
			trans = TransducerersFactory.getTransducerers(configs[i]);
			trans.get(0).init();
			Regressor.process(trans.get(0));
			StatisticalType st = trans.get(0).getOutput();
			AnalysisLogger.getLogger().debug("ST:" + st);
			trans = null;
			System.out.println("*****************END TEST*****************");
		}
	}

	// Common prologue: agent, paths, and database connection parameters.
	private static AlgorithmConfiguration baseConfig(String user, String password, String databaseUrl) {
		AlgorithmConfiguration config = new AlgorithmConfiguration();
		config.setAgent("TIME_SERIES_PROCESSING");
		config.setConfigPath("./cfg/");
		config.setPersistencePath("./");
		config.setParam("DatabaseUserName", user);
		config.setParam("DatabasePassword", password);
		config.setParam("DatabaseURL", databaseUrl);
		config.setParam("DatabaseDriver", "org.postgresql.Driver");
		return config;
	}

	// Prologue preset for the remote statistical-manager test database.
	private static AlgorithmConfiguration remoteBaseConfig() {
		return baseConfig("utente", "d4science", SM_TESTDB_URL);
	}

	// Common epilogue: SSA defaults shared by every test, logger and scope.
	private static AlgorithmConfiguration finishConfig(AlgorithmConfiguration config) {
		config.setParam("SSA_EigenvaluesThreshold", "0.7");
		config.setParam("SSA_Points_to_Forecast", "10");
		// logger path is resolved against "./cfg/" before the path is normalized below
		AnalysisLogger.setLogger(config.getConfigPath() + AlgorithmConfiguration.defaultLoggerFile);
		config.setGcubeScope("/gcube");
		config.setConfigPath("./cfg");
		return config;
	}

	/** IOTC catch-per-unit-effort series, SUM-aggregated. */
	public static AlgorithmConfiguration IOTCSSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("TimeSeriesTable", "timeseries_idb73029b9_226e_4d0f_b828_24854d0b7b44");
		config.setParam("ValueColum", "cpue");
		config.setParam("FFT_Window_Samples", "200");
		config.setParam("AggregationFunction", "SUM");
		config.setParam("SSA_Window_in_Samples", "20");
		return finishConfig(config);
	}

	/** Small quantity series. */
	public static AlgorithmConfiguration simpleSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("TimeSeriesTable", "timeseries_id4dd368bf_63fb_4d19_8e31_20ced63a477d");
		config.setParam("ValueColum", "quantity");
		config.setParam("FFT_Window_Samples", "70");
		config.setParam("SSA_Window_in_Samples", "30");
		return finishConfig(config);
	}

	/** Russian dataset quantity series. */
	public static AlgorithmConfiguration russianSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("TimeSeriesTable", "generic_ideb9efbe0_61ad_4eea_b0ee_95e64ce11b28");
		config.setParam("ValueColum", "quantity");
		config.setParam("FFT_Window_Samples", "70");
		config.setParam("SSA_Window_in_Samples", "20");
		return finishConfig(config);
	}

	/** Vessel speed series AVG-aggregated over a datetime column. */
	public static AlgorithmConfiguration occurrencePointsSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("TimeSeriesTable", "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
		config.setParam("ValueColum", "speed");
		config.setParam("TimeColum", "datetime");
		config.setParam("AggregationFunction", "AVG");
		config.setParam("FFT_Window_Samples", "200");
		config.setParam("SSA_Window_in_Samples", "20");
		return finishConfig(config);
	}

	/** Periodic quantity series with high-sensitivity frequency analysis. */
	public static AlgorithmConfiguration periodicSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("FFT_Window_Samples", "200");
		config.setParam("TimeSeriesTable", "timeseries_ide814eb07_c13b_41b3_a240_aa99446db831");
		config.setParam("Sensitivity", "HIGH");
		config.setParam("ValueColum", "quantity");
		config.setParam("FrequencyResolution", "0.01");
		config.setParam("SSA_Window_in_Samples", "20");
		return finishConfig(config);
	}

	/** Huge yearly quantity series (no explicit FFT window). */
	public static AlgorithmConfiguration hugeSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("TimeSeriesTable", "generic_id634a660c_4d1a_410c_aa45_eb6e4c5afdf9");
		config.setParam("ValueColum", "quantity");
		config.setParam("TimeColum", "years");
		config.setParam("SSA_Window_in_Samples", "20");
		return finishConfig(config);
	}

	/** NAFO vessel speed series. */
	public static AlgorithmConfiguration NAFOSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("TimeSeriesTable", "timeseries_id39c6c28f_2484_421c_8ffb_9c2cc2330c62");
		config.setParam("ValueColum", "speed");
		config.setParam("FFT_Window_Samples", "50");
		config.setParam("SSA_Window_in_Samples", "30");
		return finishConfig(config);
	}

	/** Saw-tooth quantity series. */
	public static AlgorithmConfiguration sawSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("TimeSeriesTable", "timeseries_ide814eb07_c13b_41b3_a240_aa99446db831");
		config.setParam("ValueColum", "quantity");
		config.setParam("FFT_Window_Samples", "50");
		config.setParam("SSA_Window_in_Samples", "20");
		return finishConfig(config);
	}

	/** Large custom quantity series with a short FFT window. */
	public static AlgorithmConfiguration largeCustomSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("TimeSeriesTable", "timeseries_idd3dd174e_242c_4f8b_920a_faa79691ca43");
		config.setParam("ValueColum", "quantity");
		config.setParam("FFT_Window_Samples", "14");
		config.setParam("SSA_Window_in_Samples", "20");
		return finishConfig(config);
	}

	/** Earthquake magnitude series. */
	public static AlgorithmConfiguration earthquakesSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("TimeSeriesTable", "timeseries_id0f44b131_de55_4839_b07f_2721574e2b9d");
		config.setParam("ValueColum", "magnitude");
		config.setParam("FFT_Window_Samples", "14");
		config.setParam("SSA_Window_in_Samples", "20");
		return finishConfig(config);
	}

	/** Temperature series (weekly FFT window). */
	public static AlgorithmConfiguration temperatureSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("TimeSeriesTable", "timeseries_id08b3abb9_c7b0_4b82_8117_64b69055416f");
		config.setParam("ValueColum", "fvalue");
		config.setParam("FFT_Window_Samples", "52");
		config.setParam("SSA_Window_in_Samples", "10");
		return finishConfig(config);
	}

	/** Bari temperature series. */
	public static AlgorithmConfiguration temperatureBariSignalConfig() {
		AlgorithmConfiguration config = remoteBaseConfig();
		config.setParam("TimeSeriesTable", "timeseries_id5b39298a_0e32_4a9c_8e6c_f2e48e3f1b1a");
		config.setParam("ValueColum", "fvalue");
		config.setParam("FFT_Window_Samples", "500");
		config.setParam("SSA_Window_in_Samples", "10");
		return finishConfig(config);
	}

	/** Short temperature series on a local database (different credentials). */
	public static AlgorithmConfiguration temperatureShortSignalConfig() {
		AlgorithmConfiguration config = baseConfig("gcube", "d4science2", "jdbc:postgresql://localhost/testdb");
		config.setParam("TimeSeriesTable", "testtextractiontemp");
		config.setParam("ValueColum", "fvalue");
		config.setParam("FFT_Window_Samples", "500");
		config.setParam("SSA_Window_in_Samples", "10");
		return finishConfig(config);
	}
}