Sync with branch

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-access/DatabasesResourcesManagerAlgorithms@133876 82a268e6-3cf1-43bd-a215-b396298e98cf

parent 54cbad7676
commit 2426179b9c
@@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
  <classpathentry kind="src" output="target/classes" path="src/main/java">
    <attributes>
      <attribute name="optional" value="true"/>
      <attribute name="maven.pomderived" value="true"/>
    </attributes>
  </classpathentry>
  <classpathentry kind="src" output="target/test-classes" path="src/test/java">
    <attributes>
      <attribute name="optional" value="true"/>
      <attribute name="maven.pomderived" value="true"/>
    </attributes>
  </classpathentry>
  <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
    <attributes>
      <attribute name="maven.pomderived" value="true"/>
    </attributes>
  </classpathentry>
  <classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
    <attributes>
      <attribute name="maven.pomderived" value="true"/>
    </attributes>
  </classpathentry>
  <classpathentry kind="output" path="target/classes"/>
</classpath>
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
  <name>database-rm-algorithms</name>
  <comment></comment>
  <projects>
  </projects>
  <buildSpec>
    <buildCommand>
      <name>org.eclipse.jdt.core.javabuilder</name>
      <arguments>
      </arguments>
    </buildCommand>
    <buildCommand>
      <name>org.eclipse.m2e.core.maven2Builder</name>
      <arguments>
      </arguments>
    </buildCommand>
  </buildSpec>
  <natures>
    <nature>org.eclipse.jdt.core.javanature</nature>
    <nature>org.eclipse.m2e.core.maven2Nature</nature>
  </natures>
</projectDescription>
@@ -0,0 +1,4 @@
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/test/java=UTF-8
encoding/<project>=UTF-8
@@ -0,0 +1,12 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=1.7
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.7
@@ -0,0 +1,4 @@
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1
@@ -0,0 +1,4 @@
csquarecode, depthmin, depthmax, depthmean, depthsd, sstanmean, sstansd, sstmnmax, sstmnmin, sstmnrange, sbtanmean, salinitymean, salinitysd, salinitymax, salinitymin, salinitybmean, primprodmean, iceconann, iceconspr, iceconsum, iceconfal, iceconwin, faoaream, eezall, lme, landdist, oceanarea, centerlat, centerlong
"1000:100:1","4760.0","5014.0","4896.0","46.9","27.27","0.38","30.05","23.63","6.42","1.89","34.76","","35.45","34.09","34.826","450","0.0","0.0","0.0","0.0","0.0","34","0","0","594","3091.036","0.25","0.25"
"1000:100:2","4687.0","4991.0","4851.0","47.44","27.27","0.38","30.05","23.63","6.42","1.89","34.76","","35.45","34.09","34.826","471","0.0","0.0","0.0","0.0","0.0","34","0","0","606","3091.036","0.25","0.75"
"1000:100:3","4778.0","4990.0","4890.0","34.23","27.27","0.38","30.05","23.63","6.42","1.89","34.76","","35.45","34.09","34.826","440","0.0","0.0","0.0","0.0","0.0","34","0","0","539","3090.8","0.75","0.25"
|
|
|
@ -0,0 +1,101 @@
|
|||
centerlong, centerlat, oceanarea, landdist, lme, eezall, faoaream, iceconwin, iceconfal, iceconsum, iceconspr, iceconann, primprodmean, salinitybmean, salinitymin, salinitymax, salinitysd, salinitymean, sbtanmean, sstmnrange, sstmnmin, sstmnmax, sstansd, sstanmean, depthsd, depthmean, depthmax, depthmin, csquarecode
|
||||
"-162.25","-0.75","3090.8","339","0","845","77","0","0","0","0","0","483","","35.1","35.44","","35.31","","5.61","24.34","29.95","0.92","27.54","52.05","5292","5428","5162","5016:102:3"
|
||||
"-25.25","36.75","2402.4263","22","0","622","27","0","0","0","0","0","347","34.9546","36.13","36.43","","36.27","3.09","9.22","15.78","25","0.44","19.62","853.11","2670","3892","0","7302:465:3"
|
||||
"143.25","22.75","2850.583","322","0","394, 580","61","0","0","0","0","0","197","34.6813","34.71","35.07","","34.91","1.5","6.55","23.44","29.99","0.42","27.17","832.55","3263","4659","1249","1214:123:3"
|
||||
"35.75","19.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","1103:495:4"
|
||||
"-69.25","-7.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","5006:479:3"
|
||||
"129.75","-34.25","2555.043","309","43","36","57","0","0","0","0","0","318","34.7372","35.48","35.88","","35.69","1.96","6.4","14.34","20.74","0.31","17.25","464.68","2438","3499","1573","3312:249:2"
|
||||
"-125.75","-81.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","5812:215:2"
|
||||
"-108.25","46.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","7410:468:1"
|
||||
"-102.25","65.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","7610:352:1"
|
||||
"74.25","54.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","1507:144:1"
|
||||
"-74.25","-78.25","0","5","61","0","48","1.5","1.5","1.5","1.5","1.5","0","33.4334","33.5","34.2","","33.9","-1.67","0.01","-1.79","-1.78","0","-1.79","0","1","1","0","5707:384:1"
|
||||
"135.25","-26.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","3213:465:1"
|
||||
"178.25","41.75","2306.112","1101","0","0","61","0","0","0","0","0","391","34.6875","33.53","34.2","","33.88","1.62","12.91","7.76","20.67","0.61","12.41","308.23","5665","6257","4778","1417:218:3"
|
||||
"-16.75","57.75","1649.437","504","0","0","27","0","0","0","0","0","419","35.0471","35.24","35.37","","35.32","5.07","6","8.21","14.21","0.24","10.49","45.25","1184","1282","1117","7501:476:4"
|
||||
"23.75","79.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","1702:393:4"
|
||||
"-28.75","-74.25","839.04","197","0","0","48","0.34","0.89","0.93","0.74","0.73","363","34.6665","33.31","34.4","","34.08","0.04","0.41","-1.79","-1.38","0.01","-1.79","782.86","2026","2855","368","5702:248:2"
|
||||
"-113.75","-36.75","2476.728","1183","0","0","87","0","0","0","0","0","236","34.673","34.15","34.78","","34.43","1.8","10.1","12.83","22.93","0.39","16.69","690.44","2468","3283","540","5311:363:4"
|
||||
"149.25","-64.25","1342.899","385","0","0","58","0.006666667","0.47","0.64","0.16","0.31","246","34.68","33.58","33.99","","33.8","-0.17","3.11","-1.79","1.32","0.17","-1.19","42.16","3591","3649","3486","3614:249:1"
|
||||
"-14.75","37.25","2460.494","540","0","0","27","0","0","0","0","0","338","34.9463","36.11","36.4","","36.22","2.8","7.58","14.85","22.43","0.33","18.3","643","2819","4417","1683","7301:374:2"
|
||||
"-133.25","-15.25","2982.221","869","0","0","77","0","0","0","0","0","303","34.6907","36.19","36.47","","36.34","1.54","3.66","25.6","29.26","0.32","27.35","216.33","4093","4331","2921","5113:353:1"
|
||||
"-169.75","-60.25","1533.838","1402","0","0","88","0","0","0","0","5.5E-5","218","34.7061","33.93","34.1","","33.97","0.86","6.14","-0.13","6.01","0.3","2.05","345.54","4183","4780","3588","5616:209:2"
|
||||
"-24.75","-28.75","2710.02","1071","0","0","41","0","0","0","0","0","164","34.7535","35.83","36.3","","36.09","1.14","8.77","18.28","27.05","0.3","22.11","149.49","4907","5268","4531","5202:384:4"
|
||||
"-122.75","27.75","2735.554","504","3","0","77","0","0","0","0","0","232","34.6819","33.7","33.9","","33.81","1.55","5.77","16.63","22.4","0.45","19.18","344.03","4204","4642","2869","7212:372:4"
|
||||
"-2.25","6.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","7000:362:1"
|
||||
"144.25","33.75","2570.127","435","0","0, 390","61","0","0","0","0","0","367","34.6916","34.4","34.79","","34.61","1.56","11.73","17.18","28.91","0.44","22.17","80.88","5623","5792","5385","1314:134:3"
|
||||
"78.75","70.75","0","0","0","","-1","","","","","","0","","21.68","28.02","","26.23","","4.65","-1.77","2.88","0.21","-1.51","-9999","0","0","0","1707:208:4"
|
||||
"-110.75","-60.25","1533.838","1360","0","0","88","0","0","0","0","1.2333E-4","212","34.7066","33.71","34.18","","34.03","0.42","4.47","0.97","5.44","0.42","2.95","27.52","5132","5178","5079","5611:100:2"
|
||||
"123.25","-63.25","1391.284","372","0","0","58","0.0033333334","0.32","0.26","0.01","0.15","235","34.6739","33.81","34.05","","33.91","-0.25","3.24","-1.79","1.45","0.12","-0.97","63.24","3915","4036","3805","3612:133:1"
|
||||
"157.75","-38.25","2427.466","712","0","0","81","0","0","0","0","0","431","34.7086","35.38","35.71","","35.49","1.07","8.19","13.97","22.16","0.35","17.2","100.98","4813","5016","4566","3315:487:2"
|
||||
"-52.75","-54.75","1783.993","503","0","0","41","0","0","0","0","6.833E-5","394","34.7144","33.81","34.11","","34.05","0.87","6.15","1.82","7.97","0.34","4.93","247.93","3656","4102","3069","5505:142:4"
|
||||
"131.25","7.75","3062.831","354","0","585","71","0","0","0","0","0","205","34.6761","33.99","34.48","","34.16","1.7","3.74","27.02","30.76","0.25","28.93","305.41","4986","5841","4356","1013:371:3"
|
||||
"-123.25","-39.75","2376.54","1717","0","0","81","0","0","0","0","0","266","34.7039","34.11","34.32","","34.19","1.4","7.6","11.14","18.74","0.35","14.31","86.78","4035","4250","3795","5312:393:3"
|
||||
"19.75","-20.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","3201:209:2"
|
||||
"-150.25","-50.75","1955.733","2105","0","0","81","0","0","0","0","0","315","34.7097","34.1","34.55","","34.35","1.1","4.77","7.38","12.15","0.26","9.19","194.62","4520","4947","4120","5515:100:3"
|
||||
"49.25","1.75","3089.624","328","0","706","51","0","0","0","0","0","668","34.7181","35.18","35.53","","35.38","1.31","6.09","24.31","30.4","0.34","27.27","23.83","4647","4698","4578","1004:219:3"
|
||||
"-136.75","80.25","523.47","496","64","0","18","0.98","0.93","0.9","0.98","0.94","0","34.9611","29.75","31.97","","30.57","-0.31","0.01","-1.79","-1.78","0","-1.79","24.95","3694","3719","3635","7813:206:2"
|
||||
"114.25","-67.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","3611:374:3"
|
||||
"44.75","-55.25","1761.897","1095","0","0","58","0","0","0","0","1.2333E-4","188","34.6607","33.86","34.04","","33.98","-0.16","4.39","-1.41","2.98","0.23","0.74","87.48","4415","4655","4217","3504:354:2"
|
||||
"-26.25","-88.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","5802:486:3"
|
||||
"-171.75","45.25","2176.155","818","0","0","67","0","0","0","0","0","381","34.6828","32.81","33.43","","33.16","1.62","10.27","4.59","14.86","0.6","8.59","184.9","5542","5822","4975","7417:351:2"
|
||||
"45.25","19.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","1104:495:3"
|
||||
"-56.75","6.25","3072.693","27","17","740, 328","31","0","0","0","0","0","1339","36.2528","28.47","36.72","","34.93","26.01","3.63","25.69","29.32","0.3","27.28","24.64","64","121","0","7005:466:2"
|
||||
"-38.25","-82.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","5803:228:1"
|
||||
"-86.75","36.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","7308:466:4"
|
||||
"136.75","67.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","1613:476:4"
|
||||
"86.75","-65.75","1269.559","134","0","0","58","0.48","0.79","0.81","0.5","0.64","244","34.7089","33.36","34.29","","33.68","0.59","1.31","-1.79","-0.48","0.1","-1.63","712.05","1240","2546","2","3608:456:4"
|
||||
"-171.75","-58.25","1626.562","1158","0","0","81","0","0","0","0","1.5083E-4","249","34.7062","33.93","34.11","","33.99","0.88","5.85","2.16","8.01","0.24","3.93","316.39","4462","5079","3865","5517:381:2"
|
||||
"-163.75","26.25","2772.292","346","0","842","77","0","0","0","0","0","225","34.6892","35.06","35.48","","35.28","1.51","7","20.33","27.33","0.36","23.87","53.07","4879","5007","4719","7216:363:2"
|
||||
"-73.75","-80.25","0","47","61","0","48","1.5","1.5","1.5","1.5","1.5","0","33.456","33.4","34.18","","33.9","-1.57","0.01","-1.79","-1.78","0","-1.79","0","1","1","1","5807:103:2"
|
||||
"-97.75","-80.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","5809:207:4"
|
||||
"161.75","-68.75","1120.32","134","0","0","88","0.72","0.92","0.92","0.68","0.81","158","34.6553","33.62","34.17","","33.85","0.02","0.84","-1.79","-0.95","0.03","-1.77","132.05","1980","2340","1721","3616:381:4"
|
||||
"86.75","-59.25","1580.44","804","0","0","58","0","0.06","0.03","0","0.02","286","34.6685","33.76","34.06","","33.91","-0.05","4.55","-1.79","2.76","0.23","-0.17","77.86","4668","4864","4524","3508:496:2"
|
||||
"-113.25","25.25","2795.73","126","3","484","77","0","0","0","0","0","685","34.6269","33.75","34.4","","34.08","2.22","12.25","16.52","28.77","0.87","21.31","891.24","1887","3750","124","7211:353:1"
|
||||
"141.75","40.75","2318.266","20","49","390","61","0.03","0","0","0.05","0.02","719","33.7662","32.95","33.83","","33.34","7.57","20.77","3.29","24.06","0.53","12.68","129.06","163","617","0","1414:101:4"
|
||||
"174.25","-46.75","2117.945","324","0","554","81","0","0","0","0","0","579","34.5455","34.14","34.64","","34.39","2.67","8.11","7.14","15.25","0.43","10.32","151.03","1574","1810","1299","3417:364:3"
|
||||
"-130.75","-36.25","2492.773","1269","0","0","81","0","0","0","0","0","213","34.7028","34.25","34.84","","34.57","1.38","8.4","13.05","21.45","0.3","16.45","174.92","4248","4528","3695","5313:360:2"
|
||||
"-5.25","-58.75","1603.562","751","0","0","48","0.03","0.63","0.45","0","0.28","251","34.6436","33.7","34.18","","33.98","-0.45","3.19","-1.79","1.4","0.17","-0.78","325.51","4536","5202","3872","5500:485:3"
|
||||
"76.25","-12.75","3014.847","806","0","0","51","0","0","0","0","0","249","34.7224","34.08","34.86","","34.48","1.32","5.39","24.68","30.07","0.22","27.14","165.37","5278","5578","4299","3107:226:3"
|
||||
"-179.75","-46.75","2117.945","173","0","554","81","0","0","0","0","0","602","34.7472","34.12","34.42","","34.32","1.46","9.23","7.16","16.39","0.5","10.74","254.31","2812","3225","2300","5417:469:4"
|
||||
"-43.75","-23.75","2829.287","126","15","76","41","0","0","0","0","0","592","36.0741","35.65","36.24","","35.9","18.88","7.22","20.37","27.59","0.28","23.94","15.08","116","141","79","5204:133:4"
|
||||
"-69.25","-35.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","5306:459:1"
|
||||
"3.75","17.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","1100:373:4"
|
||||
"-91.25","-21.25","2880.897","1347","0","0","87","0","0","0","0","0","227","34.6912","35.24","35.79","","35.56","1.8","5.37","18.72","24.09","0.28","21.22","182.36","4019","4308","3505","5209:111:1"
|
||||
"10.75","85.75","229.074","434","64","0","27","0.96","0.92","0.93","0.97","0.94","0","34.9439","32.02","33.02","","32.57","-0.71","0.01","-1.79","-1.78","0","-1.79","296.83","3742","4206","3116","1801:350:4"
|
||||
"130.25","22.75","2850.583","296","0","393","61","0","0","0","0","0","213","34.6861","34.5","34.89","","34.72","1.71","8.3","22.25","30.55","0.44","26.7","351.85","5371","5887","4238","1213:120:3"
|
||||
"-58.75","37.25","2460.494","772","0","0","21","0","0","0","0","0","362","34.8946","36.13","36.6","","36.37","2.29","10.03","17.67","27.7","0.25","21.91","429.79","4926","5359","2898","7305:478:2"
|
||||
"-15.25","28.25","2586.7463","23","27","723","34","0","0","0","0","0","376","35.1071","36.41","36.61","","36.52","4.22","6.17","17.74","23.91","0.39","20.76","1082.32","2156","3512","0","7201:485:1"
|
||||
"44.25","64.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","1604:144:3"
|
||||
"-138.25","-24.25","2818.316","279","0","258","77","0","0","0","0","0","141","34.6935","35.5","36.25","","35.8","1.49","5.93","21.58","27.51","0.36","24.39","113.26","4188","4484","3895","5213:248:1"
|
||||
"25.75","20.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","1202:205:4"
|
||||
"-117.25","56.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","7511:467:3"
|
||||
"93.25","-30.25","2670.172","1699","0","0","57","0","0","0","0","0","208","34.7327","35.63","35.96","","35.78","1.87","8.19","15.46","23.65","0.36","19.17","83.71","2333","2559","2140","3309:103:1"
|
||||
"59.25","20.25","2900.011","104","32","512","51","0","0","0","0","0","1621","34.8484","35.81","36.47","","36.2","3.11","5.71","23.51","29.22","0.26","26.32","839.04","2227","3166","4","1205:209:1"
|
||||
"177.75","44.25","2214.136","826","0","0","61","0","0","0","0","1.333E-5","393","34.6875","33.07","33.62","","33.33","1.61","11.87","4.56","16.43","0.58","8.97","32.16","5673","5755","5539","1417:247:2"
|
||||
"90.75","-10.25","3041.734","746","0","0","57","0","0","0","0","0","278","34.7136","34.09","34.98","","34.36","1.2","4.09","26.02","30.11","0.25","27.83","242.56","4959","5364","4515","3109:100:2"
|
||||
"-167.25","-47.25","2098.217","811","0","0","81","0","0","0","0","0","416","34.7036","34.53","34.73","","34.66","0.95","6.51","8.64","15.15","0.38","11.1","110.06","5135","5407","4673","5416:477:1"
|
||||
"43.75","-73.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","3704:133:2"
|
||||
"-136.75","-75.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","5713:456:4"
|
||||
"31.75","-69.75","10.6987","58","0","0","58","1.5","1.5","1.5","1.5","1.5","183","34.3177","33.45","34.38","","33.88","-1.43","0.9","-1.79","-0.89","0.03","-1.77","56.55","89","180","1","3603:391:4"
|
||||
"-113.75","76.75","495.9311","4","0","124","18","0.95","0.89","0.79","0.96","0.9","79","34.5247","19.81","29.95","","28.13","0.17","0.04","-1.79","-1.75","0","-1.79","109.26","206","336","0","7711:363:4"
|
||||
"94.25","-77.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","3709:374:1"
|
||||
"159.75","69.75","224.6727","2","0","644","18","0.94","0.55","0.37","0.95","0.71","248","22.9766","17.7","29.04","","25.17","0.04","2.68","-1.79","0.89","0.1","-1.63","1.03","1","3","0","1615:499:4"
|
||||
"-26.75","54.75","1783.993","1058","0","0","27","0","0","0","0","0","500","34.9497","34.95","35.19","","35.07","2.78","7.07","7.13","14.2","0.54","10.19","74.29","3285","3541","3056","7502:246:4"
|
||||
"143.25","64.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","1614:143:1"
|
||||
"-145.25","-86.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","5814:465:3"
|
||||
"90.75","77.25","675.3681","6","58","644","18","0.95","0.63","0.64","0.93","0.79","77","32.9421","26.76","31.29","","28.79","-1.02","0.92","-1.79","-0.87","0.05","-1.77","8.05","17","36","0","1709:370:2"
|
||||
"112.25","66.75","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","1611:362:3"
|
||||
"-103.25","15.75","2975.012","294","11","484","77","0","0","0","0","0","359","34.6736","33.77","34.29","","34.01","1.8","3.42","26.81","30.23","0.25","28.6","194.98","3366","3716","2806","7110:353:3"
|
||||
"-16.75","13.75","2161.787","22","27","686, 270","34","0","0","0","0","0","2536","35.5164","34","35.66","","35.09","18.34","10.4","18.7","29.1","0.47","25.17","33.92","43","163","0","7101:236:4"
|
||||
"-172.25","85.25","255.966","1001","64","0","18","0.99","0.94","0.93","0.99","0.95","0","34.9448","29.92","31.64","","30.74","-0.39","0.01","-1.79","-1.78","0","-1.79","117.56","1706","1968","1448","7817:352:1"
|
||||
"-102.25","56.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","7510:362:1"
|
||||
"-91.75","-61.75","1463.062","806","0","0","88","0","0","0","0","1.333E-5","186","34.7022","33.73","34.06","","33.99","0.45","5.78","-0.34","5.44","0.4","2.75","31.84","4894","4927","4778","5609:111:4"
|
||||
"76.75","-53.75","1827.777","239","0","334","58","0","0","0","0","9.667E-5","293","34.7302","33.75","34","","33.89","1.87","4.38","-0.89","3.49","0.26","1.31","82.26","1561","1705","1369","3507:236:4"
|
||||
"108.75","-53.75","1827.777","1348","0","0","57","0","0","0","0","2.667E-5","315","34.6853","33.84","34.03","","33.95","0.13","5.25","-0.09","5.16","0.34","2.41","89.45","3713","3982","3545","3510:238:4"
|
||||
"-109.25","-46.75","2117.945","2206","0","0","87","0","0","0","0","0","249","34.7085","34.04","34.28","","34.14","1.67","5.89","7.02","12.91","0.34","9.6","44.76","3221","3327","3117","5410:469:3"
|
||||
"135.75","64.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","1613:245:2"
|
||||
"74.75","-28.25","2722.891","1142","0","0","51","0","0","0","0","0","154","34.7296","35.67","35.95","","35.79","1.58","7.68","17.91","25.59","0.29","21.34","208.28","3151","3726","2599","3207:384:2"
|
||||
"-167.75","9.75","3046.418","729","0","0","77","0","0","0","0","0","247","34.7006","34.15","34.66","","34.4","1.31","3.9","25.57","29.47","0.29","27.86","147.2","5111","5450","4743","7016:497:4"
|
||||
"72.25","-43.75","2232.874","642","0","0","51","0","0","0","0","0","412","34.7202","34.17","34.85","","34.52","1.15","6.28","9.59","15.87","0.32","12.17","38.76","3709","3819","3550","3407:132:3"
|
||||
"-1.75","-87.25","0","0","0","","-1","","","","","","0","","","","","","","","","","","","","0","0","0","5800:371:2"
|
|
|
@ -0,0 +1,101 @@
|
|||
percentage, isofficial, language, countrycode
|
||||
52.4,true,Pashto,AFG
|
||||
95.6,true,Dutch,NLD
|
||||
86.2,true,Papiamento,ANT
|
||||
97.9,true,Albaniana,ALB
|
||||
86,true,Arabic,DZA
|
||||
90.6,true,Samoan,ASM
|
||||
44.6,false,Spanish,AND
|
||||
37.2,false,Ovimbundu,AGO
|
||||
0,true,English,AIA
|
||||
95.7,false,Creole English,ATG
|
||||
42,true,Arabic,ARE
|
||||
96.8,true,Spanish,ARG
|
||||
93.4,true,Armenian,ARM
|
||||
76.7,false,Papiamento,ABW
|
||||
81.2,true,English,AUS
|
||||
89,true,Azerbaijani,AZE
|
||||
89.7,false,Creole English,BHS
|
||||
67.7,true,Arabic,BHR
|
||||
97.7,true,Bengali,BGD
|
||||
95.1,false,Bajan,BRB
|
||||
59.2,true,Dutch,BEL
|
||||
50.8,true,English,BLZ
|
||||
39.8,false,Fon,BEN
|
||||
100,true,English,BMU
|
||||
50,true,Dzongkha,BTN
|
||||
87.7,true,Spanish,BOL
|
||||
99.2,true,Serbo-Croatian,BIH
|
||||
75.5,false,Tswana,BWA
|
||||
97.5,true,Portuguese,BRA
|
||||
97.3,true,English,GBR
|
||||
0,true,English,VGB
|
||||
45.5,true,Malay,BRN
|
||||
83.2,true,Bulgariana,BGR
|
||||
50.2,false,Mossi,BFA
|
||||
98.1,true,Kirundi,BDI
|
||||
0,true,English,CYM
|
||||
89.7,true,Spanish,CHL
|
||||
0,true,Maori,COK
|
||||
97.5,true,Spanish,CRI
|
||||
43.9,false,Somali,DJI
|
||||
100,false,Creole English,DMA
|
||||
98,true,Spanish,DOM
|
||||
93,true,Spanish,ECU
|
||||
98.8,true,Arabic,EGY
|
||||
100,true,Spanish,SLV
|
||||
49.1,true,Tigrinja,ERI
|
||||
74.4,true,Spanish,ESP
|
||||
22.7,true,Zulu,ZAF
|
||||
31,false,Oromo,ETH
|
||||
0,true,English,FLK
|
||||
50.8,true,Fijian,FJI
|
||||
29.3,true,Pilipino,PHL
|
||||
100,true,Faroese,FRO
|
||||
35.8,false,Fang,GAB
|
||||
34.1,false,Malinke,GMB
|
||||
71.7,true,Georgiana,GEO
|
||||
52.4,false,Akan,GHA
|
||||
88.9,true,English,GIB
|
||||
100,false,Creole English,GRD
|
||||
87.5,true,Greenlandic,GRL
|
||||
95,false,Creole French,GLP
|
||||
37.5,true,English,GUM
|
||||
64.7,true,Spanish,GTM
|
||||
38.6,false,Ful,GIN
|
||||
36.4,false,Crioulo,GNB
|
||||
96.4,false,Creole English,GUY
|
||||
100,false,Haiti Creole,HTI
|
||||
97.2,true,Spanish,HND
|
||||
88.7,false,Canton Chinese,HKG
|
||||
0,true,Norwegian,SJM
|
||||
39.4,false,Javanese,IDN
|
||||
39.9,true,Hindi,IND
|
||||
77.2,true,Arabic,IRQ
|
||||
45.7,true,Persian,IRN
|
||||
98.4,true,English,IRL
|
||||
95.7,true,Icelandic,ISL
|
||||
63.1,true,Hebrew,ISR
|
||||
94.1,true,Italian,ITA
|
||||
0,false,Sunda,TMP
|
||||
92,true,German,AUT
|
||||
94.2,false,Creole English,JAM
|
||||
99.1,true,Japanese,JPN
|
||||
99.6,true,Arabic,YEM
|
||||
97.9,true,Arabic,JOR
|
||||
0,false,Chinese,CXR
|
||||
75.2,true,Serbo-Croatian,YUG
|
||||
88.6,true,Khmer,KHM
|
||||
19.7,false,Fang,CMR
|
||||
60.4,true,English,CAN
|
||||
100,false,Crioulo,CPV
|
||||
46,true,Kazakh,KAZ
|
||||
20.9,false,Kikuyu,KEN
|
||||
23.8,false,Gbaya,CAF
|
||||
92,true,Chinese,CHN
|
||||
59.7,true,Kirgiz,KGZ
|
||||
98.9,true,Kiribati,KIR
|
||||
99,true,Spanish,COL
|
||||
75,true,Comorian,COM
|
||||
51.5,false,Kongo,COG
|
||||
18,false,Luba,COD
|
Binary file not shown.
Binary file not shown.

@@ -0,0 +1 @@
TEST_ALGORITHM=org.gcube.test.algorithm.SimpleAlg
@@ -0,0 +1,30 @@
#### Use two appenders, one to log to console, another to log to a file
log4j.rootCategory= R

#### First appender writes to console
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
#log4j.appender.stdout.layout.ConversionPattern=%m%n
#log4j.appender.stdout.File=Analysis.log

#### Second appender writes to a file
log4j.logger.AnalysisLogger=trace, stdout,R
log4j.appender.R=org.apache.log4j.RollingFileAppender
log4j.appender.R.File=Analysis.log
log4j.appender.R.MaxFileSize=50000KB
log4j.appender.R.MaxBackupIndex=2
log4j.appender.R.layout=org.apache.log4j.PatternLayout
log4j.appender.R.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
#log4j.appender.R.layout.ConversionPattern=%m%n

#### Third appender writes to a file
log4j.logger.org.hibernate=H
#log4j.appender.H=org.apache.log4j.RollingFileAppender
log4j.appender.H=org.apache.log4j.AsyncAppender
#log4j.appender.H.File=HibernateLog.log
#log4j.appender.H.MaxFileSize=1024KB
#log4j.appender.H.MaxBackupIndex=2
log4j.appender.H.layout=org.apache.log4j.PatternLayout
log4j.appender.H.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n

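Note: the configuration above routes the AnalysisLogger category to both the console and the rolling Analysis.log file. The following is a minimal, hypothetical Java sketch of logging against that setup; only the "AnalysisLogger" name and the appender behaviour come from the file above, the property-file path and the surrounding class are assumptions.

import org.apache.log4j.Logger;
import org.apache.log4j.PropertyConfigurator;

// Minimal sketch: log through the "AnalysisLogger" category configured above.
public class LoggingSketch {
    private static final Logger LOG = Logger.getLogger("AnalysisLogger");

    public static void main(String[] args) {
        PropertyConfigurator.configure("cfg/ALog.properties"); // assumed location of the file above
        LOG.trace("starting algorithm");   // written to stdout and to Analysis.log
        LOG.info("query submitted");       // formatted by the PatternLayout defined above
        LOG.error("query failed", new RuntimeException("example"));
    }
}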
@@ -0,0 +1,23 @@
<?xml version='1.0' encoding='UTF-8'?>
<hibernate-configuration>
  <session-factory>
    <property name="connection.driver_class">org.postgresql.Driver</property>
    <property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
    <!-- <property name="connection.url">jdbc:postgresql://localhost/testdb</property> -->
    <!-- <property name="connection.url">jdbc:postgresql://146.48.87.169/testdb</property> -->
    <property name="connection.url">jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis</property>
    <property name="connection.username">gcube</property>
    <property name="connection.password">d4science2</property>
    <property name="connection.pool_size">1</property>
    <property name="dialect">org.hibernate.dialect.PostgreSQLDialect</property>
    <property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>

    <property name="c3p0.max_size">1</property>
    <property name="c3p0.max_statements">0</property>
    <property name="c3p0.min_size">1</property>

    <property name="c3p0.checkoutTimeout">1</property>
    <property name="current_session_context_class">thread</property>

  </session-factory>
</hibernate-configuration>
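Note: a configuration file like the one above is normally consumed through the Hibernate 3 Configuration API. The sketch below is illustrative only; the resource name (DestinationDBHibernate.cfg.xml, a name mentioned in the release notes further down) and the test query are assumptions, not part of this component.

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

// Sketch, assuming the XML above is on the classpath under the name below.
public class ConnectionSketch {
    public static void main(String[] args) {
        SessionFactory factory = new Configuration()
                .configure("DestinationDBHibernate.cfg.xml") // hypothetical resource name
                .buildSessionFactory();
        Session session = factory.getCurrentSession(); // "thread" session context, as configured above
        session.beginTransaction();
        Object count = session.createSQLQuery("select count(*) from pg_tables").uniqueResult();
        session.getTransaction().commit();
        System.out.println("tables visible to this connection: " + count);
        factory.close();
    }
}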
@@ -0,0 +1,20 @@
<?xml version='1.0' encoding='UTF-8'?>
<hibernate-configuration>
  <session-factory>
    <property name="connection.driver_class">com.mysql.jdbc.Driver</property>
    <property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
    <property name="connection.url">jdbc:mysql://146.48.87.169/timeseries</property>
    <property name="connection.username">root</property>
    <property name="connection.password">test</property>
    <property name="dialect">org.hibernate.dialect.MySQLDialect</property>
    <property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
    <property name="c3p0.timeout">0</property>
    <property name="c3p0.checkoutTimeout">0</property>
    <property name="c3p0.acquireRetryAttempts">1</property>
    <property name="c3p0.max_size">1</property>
    <property name="c3p0.max_statements">0</property>
    <property name="c3p0.min_size">1</property>
    <property name="current_session_context_class">thread</property>
  </session-factory>
</hibernate-configuration>

@@ -0,0 +1,3 @@
1,AFD-Pul-1838,pinhead spot,English,USA,52836,7,0
16,Alg-111,Piedra de mar,Spanish,Spain,52834,7,0
17,Alg-111,Pierre vermiculée,French,France,52834,7,0
|
|
@ -0,0 +1,680 @@
|
|||
9.7703E-7,0.00563891
|
||||
1.4364E-4,0.0510053
|
||||
4.19937E-5,0.0279353
|
||||
2.17743E-5,0.0185856
|
||||
1.944E-4,0.0632979
|
||||
3.44589E-6,0.00799436
|
||||
4.25587E-6,0.00841894
|
||||
4.78409E-6,0.0109983
|
||||
6.89079E-5,0.0364259
|
||||
4.13036E-4,0.121509
|
||||
2.31896E-5,0.0191968
|
||||
1.1745E-5,0.0180833
|
||||
2.35434E-5,0.0207482
|
||||
1.59746E-4,0.0812034
|
||||
7.69376E-5,0.0567212
|
||||
1.33776E-5,0.0141317
|
||||
1.55566E-5,0.0205317
|
||||
3.68715E-4,0.116281
|
||||
31.0365,22.4365
|
||||
4.9397E-6,0.00880015
|
||||
1.60402E-5,0.0176136
|
||||
20.0,18
|
||||
0.0152733,1.36372
|
||||
2.79727E-4,0.17039
|
||||
1.38851E-5,0.0161386
|
||||
3.36371,19.7354
|
||||
1.00472E-5,0.0129909
|
||||
7.62046E-4,0.18213
|
||||
6.13964E-5,0.0296894
|
||||
8.63237E-6,0.0123222
|
||||
9.45758E-6,0.0129748
|
||||
7.65775E-6,0.0104123
|
||||
0.333088,8.76726
|
||||
5.47461E-6,0.0114068
|
||||
2.10634E-4,0.0661339
|
||||
0.00120054,0.61099
|
||||
2.35178E-6,0.00580937
|
||||
7.06851E-4,0.137094
|
||||
3.63206E-6,0.00796127
|
||||
12.6305,36.8429
|
||||
9.10321E-6,0.0126034
|
||||
1.5285E-5,0.0157055
|
||||
4.69194E-6,0.0085918
|
||||
9.4151E-4,0.189438
|
||||
1.00963E-5,0.015446
|
||||
13.407,107.25
|
||||
2.32949E-5,0.0252971
|
||||
4.81072E-6,0.0110336
|
||||
1.38707E-5,0.0180138
|
||||
46.125,29.5
|
||||
7.68974E-5,0.0447466
|
||||
7.74824E-6,0.0104459
|
||||
6.3402E-6,0.0098905
|
||||
1.04955E-5,0.0152612
|
||||
1.17834E-4,0.0500829
|
||||
3.40577E-6,0.00804342
|
||||
4.81313E-6,0.0109715
|
||||
4.8783E-6,0.00874993
|
||||
5.67719E-6,0.0114547
|
||||
0.0011657,0.183435
|
||||
6.38817E-6,0.00985259
|
||||
5.2251E-5,0.0386048
|
||||
8.08833E-6,0.012186
|
||||
2.0055E-6,0.00529332
|
||||
1.67406E-5,0.0170766
|
||||
7.32496E-6,0.0117775
|
||||
4.98469E-4,0.102785
|
||||
4.21296E-6,0.0108077
|
||||
0.00144765,0.234982
|
||||
7.00299E-6,0.0121675
|
||||
1.59803E-5,0.0157567
|
||||
1.43714E-5,0.0188496
|
||||
1.65039E-4,0.05943
|
||||
3.16313E-5,0.0224181
|
||||
5.73683E-6,0.0111805
|
||||
4.93268E-6,0.00879117
|
||||
1.37418E-5,0.0182006
|
||||
5.61506E-6,0.011055
|
||||
3.57414E-5,0.0348078
|
||||
23.0204,48.8665
|
||||
5.18973E-5,0.0365254
|
||||
1.52769E-5,0.0162869
|
||||
1.87537E-5,0.0195511
|
||||
1.05333E-5,0.0121601
|
||||
5.4302E-6,0.0112553
|
||||
8.63725E-6,0.0182325
|
||||
1.09125E-5,0.0153535
|
||||
1.63658E-5,0.0169236
|
||||
2.78859E-5,0.0256243
|
||||
6.21926E-6,0.0111308
|
||||
1.0126E-4,0.042484
|
||||
4.62966E-4,0.180625
|
||||
6.32661E-6,0.00981543
|
||||
1.52606E-4,0.0517762
|
||||
2.12023E-5,0.0172204
|
||||
20.0,18
|
||||
3.74942E-5,0.0295507
|
||||
2.05478E-5,0.0219105
|
||||
7.66321E-6,0.0146046
|
||||
1.30299E-4,0.0465436
|
||||
2.71046E-5,0.0252701
|
||||
5.25587E-6,0.0111208
|
||||
1.51102E-5,0.0167551
|
||||
2.39602E-5,0.0219328
|
||||
2.12038E-4,0.063825
|
||||
9.0321E-6,0.0127005
|
||||
0.00137113,0.340533
|
||||
0.00331001,0.865211
|
||||
0.0249418,2.59526
|
||||
3.4304E-6,0.00801633
|
||||
6.51167E-6,0.0115893
|
||||
25.8366,33.9156
|
||||
9.11521E-6,0.0173653
|
||||
1.0255E-5,0.0119973
|
||||
7.2849E-6,0.0103475
|
||||
6.98957E-6,0.0142729
|
||||
1.9099E-4,0.0885071
|
||||
2.58334E-5,0.0229562
|
||||
7.2885E-6,0.0145269
|
||||
6.1871E-6,0.00975862
|
||||
1.46131E-5,0.0168372
|
||||
1.35135E-5,0.0147707
|
||||
2.84612E-5,0.0244796
|
||||
5.57003E-6,0.0109714
|
||||
1.1586E-4,0.0596005
|
||||
1.43117E-5,0.016303
|
||||
2.83717E-4,0.0676009
|
||||
3.46651E-5,0.0325543
|
||||
1.3657E-4,0.0479645
|
||||
6.32937E-5,0.0364656
|
||||
4.90611E-6,0.0110709
|
||||
3.19951E-5,0.0230177
|
||||
6.50331E-5,0.0408002
|
||||
5.68722E-6,0.0110757
|
||||
1.06169E-4,0.0539417
|
||||
7.59937E-6,0.0103364
|
||||
6.62968E-6,0.00974917
|
||||
7.91608E-5,0.0461142
|
||||
1.47453E-5,0.0188568
|
||||
2.5939E-7,0.00330998
|
||||
1.15728E-4,0.102883
|
||||
3.49181E-5,0.0248281
|
||||
1.37263E-4,0.0675856
|
||||
6.22479E-6,0.00976885
|
||||
3.99077E-5,0.0264196
|
||||
2.65142E-5,0.020737
|
||||
3.88691E-6,0.00890433
|
||||
1.43979E-5,0.0151321
|
||||
1.62729E-5,0.0167599
|
||||
5.14887E-5,0.0368274
|
||||
1.0498E-4,0.0706319
|
||||
2.97793E-4,0.0834792
|
||||
5.48516E-6,0.010986
|
||||
5.393E-8,0.00204208
|
||||
3.82439E-5,0.0264744
|
||||
4.12254E-5,0.0303821
|
||||
8.63469E-5,0.0507739
|
||||
8.80536E-6,0.0115935
|
||||
7.98975E-5,0.0650532
|
||||
8.81541E-6,0.012471
|
||||
2.74345E-5,0.0269974
|
||||
1.65052E-5,0.0193822
|
||||
4.29089E-6,0.0106643
|
||||
1.55832E-5,0.0191981
|
||||
20.0,18
|
||||
0.00120422,0.163573
|
||||
41.2624,61.4094
|
||||
1.12162E-4,0.0514486
|
||||
6.26934E-6,0.00976265
|
||||
2.87747E-6,0.0107453
|
||||
5.67042E-6,0.0111357
|
||||
1.15193E-5,0.0152805
|
||||
4.94424E-6,0.00917829
|
||||
7.22401E-6,0.0117199
|
||||
9.5606E-7,0.00466656
|
||||
4.90297E-6,0.00878456
|
||||
5.60354E-6,0.0112022
|
||||
1.73893E-5,0.0247408
|
||||
1.26203E-4,0.0684286
|
||||
10.5093,17.6008
|
||||
7.54061E-6,0.0103009
|
||||
6.33218E-5,0.0396868
|
||||
1.64273E-5,0.0169376
|
||||
1.5592E-4,0.0567286
|
||||
38.4655,106.582
|
||||
7.15368E-6,0.0114791
|
||||
6.90115E-5,0.042822
|
||||
3.93766E-4,0.117894
|
||||
1.17915E-5,0.0156303
|
||||
15.2436,31.9661
|
||||
1.63266E-5,0.0185158
|
||||
1.52654E-4,0.0571452
|
||||
2.55482E-5,0.0246063
|
||||
2.02187E-5,0.0196909
|
||||
4.91042E-6,0.00916803
|
||||
6.08298E-6,0.00959011
|
||||
4.86997E-6,0.00873849
|
||||
9.70018E-6,0.0150352
|
||||
4.11043E-5,0.0310217
|
||||
6.79588E-6,0.0119992
|
||||
4.04501E-6,0.0104922
|
||||
1.53992E-5,0.0148827
|
||||
6.72016E-5,0.0365898
|
||||
27.2604,53.1083
|
||||
9.51526E-5,0.0424098
|
||||
1.96001E-4,0.072912
|
||||
2.22597E-5,0.0204549
|
||||
4.09066E-6,0.0109499
|
||||
4.93808E-6,0.00882365
|
||||
1.1038E-4,0.0522993
|
||||
1.03582E-5,0.0120528
|
||||
1.33266E-5,0.0185396
|
||||
2.49224E-4,0.12057
|
||||
6.28562E-6,0.0111544
|
||||
5.48005E-6,0.013855
|
||||
5.39748E-5,0.043773
|
||||
7.35521E-6,0.0117967
|
||||
1.39825E-4,0.0454189
|
||||
5.58352E-5,0.0321382
|
||||
5.59136E-6,0.0110979
|
||||
8.84707E-5,0.036729
|
||||
3.03348E-5,0.0216452
|
||||
2.78348E-5,0.0214809
|
||||
8.949E-5,0.0524961
|
||||
1.42036E-5,0.0185446
|
||||
6.88417E-6,0.0120961
|
||||
8.78723E-6,0.0123838
|
||||
32.0,24
|
||||
9.72656E-6,0.0153021
|
||||
5.14264E-4,0.118109
|
||||
4.29736E-4,0.124482
|
||||
4.80843E-6,0.0111309
|
||||
0.00746446,0.79918
|
||||
2.3214E-5,0.0209326
|
||||
7.0459E-5,0.0355598
|
||||
3.43171E-6,0.00807386
|
||||
7.0591E-5,0.0487308
|
||||
7.76933E-6,0.0120165
|
||||
2.88501E-5,0.0243687
|
||||
7.24E-5,0.040359
|
||||
7.55489E-5,0.0469141
|
||||
1.56898E-4,0.0632761
|
||||
1.09235E-5,0.0124049
|
||||
4.05651E-5,0.042648
|
||||
1.51465E-5,0.0154193
|
||||
1.78831E-5,0.0203524
|
||||
1.51837E-5,0.0156147
|
||||
7.24739E-6,0.0117771
|
||||
1.64521E-4,0.0643537
|
||||
1.00094E-5,0.0149554
|
||||
4.64507E-6,0.00863177
|
||||
6.99993E-6,0.0115393
|
||||
3.76117E-5,0.030985
|
||||
7.7686E-6,0.014735
|
||||
8.01623E-5,0.0413427
|
||||
1.1339E-5,0.0133611
|
||||
4.87665E-6,0.00874776
|
||||
1.51463E-4,0.0628629
|
||||
6.31441E-6,0.0114204
|
||||
1.77934E-5,0.0168571
|
||||
0.990086,6.94953
|
||||
3.46749E-6,0.00799544
|
||||
3.54416E-5,0.0322305
|
||||
1.45402E-5,0.0146951
|
||||
3.40148E-5,0.0245084
|
||||
1.97484E-5,0.0196968
|
||||
3.87369E-5,0.0247396
|
||||
5.70696E-6,0.0111178
|
||||
99.2602,175.239
|
||||
1.29631E-5,0.014126
|
||||
10.2418,50.4454
|
||||
1.09268E-5,0.0152518
|
||||
12.2007,20.3949
|
||||
6.87815E-5,0.0532574
|
||||
8.57708E-4,0.125572
|
||||
2.15802E-4,0.105642
|
||||
4.796E-6,0.00870184
|
||||
5.71075E-6,0.0110694
|
||||
1.53091E-5,0.0187159
|
||||
1.26111E-5,0.0140716
|
||||
1.76523E-5,0.0171751
|
||||
2.6124E-5,0.0256958
|
||||
7.23155E-5,0.035976
|
||||
5.77085E-5,0.030605
|
||||
2.07301E-5,0.024814
|
||||
7.63905E-5,0.0381513
|
||||
0.00503634,0.3736
|
||||
2.13352E-5,0.0201323
|
||||
2.14379E-4,0.0765103
|
||||
1.29366E-4,0.0653318
|
||||
4.81075E-6,0.0087364
|
||||
1.2456E-5,0.0157668
|
||||
4.72969E-6,0.00877609
|
||||
9.5267E-6,0.0130738
|
||||
2.70645E-5,0.0233318
|
||||
4.12543E-4,0.121654
|
||||
2.28287E-5,0.0245126
|
||||
6.65116E-6,0.0117262
|
||||
1.03708E-5,0.0120501
|
||||
5.11625E-5,0.0310841
|
||||
8.50127E-6,0.0112504
|
||||
3.39338E-5,0.0252766
|
||||
2.3338E-4,0.0896917
|
||||
4.36054E-4,0.102802
|
||||
1.0299E-5,0.013153
|
||||
3.64824E-6,0.0072728
|
||||
8.7311E-6,0.0124873
|
||||
4.99389E-6,0.00883864
|
||||
3.01524E-5,0.0221235
|
||||
1.43308E-5,0.0185584
|
||||
0.0135375,1.15703
|
||||
4.23928E-5,0.0315665
|
||||
7.87581E-6,0.014493
|
||||
2.37326E-5,0.0250884
|
||||
2.33263E-5,0.0210637
|
||||
6.5444E-5,0.0333008
|
||||
1.24917E-5,0.0148961
|
||||
2.5057E-5,0.0258766
|
||||
7.99923E-5,0.0392109
|
||||
6.7407E-7,0.00370689
|
||||
1.64934E-5,0.0156925
|
||||
7.34689E-6,0.0117761
|
||||
1.10116E-5,0.0155375
|
||||
3.49003E-5,0.0240023
|
||||
4.79389E-5,0.0336569
|
||||
5.57741E-6,0.0111047
|
||||
3.36945E-5,0.0233381
|
||||
7.64675E-6,0.010386
|
||||
0.00799724,0.659073
|
||||
6.71526E-5,0.0344918
|
||||
3.96247E-5,0.0253391
|
||||
2.47065E-4,0.0686088
|
||||
9.36907E-5,0.042208
|
||||
1.94518E-4,0.11588
|
||||
4.81752E-6,0.0109983
|
||||
0.00100041,0.179554
|
||||
4.82336E-6,0.00871537
|
||||
3.26412E-4,0.101085
|
||||
2.01583E-5,0.0202071
|
||||
4.58397E-4,0.0947255
|
||||
1.23653E-5,0.0160874
|
||||
5.32341E-6,0.0117989
|
||||
4.30236,13.4722
|
||||
1.96971E-5,0.0183275
|
||||
2.09065E-4,0.0598918
|
||||
0.0028134,0.359766
|
||||
4.87499E-4,0.105899
|
||||
4.89188E-5,0.0320554
|
||||
4.81601E-6,0.00869791
|
||||
4.72743E-6,0.00900802
|
||||
7.07032E-4,0.215604
|
||||
3.35413E-4,0.078974
|
||||
2.72193E-5,0.0288247
|
||||
1.05278E-4,0.0590045
|
||||
5.48046E-6,0.0113265
|
||||
7.62165E-5,0.0417477
|
||||
1.23731E-5,0.015662
|
||||
1.49468E-5,0.024292
|
||||
3.48717E-5,0.0348614
|
||||
0.0164454,0.67151
|
||||
1.7237E-4,0.0548759
|
||||
4.66219E-6,0.00863206
|
||||
7.299E-8,0.00174579
|
||||
2.00278E-5,0.0187269
|
||||
0.00899511,0.602984
|
||||
6.2523E-6,0.00976894
|
||||
8.42477E-4,0.161537
|
||||
3.62395E-6,0.00984839
|
||||
1.04946E-4,0.0435103
|
||||
6.53428,37.7425
|
||||
1.39836E-5,0.0147882
|
||||
4.75411E-4,0.102706
|
||||
6.92834E-6,0.0124173
|
||||
36.908,29.2145
|
||||
3.17329E-5,0.0321644
|
||||
4.80431E-6,0.0087475
|
||||
4.70384E-6,0.00854259
|
||||
8.21236E-5,0.0480911
|
||||
3.49175E-5,0.0260483
|
||||
1.94338E-5,0.0173099
|
||||
5.64119E-5,0.0403665
|
||||
3.33142E-5,0.025431
|
||||
3.13217E-5,0.0234571
|
||||
7.02198E-5,0.0343343
|
||||
7.3128E-7,0.00380925
|
||||
4.78409E-6,0.0109895
|
||||
5.22142E-6,0.00839373
|
||||
1.32269E-5,0.0195893
|
||||
4.29958E-6,0.0114484
|
||||
9.91712E-6,0.0149054
|
||||
5.07431,29.0843
|
||||
2.51492E-4,0.0943709
|
||||
6.32571E-6,0.00984089
|
||||
6.90728E-5,0.046415
|
||||
2.93913E-5,0.0214902
|
||||
2.31863E-5,0.0183523
|
||||
3.47137E-6,0.00809219
|
||||
3.3656E-6,0.00790383
|
||||
4.73489E-5,0.0314832
|
||||
8.05622E-5,0.0354184
|
||||
4.74366E-6,0.00861308
|
||||
16.7146,47.4161
|
||||
3.51564E-6,0.00806651
|
||||
8.33728E-5,0.0630348
|
||||
4.30414E-5,0.0257771
|
||||
7.08169E-6,0.0121962
|
||||
1.94905E-5,0.017493
|
||||
6.39635E-5,0.0425276
|
||||
1.14504E-5,0.0135943
|
||||
62.4943,42.9971
|
||||
1.9094E-5,0.0207893
|
||||
2.76009E-5,0.0207111
|
||||
0.0010282,0.204352
|
||||
2.8121E-6,0.00956001
|
||||
8.52094E-6,0.0124186
|
||||
2.35137E-4,0.0780883
|
||||
1.07303E-4,0.0715886
|
||||
8.19546E-6,0.0120523
|
||||
2.738E-5,0.0253871
|
||||
2.82366E-4,0.102415
|
||||
7.63725E-6,0.0103654
|
||||
2.3305E-4,0.0980442
|
||||
8.84575E-5,0.0414556
|
||||
1.46621E-5,0.0183132
|
||||
0.00185081,0.266569
|
||||
1.61395E-6,0.00704063
|
||||
7.20101E-4,0.130395
|
||||
1.60633E-5,0.0169116
|
||||
8.33848E-4,0.14828
|
||||
6.05287E-4,0.119328
|
||||
7.37463E-6,0.0102347
|
||||
6.90496E-6,0.0114783
|
||||
0.00762325,0.5351
|
||||
1.06799E-5,0.0181149
|
||||
2.26395E-5,0.0190986
|
||||
2.84254E-6,0.0106435
|
||||
2.60988E-5,0.0277546
|
||||
9.38991E-6,0.014974
|
||||
1.20928E-5,0.0152344
|
||||
1.18972E-5,0.0162437
|
||||
1.04221E-4,0.0434984
|
||||
4.62954E-5,0.0304854
|
||||
9.29013E-5,0.0450334
|
||||
6.38068E-6,0.00986119
|
||||
8.70063E-6,0.0123836
|
||||
5.47485E-5,0.0343837
|
||||
1.90552E-4,0.0596715
|
||||
1.60205E-4,0.0605616
|
||||
4.83028E-6,0.00870924
|
||||
1.63153E-5,0.0168672
|
||||
1.0141E-5,0.0130523
|
||||
1.85877E-5,0.022111
|
||||
3.73099E-5,0.0293874
|
||||
1.20618E-5,0.0141322
|
||||
4.80132E-4,0.239321
|
||||
1.86734E-5,0.0183217
|
||||
2.15333E-6,0.00704786
|
||||
2.8177E-7,0.00764778
|
||||
1.19829E-4,0.0600073
|
||||
2.11027E-4,0.0624342
|
||||
4.15023E-5,0.0404231
|
||||
6.3142E-5,0.0475227
|
||||
7.01177E-5,0.0324059
|
||||
3.50275E-5,0.0277303
|
||||
3.76439E-6,0.00804053
|
||||
4.16921E-6,0.0106673
|
||||
7.86189E-6,0.0148531
|
||||
1.17702E-5,0.0140042
|
||||
2.07788E-6,0.00710602
|
||||
7.33329E-6,0.0117783
|
||||
0.00185396,0.334392
|
||||
1.45967E-5,0.0152665
|
||||
1.89768E-5,0.0220377
|
||||
6.33974E-6,0.00988845
|
||||
3.24418E-4,0.101348
|
||||
8.90444E-6,0.0126817
|
||||
2.90857E-5,0.0345513
|
||||
5.68757E-6,0.00918417
|
||||
0.00709752,0.577801
|
||||
1.44068E-4,0.0630899
|
||||
4.7697E-6,0.00871275
|
||||
1.24975E-5,0.013882
|
||||
14.3761,15.9211
|
||||
2.99573E-4,0.0790785
|
||||
3.90447E-5,0.0277512
|
||||
1.04486E-5,0.0120927
|
||||
7.32497E-5,0.0474162
|
||||
0.0328583,1.68561
|
||||
1.25832E-5,0.0169329
|
||||
6.05401E-6,0.00966575
|
||||
0.8522,7.41662
|
||||
2.9527E-5,0.0232313
|
||||
2.05127E-6,0.00704115
|
||||
6.69359E-6,0.0142841
|
||||
3.62366E-5,0.0249559
|
||||
2.28879E-5,0.0194495
|
||||
6.56421E-5,0.0413058
|
||||
2.35693E-5,0.0193053
|
||||
1.90307E-5,0.0178288
|
||||
1.00153E-5,0.018204
|
||||
2.57921E-5,0.0256418
|
||||
20.0,18
|
||||
4.8505E-6,0.0109678
|
||||
5.61072E-6,0.0114127
|
||||
4.7754E-5,0.0301519
|
||||
2.42677E-5,0.0232316
|
||||
1.89188E-5,0.0208227
|
||||
6.61769E-4,0.167716
|
||||
2.97141E-5,0.0223764
|
||||
1.14063E-4,0.0447234
|
||||
9.09487E-5,0.0446335
|
||||
4.67532E-5,0.0283271
|
||||
5.84188E-4,0.12433
|
||||
2.28362E-5,0.0251913
|
||||
4.84907E-5,0.0308274
|
||||
1.11794E-5,0.0137001
|
||||
27.0718,51.0763
|
||||
1.08003E-4,0.0454306
|
||||
9.38431E-6,0.0129279
|
||||
2.04809E-6,0.0070513
|
||||
3.12931E-5,0.028814
|
||||
0.00189977,0.285349
|
||||
1.20323E-5,0.0145518
|
||||
6.43857E-6,0.0115764
|
||||
9.13355E-6,0.0175894
|
||||
1.13331E-5,0.0144328
|
||||
0.00112667,0.202879
|
||||
1.13165E-5,0.0142628
|
||||
2.8717E-5,0.0212844
|
||||
4.96852E-4,0.131825
|
||||
13.4408,74.2049
|
||||
1.95355E-4,0.0738632
|
||||
8.48093E-5,0.0387794
|
||||
8.52129E-6,0.0121269
|
||||
5.15058E-5,0.031754
|
||||
3.37542E-5,0.0249784
|
||||
1.31215E-5,0.0153912
|
||||
2.96757E-4,0.134141
|
||||
4.9007E-5,0.0298154
|
||||
4.18864E-5,0.0305646
|
||||
2.08578E-6,0.00538811
|
||||
0.0146687,2.34031
|
||||
2.59284E-4,0.0825437
|
||||
1.03252E-4,0.0543814
|
||||
90.6332,103.019
|
||||
5.12688E-5,0.0414163
|
||||
8.2902E-7,0.00603566
|
||||
3.18194E-6,0.00689977
|
||||
8.55502E-5,0.0364648
|
||||
4.8763E-6,0.00877332
|
||||
3.54058E-6,0.00809896
|
||||
1.38108E-6,0.00813992
|
||||
1.90862E-5,0.0175106
|
||||
4.88955E-6,0.00911519
|
||||
15.7244,17.2081
|
||||
4.99868E-6,0.0108084
|
||||
0.00539245,0.624264
|
||||
1.69463E-4,0.0630869
|
||||
0.00259102,0.888393
|
||||
8.39712E-5,0.0435661
|
||||
4.73652E-6,0.010929
|
||||
7.24351E-6,0.0116926
|
||||
13.419,30.2048
|
||||
1.11495E-4,0.0497078
|
||||
6.18346E-6,0.00972724
|
||||
6.87393E-6,0.0114111
|
||||
1.79846E-5,0.0175689
|
||||
4.57238E-5,0.0356232
|
||||
8.10548E-4,0.140009
|
||||
2.99394E-4,0.0757078
|
||||
2.45417E-5,0.019924
|
||||
5.04439E-6,0.00884144
|
||||
5.42362E-6,0.0110175
|
||||
2.20127E-5,0.0267511
|
||||
9.94813E-5,0.0508959
|
||||
1.25062,17.9565
|
||||
1.61344E-4,0.0625028
|
||||
0.00112262,0.229034
|
||||
1.04855E-5,0.0131256
|
||||
0.00125686,0.210136
|
||||
5.17521,33.5174
|
||||
4.7696E-6,0.00858219
|
||||
3.081E-8,0.00359469
|
||||
1.08837E-5,0.0146307
|
||||
1.16904E-4,0.0594755
|
||||
1.20467E-4,0.0768319
|
||||
1.45649E-5,0.0152501
|
||||
9.76951E-4,0.208879
|
||||
2.26888E-5,0.0188056
|
||||
1.72038E-5,0.0213398
|
||||
9.7678E-7,0.00611272
|
||||
5.10934E-6,0.00847851
|
||||
4.94181E-6,0.00884812
|
||||
6.06736E-6,0.00963022
|
||||
4.22465E-4,0.106611
|
||||
5.61754E-6,0.011148
|
||||
0.0189674,1.07685
|
||||
6.30447E-6,0.00967734
|
||||
0.63882,14.4948
|
||||
1.05014E-5,0.0152593
|
||||
3.78932E-5,0.0276698
|
||||
1.12453E-5,0.0155788
|
||||
5.50268E-6,0.0113421
|
||||
6.01866E-6,0.00979631
|
||||
9.59838E-6,0.0128491
|
||||
4.78663E-4,0.120393
|
||||
1.80902E-4,0.0679503
|
||||
3.50241E-6,0.00803909
|
||||
1.59405E-5,0.016117
|
||||
3.68681E-5,0.0332498
|
||||
1.06166E-5,0.0129311
|
||||
8.03002E-6,0.0121394
|
||||
6.56986E-6,0.0116726
|
||||
6.12983E-6,0.00961705
|
||||
11.0619,36.9611
|
||||
1.19807E-4,0.052702
|
||||
2.12194E-5,0.0225268
|
||||
8.16354E-6,0.012526
|
||||
12.5088,23.2871
|
||||
6.63737E-6,0.011786
|
||||
8.69258E-6,0.0123714
|
||||
4.05835E-5,0.0308466
|
||||
6.67081E-6,0.0117398
|
||||
1.22291E-4,0.0549408
|
||||
5.35365E-5,0.037192
|
||||
9.66958E-4,0.234486
|
||||
6.06076E-6,0.00968083
|
||||
5.31188E-6,0.0110805
|
||||
4.54449E-6,0.00877477
|
||||
7.30305E-6,0.0120953
|
||||
2.96538E-4,0.0964961
|
||||
2.66118E-5,0.0226364
|
||||
1.95749E-5,0.0201733
|
||||
5.43957E-5,0.0283473
|
||||
2.10492E-6,0.00694118
|
||||
3.22508E-5,0.0245779
|
||||
4.93528E-6,0.00873575
|
||||
8.70525E-5,0.03822
|
||||
1.95783E-5,0.0203246
|
||||
4.83264E-6,0.0090926
|
||||
7.3687E-4,0.200016
|
||||
1.4543E-5,0.0148591
|
||||
6.35245E-6,0.00988494
|
||||
5.644E-7,0.00566877
|
||||
3.38921E-6,0.00804852
|
||||
1.22734E-4,0.0534114
|
||||
1.16764E-5,0.0140771
|
||||
5.72278E-5,0.029283
|
||||
2.69315E-4,0.0838663
|
||||
6.55163E-5,0.0582305
|
||||
8.29973E-6,0.012344
|
||||
6.14166E-5,0.0414622
|
||||
2.56287E-5,0.0206759
|
||||
4.83934E-6,0.011014
|
||||
1.62917E-5,0.0188659
|
||||
1.06344E-5,0.0127406
|
||||
6.25899E-6,0.00976509
|
||||
1.3609E-5,0.0191601
|
||||
3.55264E-5,0.0284813
|
||||
3.06702E-5,0.0236348
|
||||
45.5391,106.753
|
||||
1.04514E-5,0.01559
|
||||
4.62526E-5,0.0378597
|
||||
1.603E-7,0.00442308
|
||||
1.80651E-4,0.0792493
|
||||
2.81819E-5,0.0230531
|
||||
4.54264E-6,0.0108184
|
||||
1.25729E-5,0.0140746
|
||||
5.36799E-5,0.0301957
|
||||
1.15805E-5,0.0135475
|
||||
5.02222E-6,0.00877333
|
||||
8.41231E-5,0.0403267
|
||||
2.04203E-4,0.113848
|
||||
5.58337E-6,0.011363
|
||||
7.66205E-6,0.0104025
|
||||
3.27681E-6,0.00702314
|
||||
1.34649E-5,0.0184943
|
||||
1.03667E-5,0.0120576
|
||||
8.71444E-6,0.0124203
|
||||
3.57407E-5,0.0414939
|
|
@@ -0,0 +1,7 @@
AQUAMAPS_SUITABLE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable
AQUAMAPS_NATIVE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative
AQUAMAPS_NATIVE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative2050
AQUAMAPS_SUITABLE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable2050
AQUAMAPS_NATIVE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNN
AQUAMAPS_SUITABLE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNNSuitable
FEED_FORWARD_A_N_N_DISTRIBUTION=org.gcube.dataanalysis.ecoengine.spatialdistributions.FeedForwardNeuralNetworkDistribution
@@ -0,0 +1,3 @@
DBSCAN=org.gcube.dataanalysis.ecoengine.clustering.DBScan
KMEANS=org.gcube.dataanalysis.ecoengine.clustering.KMeans
XMEANS=org.gcube.dataanalysis.ecoengine.clustering.XMeansWrapper
@@ -0,0 +1,3 @@
DISCREPANCY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis
QUALITY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DistributionQualityAnalysis
HRS=org.gcube.dataanalysis.ecoengine.evaluation.HabitatRepresentativeness
@@ -0,0 +1,2 @@
LOCAL_WITH_DATABASE=org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator
SIMPLE_LOCAL=org.gcube.dataanalysis.ecoengine.processing.LocalSimpleSplitGenerator
@@ -0,0 +1 @@
HSPEN_MODELER=org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler
@@ -0,0 +1,4 @@
HSPEN=org.gcube.dataanalysis.ecoengine.models.ModelHSPEN
AQUAMAPSNN=org.gcube.dataanalysis.ecoengine.models.ModelAquamapsNN
FEED_FORWARD_ANN=org.gcube.dataanalysis.ecoengine.models.FeedForwardNN
FEED_FORWARD_ANN_FILE=org.gcube.dataanalysis.ecoengine.models.testing.FeedForwardNNFile
@@ -0,0 +1 @@
AQUAMAPS_SUITABLE=org.gcube.dataanalysis.peeng.models.AquamapsSuitableNode

File diff suppressed because it is too large.
@@ -0,0 +1 @@
1,2.87747E-6,2,1.603E-7,3,2.84254E-6,4,1.06799E-5,5,8.2902E-7,6,9.5606E-7,7,12.5088,8,2.8121E-6,9,3.62395E-6,10,15.2436,11,27.0718,12,62.4943,13,27.2604,14,38.4655,15,0.00185396,16,8.42477E-4,17,0.00539245,18,5.84188E-4,19,1.00963E-5,20,1.65052E-5,21,4.22465E-4,22,4.83264E-6,23,2.82366E-4,24,3.37542E-5,25,2.26395E-5,26,1.51102E-5,27,9.59838E-6,28,3.44589E-6,29,1.96001E-4,30,4.7696E-6,
@@ -0,0 +1,25 @@
ABSENCE_CELLS_FROM_AQUAMAPS=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.MarineAbsencePointsFromAquamapsDistribution
BIOCLIMATE_HSPEC=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPECTransducer
BIOCLIMATE_HCAF=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHCAFTransducer
BIOCLIMATE_HSPEN=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPENTransducer
HCAF_INTERPOLATION=org.gcube.dataanalysis.ecoengine.transducers.InterpolationTransducer
HCAF_FILTER=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.HcafFilter
HSPEN_FILTER=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.HspenFilter
OCCURRENCES_MERGER=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsMerger
OCCURRENCES_INTERSECTOR=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsIntersector
OCCURRENCES_MARINE_TERRESTRIAL=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsInSeaOnEarth
OCCURRENCES_DUPLICATES_DELETER=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsDuplicatesDeleter
OCCURRENCES_SUBTRACTION=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsSubtraction
PRESENCE_CELLS_GENERATION=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.MarinePresencePoints
FIN_TAXA_MATCH=org.gcube.dataanalysis.fin.taxamatch.TaxaMatchTransducer
LISTNAMES_TABLES=org.gcube.dataacces.algorithms.DatabasesResourcesManagerAlgorithms.AbsoluteSpeciesBarChartsAlgorithm
TEST_ALG=org.gcube.dataacces.algorithms.DatabasesResourcesManagerAlgorithms.SimpleAlg
LISTDBINFO=org.gcube.dataaccess.algorithms.drmalgorithms.ListDBInfo
LISTDBNAMES=org.gcube.dataaccess.algorithms.drmalgorithms.ListNames
LISTDBSCHEMA=org.gcube.dataaccess.algorithms.drmalgorithms.ListSchemas
LISTTABLES=org.gcube.dataaccess.algorithms.drmalgorithms.ListTables
GETTABLEDETAILS=org.gcube.dataaccess.algorithms.drmalgorithms.GetTableDetails
LISTSUBMITQUERY=org.gcube.dataaccess.algorithms.drmalgorithms.SubmitQuery
SAMPLEONTABLE=org.gcube.dataaccess.algorithms.drmalgorithms.SampleOnTable
SMARTSAMPLEONTABLE=org.gcube.dataaccess.algorithms.drmalgorithms.SmartSampleOnTable
RANDOMSAMPLEONTABLE=org.gcube.dataaccess.algorithms.drmalgorithms.RandomSampleOnTable
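Note: the properties above map algorithm identifiers to fully qualified class names. A hedged sketch of how such a name-to-class mapping can be resolved at runtime with plain Java reflection follows; the file path and the no-argument constructor are assumptions, and this is not the Statistical Manager's actual loader.

import java.io.FileInputStream;
import java.util.Properties;

// Sketch: resolve an identifier such as LISTTABLES to its implementation class.
public class AlgorithmLookup {
    public static Object load(String identifier) throws Exception {
        Properties mapping = new Properties();
        try (FileInputStream in = new FileInputStream("cfg/transducerers.properties")) { // hypothetical path
            mapping.load(in);
        }
        String className = mapping.getProperty(identifier);
        if (className == null) {
            throw new IllegalArgumentException("Unknown algorithm: " + identifier);
        }
        return Class.forName(className).getDeclaredConstructor().newInstance(); // assumes a no-arg constructor
    }

    public static void main(String[] args) throws Exception {
        Object algorithm = load("LISTTABLES"); // -> org.gcube.dataaccess.algorithms.drmalgorithms.ListTables
        System.out.println("Instantiated " + algorithm.getClass().getName());
    }
}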
@@ -0,0 +1,12 @@
ANOMALIES_DETECTION=DBSCAN,KMEANS,XMEANS
CLASSIFICATION=FEED_FORWARD_A_N_N_DISTRIBUTION
CLIMATE=BIOCLIMATE_HSPEC,BIOCLIMATE_HCAF,BIOCLIMATE_HSPEN,HCAF_INTERPOLATION
CORRELATION_ANALYSIS=HRS
DATA_CLUSTERING=DBSCAN,KMEANS,XMEANS
FILTERING=HCAF_FILTER,HSPEN_FILTER
FUNCTION_SIMULATION=FEED_FORWARD_A_N_N_DISTRIBUTION
OCCURRENCES=ABSENCE_CELLS_FROM_AQUAMAPS,PRESENCE_CELLS_GENERATION,OCCURRENCES_MERGER,OCCURRENCES_INTERSECTOR,OCCURRENCES_MARINE_TERRESTRIAL,OCCURRENCES_DUPLICATES_DELETER,OCCURRENCES_SUBTRACTION
PERFORMANCES_EVALUATION=QUALITY_ANALYSIS,DISCREPANCY_ANALYSIS
SPECIES_SIMULATION=AQUAMAPS_SUITABLE,AQUAMAPS_NATIVE,AQUAMAPS_NATIVE_2050,AQUAMAPS_SUITABLE_2050,AQUAMAPS_NATIVE_NEURALNETWORK,AQUAMAPS_SUITABLE_NEURALNETWORK
TRAINING=HSPEN,AQUAMAPSNN,FEED_FORWARD_ANN
TIME_SERIES=HCAF_INTERPOLATION
@@ -0,0 +1,2 @@
v. 1.0.0 (09-06-2014)
* First release
@@ -0,0 +1 @@
${gcube.license}
@@ -0,0 +1,64 @@
The gCube System - ${name}
--------------------------------------------------

${description}


${gcube.description}

${gcube.funding}


Version
--------------------------------------------------

${version} (${buildDate})

Please see the file named "changelog.xml" in this directory for the release notes.


Authors
--------------------------------------------------

* Loredana Liccardo (loredana.liccardo-AT-isti.cnr.it), CNR Pisa,
  Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".

Maintainers
-----------

* Loredana Liccardo (loredana.liccardo-AT-isti.cnr.it), CNR Pisa,
  Istituto di Scienza e Tecnologie dell'Informazione "A. Faedo".

Download information
--------------------------------------------------

Source code is available from SVN:
${scm.url}

Binaries can be downloaded from the gCube website:
${gcube.website}


Installation
--------------------------------------------------

Installation documentation is available on-line in the gCube Wiki:
please refer to the INSTALL file.

Documentation
--------------------------------------------------

Documentation is available on-line in the gCube Wiki:
${gcube.wikiRoot}

Support
--------------------------------------------------

Bugs and support requests can be reported in the gCube issue tracking tool:
${gcube.issueTracking}


Licensing
--------------------------------------------------

This software is licensed under the terms you may find in the file named "LICENSE" in this directory.
|
|
@ -0,0 +1,71 @@
|
|||
<ReleaseNotes>
|
||||
|
||||
<Changeset
|
||||
component="org.gcube.dataaccess.algorithms.database-rm-algorithms.1-3-0"
|
||||
date="2014-12-05">
|
||||
<Change>algorithm SubmitQuery modified in order to pass the string
|
||||
pathfile to the submitQuery method of class DatabaseManagement in
|
||||
order to generate a file and retrieve it from the statistical manager
|
||||
with the map as output.
|
||||
</Change>
|
||||
<Change>Algorithms RandomSampleOnTable, SampleOnTable,
|
||||
SmartSampleOnTable and SubmitQuery modified in order to manage the
|
||||
file as output.
|
||||
</Change>
|
||||
<Change>total number of rows returned as output for the result of the
|
||||
submit query operation: algorithm SubmitQuery modified
|
||||
</Change>
|
||||
</Changeset>
|
||||
<Changeset
|
||||
component="org.gcube.dataaccess.algorithms.database-rm-algorithms.1-2-0"
|
||||
date="2014-10-27">
|
||||
<Change>some algorithms modified to cope with changes applied in the
|
||||
database-resource-manager component regarding the management of
|
||||
the same access
|
||||
point for a resource and the database connection
|
||||
shutdown.
|
||||
</Change>
|
||||
</Changeset>
|
||||
<Changeset
|
||||
component="org.gcube.dataaccess.algorithms.database-rm-algorithms.1-1-0"
|
||||
date="2014-09-10">
|
||||
<Change>SubmitQuery algorithm updated because input parameters
|
||||
in the
|
||||
SubmitQuery method of the DatabaseManagement class have been updated
|
||||
</Change>
|
||||
<Change>timer added in the SubmitQuery algorithm that stops the query
|
||||
execution after 30 minutes
|
||||
</Change>
|
||||
<Change>Hibernate bug fixed related to joining tables with the same
|
||||
column names. Bug resolved using JDBC. GetConnection
|
||||
method changed
|
||||
in the SubmitQuery algorithm to create
|
||||
the connection with JDBC
|
||||
</Change>
|
||||
<Change>method shutdown of algorithm SubmitQuery implemented in order
|
||||
to allow a user to cancel a submit query operation
|
||||
</Change>
|
||||
<Change>bug fixed when two algorithms are submitted concurrently. Bug
|
||||
related to the result map's name. Bug fixed by generating the name as
|
||||
ResultsMap"+UUID.randomUUID()
|
||||
</Change>
|
||||
<Change>classes SubmitQuery, SampleOnTable, SmartSampleOnTable,
|
||||
RandomSampleOnTable modified in order to not make available the
|
||||
file
|
||||
as a result
|
||||
</Change>
|
||||
<Change>file DestinationDBHibernate modified to insert the timeout
|
||||
property
|
||||
</Change>
|
||||
<Change>bug fixed on table names that are keywords. Method
|
||||
GetTableDetails corrected to consider the name as schema.table for
|
||||
postgres and database.table for mysql
|
||||
</Change>
|
||||
</Changeset>
|
||||
<Changeset
|
||||
component="org.gcube.dataaccess.algorithms.database-rm-algorithms.1-0-0"
|
||||
date="2014-06-09">
|
||||
<Change>first release</Change>
|
||||
</Changeset>
|
||||
|
||||
</ReleaseNotes>
|
|
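The 1-1-0 changeset above mentions a timer that stops the SubmitQuery execution after 30 minutes. The actual code lives in the SubmitQuery algorithm, which is not among the files shown below, so the following is only a minimal sketch, built on plain JDK classes (java.util.Timer and java.sql.Statement.cancel()), of how such a watchdog can be wrapped around a JDBC query; the class and method names are invented for the example.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Timer;
import java.util.TimerTask;

/** Hypothetical sketch of a watchdog that cancels a query after a timeout. Not part of this commit. */
public class QueryWatchdogSketch {

    // Runs the query and cancels it if it is still executing when the timer fires.
    public static ResultSet executeWithTimeout(Connection connection, String query,
            long timeoutMillis) throws SQLException {
        final Statement statement = connection.createStatement();
        Timer watchdog = new Timer(true); // daemon thread, does not keep the JVM alive
        watchdog.schedule(new TimerTask() {
            @Override
            public void run() {
                try {
                    statement.cancel(); // asks the JDBC driver to interrupt the running query
                } catch (SQLException e) {
                    // nothing left to do: the query may already have finished
                }
            }
        }, timeoutMillis);
        try {
            return statement.executeQuery(query);
        } finally {
            watchdog.cancel(); // stop the watchdog once the query returns or fails
        }
    }
}

A caller would use something like executeWithTimeout(connection, query, 30L * 60 * 1000) to obtain the 30-minute limit described in the changelog.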
@ -0,0 +1,31 @@
|
|||
<assembly
|
||||
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
|
||||
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0 http://maven.apache.org/xsd/assembly-1.1.0.xsd">
|
||||
<id>servicearchive</id>
|
||||
<formats>
|
||||
<format>tar.gz</format>
|
||||
</formats>
|
||||
<baseDirectory>/</baseDirectory>
|
||||
<fileSets>
|
||||
<fileSet>
|
||||
<directory>${distroDirectory}</directory>
|
||||
<outputDirectory>/</outputDirectory>
|
||||
<useDefaultExcludes>true</useDefaultExcludes>
|
||||
<includes>
|
||||
<include>README</include>
|
||||
<include>LICENSE</include>
|
||||
<include>changelog.xml</include>
|
||||
<include>profile.xml</include>
|
||||
</includes>
|
||||
<fileMode>755</fileMode>
|
||||
<filtered>true</filtered>
|
||||
</fileSet>
|
||||
</fileSets>
|
||||
<files>
|
||||
<file>
|
||||
<source>target/${build.finalName}.jar</source>
|
||||
<outputDirectory>/${artifactId}</outputDirectory>
|
||||
</file>
|
||||
</files>
|
||||
</assembly>
|
|
@ -0,0 +1,25 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<Resource xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
|
||||
<ID></ID>
|
||||
<Type>Service</Type>
|
||||
<Profile>
|
||||
<Description>Database Resource Manager algorithms</Description>
|
||||
<Class>DataAccess</Class>
|
||||
<Name>${artifactId}</Name>
|
||||
<Version>1.0.0</Version>
|
||||
<Packages>
|
||||
<Software>
|
||||
<Name>${artifactId}</Name>
|
||||
<Version>${version}</Version>
|
||||
<MavenCoordinates>
|
||||
<groupId>${groupId}</groupId>
|
||||
<artifactId>${artifactId}</artifactId>
|
||||
<version>${version}</version>
|
||||
</MavenCoordinates>
|
||||
<Files>
|
||||
<File>${build.finalName}.jar</File>
|
||||
</Files>
|
||||
</Software>
|
||||
</Packages>
|
||||
</Profile>
|
||||
</Resource>
|
|
@ -0,0 +1,2 @@
|
|||
0 [main] DEBUG root - Executing:test
|
||||
0 [main] DEBUG root - Executing:test
|
|
@ -0,0 +1,10 @@
|
|||
Xiphias gladius,135638
|
||||
Fulmarus glacialis,131885
|
||||
Thunnus albacares,124076
|
||||
Pachymetopon blochii,113597
|
||||
Aptenodytes patagonicus,112605
|
||||
Gadus morhua,101777
|
||||
Caretta caretta,101769
|
||||
Thyrsites atun,97986
|
||||
Loligo vulgaris reynaudi,96672
|
||||
Argyrozona argyrozona,96278
|
|
|
@ -0,0 +1,84 @@
|
|||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
|
||||
|
||||
<modelVersion>4.0.0</modelVersion>
|
||||
|
||||
<parent>
|
||||
<groupId>org.gcube.tools</groupId>
|
||||
<artifactId>maven-parent</artifactId>
|
||||
<version>1.0.0</version>
|
||||
</parent>
|
||||
|
||||
<groupId>org.gcube.dataaccess.algorithms</groupId>
|
||||
<artifactId>database-rm-algorithms</artifactId>
|
||||
<version>1.3.0-SNAPSHOT</version>
|
||||
<name>DatabasesResourcesManagerAlgorithms</name>
|
||||
<description>Databases Resources Manager Algorithms</description>
|
||||
|
||||
<properties>
|
||||
<distroDirectory>${project.basedir}/distro</distroDirectory>
|
||||
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
|
||||
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
|
||||
</properties>
|
||||
|
||||
|
||||
<!-- <packaging>maven-plugin</packaging> -->
|
||||
|
||||
<!-- <name>DatabasesResourcesManagerAlgorithms Maven Mojo</name> -->
|
||||
<!-- <url>http://maven.apache.org</url> -->
|
||||
|
||||
<dependencies>
|
||||
|
||||
<!-- <dependency> -->
|
||||
<!-- <groupId>org.gcube.dataanalysis</groupId> -->
|
||||
<!-- <artifactId>ecological-engine</artifactId> -->
|
||||
<!-- <version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version> -->
|
||||
<!-- </dependency> -->
|
||||
|
||||
|
||||
|
||||
<dependency>
|
||||
<groupId>org.gcube.dataaccess</groupId>
|
||||
<artifactId>database-resource-manager</artifactId>
|
||||
<version>[1.0.0-SNAPSHOT, 2.0.0-SNAPSHOT)</version>
|
||||
</dependency>
|
||||
|
||||
|
||||
<!-- <dependency> -->
|
||||
<!-- <groupId>org.apache.maven</groupId> -->
|
||||
<!-- <artifactId>maven-plugin-api</artifactId> -->
|
||||
<!-- <version>2.0</version> -->
|
||||
<!-- </dependency> -->
|
||||
<!-- <dependency> -->
|
||||
<!-- <groupId>junit</groupId> -->
|
||||
<!-- <artifactId>junit</artifactId> -->
|
||||
<!-- <version>3.8.1</version> -->
|
||||
<!-- <scope>test</scope> -->
|
||||
<!-- </dependency> -->
|
||||
</dependencies>
|
||||
|
||||
<build>
|
||||
|
||||
<plugins>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-assembly-plugin</artifactId>
|
||||
<configuration>
|
||||
<descriptors>
|
||||
<descriptor>${distroDirectory}/descriptor.xml</descriptor>
|
||||
</descriptors>
|
||||
</configuration>
|
||||
<executions>
|
||||
<execution>
|
||||
<id>servicearchive</id>
|
||||
<phase>install</phase>
|
||||
<goals>
|
||||
<goal>single</goal>
|
||||
</goals>
|
||||
</execution>
|
||||
</executions>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
|
||||
</project>
|
|
@ -0,0 +1,10 @@
|
|||
Xiphias gladius,135638
|
||||
Fulmarus glacialis,131885
|
||||
Thunnus albacares,124076
|
||||
Pachymetopon blochii,113597
|
||||
Aptenodytes patagonicus,112605
|
||||
Gadus morhua,101777
|
||||
Caretta caretta,101769
|
||||
Thyrsites atun,97986
|
||||
Loligo vulgaris reynaudi,96672
|
||||
Argyrozona argyrozona,96278
|
|
|
@ -0,0 +1,539 @@
|
|||
package org.gcube.dataaccess.algorithms.drmalgorithms;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
|
||||
import org.gcube.dataaccess.databases.resources.DBResource;
|
||||
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryException;
|
||||
import org.gcube.resources.discovery.client.api.InvalidResultException;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
/** Class that retrieves some information about a chosen table */
|
||||
public class GetTableDetails extends StandardLocalExternalAlgorithm {
|
||||
|
||||
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
// object that allows to manage some operations on a database
|
||||
private DatabaseManagement mgt;
|
||||
// variable that keeps track of the database's type
|
||||
private String driverInfo;
|
||||
|
||||
// database's parameters specified by the user
|
||||
private String resourceName = null;
|
||||
private String databaseName = null;
|
||||
private String schemaName = null;
|
||||
private String tableName = null;
|
||||
|
||||
private SessionFactory sf;
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
|
||||
mgt = new DatabaseManagement(config.getConfigPath());
|
||||
|
||||
AnalysisLogger.getLogger().debug("In GetTableDetails->Initialization");
|
||||
|
||||
String scope = config.getGcubeScope();
|
||||
|
||||
AnalysisLogger.getLogger().debug("In GetTableDetails->scope set by config: " + scope);
|
||||
|
||||
// AnalysisLogger.getLogger().debug("In TableDetails->scope set by config: " + config.getGcubeScope());
|
||||
|
||||
if (scope == null || scope.length() == 0) {
|
||||
|
||||
|
||||
scope = ScopeProvider.instance.get();
|
||||
|
||||
AnalysisLogger.getLogger().debug("In GetTableDetails->scope set by ScopeProvider: " + scope);
|
||||
|
||||
|
||||
} else {
|
||||
|
||||
ScopeProvider.instance.set(scope);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
// add a simple description for the algorithm
|
||||
|
||||
return "Algorithm that allows to view table details of a chosen database";
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process() throws Exception, IOException,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException,
|
||||
HibernateException {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In GetTableDetails->Processing");
|
||||
|
||||
try {
|
||||
|
||||
// retrieve information
|
||||
List<String> Info = retrieveInfo();
|
||||
|
||||
// create the connection
|
||||
getConnection(Info);
|
||||
|
||||
// get table's details
|
||||
|
||||
// recover information about the "CreateTableStatement" and
|
||||
// "Number of rows" of the table chosen by the user
|
||||
|
||||
map = getDetails();
|
||||
|
||||
} catch (HibernateException h) {
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails-> ERROR " + h.getMessage());
|
||||
throw h;
|
||||
} catch (IllegalStateException e) {
|
||||
// e.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails-> ERROR " + e.getMessage());
|
||||
|
||||
throw e;
|
||||
|
||||
} catch (DiscoveryException e1) {
|
||||
// e1.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails-> ERROR " + e1.getMessage());
|
||||
|
||||
throw e1;
|
||||
|
||||
} catch (InvalidResultException e2) {
|
||||
// e2.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails-> ERROR " + e2.getMessage());
|
||||
|
||||
throw e2;
|
||||
|
||||
} catch (IOException e3) {
|
||||
// e3.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails-> Exception " + e3.getMessage());
|
||||
|
||||
throw e3;
|
||||
}
|
||||
|
||||
catch (Exception e4) {
|
||||
// e4.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails-> Exception " + e4.getMessage());
|
||||
|
||||
throw e4;
|
||||
|
||||
} finally {
|
||||
// close the connection
|
||||
mgt.closeConnection();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setInputParameters() {
|
||||
|
||||
// AnalysisLogger.getLogger().debug("In GetTableDetails->setting inputs");
|
||||
|
||||
// parameters specified by the user
|
||||
addStringInput("ResourceName", "The name of the resource", "");
|
||||
addStringInput("DatabaseName", "The name of the database", "");
|
||||
addStringInput("SchemaName", "The name of the schema", "");
|
||||
addStringInput("TableName", "The name of the table", "");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
AnalysisLogger.getLogger().debug("In GetTableDetails->Shutdown");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In GetTableDetails->retrieving outputs");
|
||||
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap"+UUID.randomUUID(), "Results Map");
|
||||
|
||||
return output;
|
||||
|
||||
}
|
||||
|
||||
// create the database's connection
|
||||
private void getConnection(List<String> Info) throws IOException {
|
||||
|
||||
// create the connection
|
||||
Iterator<String> iterator = Info.iterator();
|
||||
|
||||
String DatabaseUserName = iterator.next();
|
||||
String DatabasePassword = iterator.next();
|
||||
String DatabaseDriver = iterator.next();
|
||||
String DatabaseDialect = iterator.next();
|
||||
String DatabaseURL = iterator.next();
|
||||
String DatabaseName = iterator.next();
|
||||
|
||||
mgt.createConnection(DatabaseUserName,
|
||||
DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
|
||||
DatabaseName);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->database " + DatabaseName + ": connected");
|
||||
}
|
||||
|
||||
// Method that recovers the info useful for the connection
|
||||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if(resourceName != null){
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if(databaseName != null){
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
throw new Exception("Warning: insert the database name");
|
||||
}
|
||||
|
||||
// retrieve the chosen resource
|
||||
DatabasesDiscoverer discovery = new DatabasesDiscoverer();
|
||||
|
||||
List<DBResource> resources = discovery.discover();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->number of database resources: "
|
||||
+ resources.size());
|
||||
|
||||
for (int i = 0; i < resources.size(); i++) {
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->Resource's name: "
|
||||
+ resources.get(i).getResourceName());
|
||||
|
||||
}
|
||||
|
||||
// list that contains information useful for the connection
|
||||
List<String> info = new ArrayList<String>();
|
||||
|
||||
check: for (int i = 0; i < resources.size(); i++) {
|
||||
|
||||
if (resources.get(i).getResourceName().toLowerCase().equals(resourceName.toLowerCase())) {
|
||||
|
||||
normalizeDBInfo(resources.get(i));
|
||||
|
||||
for (int j = 0; j < resources.get(i).getAccessPoints().size(); j++) {
|
||||
|
||||
// if (resources.get(i).getAccessPoints().get(j)
|
||||
// .getDatabaseName().equals(databaseName)) {
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getUsername());
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getPassword());
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getDriver());
|
||||
//
|
||||
// // driverInfo =
|
||||
// // resources.get(i).getAccessPoints().get(j)
|
||||
// // .getDriver();
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getDialect());
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .address());
|
||||
//
|
||||
// info.add(databaseName);
|
||||
//
|
||||
// break check;
|
||||
//
|
||||
// }
|
||||
|
||||
// if (resources.get(i).getAccessPoints().get(j)
|
||||
// .address().equals(url)){
|
||||
//
|
||||
// System.out.println("url selezionato");
|
||||
//
|
||||
//
|
||||
//
|
||||
// }
|
||||
|
||||
if (resources.get(i).getAccessPoints().get(j)
|
||||
.getDatabaseName().toLowerCase().equals(databaseName.toLowerCase())) {
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getUsername());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->username: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getUsername());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getPassword());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->password: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getPassword());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver());
|
||||
|
||||
driverInfo = resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->driver: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDriver());
|
||||
|
||||
// driverInfo =
|
||||
// resources.get(i).getAccessPoints().get(j)
|
||||
// .getDriver();
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDialect());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->dialect: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDialect());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.address());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->url: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).address());
|
||||
|
||||
info.add(databaseName);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->databasename: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDatabaseName());
|
||||
|
||||
break check;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
AnalysisLogger
|
||||
.getLogger()
|
||||
.debug("In GetTableDetails->information useful for connection: retrieved");
|
||||
return info;
|
||||
|
||||
}
|
||||
|
||||
// method that retrieves information such as "CreateTableStatement" and the
|
||||
// number of rows about the table chosen by the user
|
||||
private LinkedHashMap<String, StatisticalType> getDetails()
|
||||
throws Exception {
|
||||
|
||||
|
||||
tableName = getInputParameter("TableName");
|
||||
|
||||
if(tableName != null){
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
}
|
||||
if ((tableName == null) || (tableName.equals(""))) {
|
||||
throw new Exception("Warning: insert the table name");
|
||||
}
|
||||
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
|
||||
schemaName = getInputParameter("SchemaName");
|
||||
if(schemaName != null){
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
}
|
||||
if ((schemaName == null) || (schemaName.equals(""))) {
|
||||
throw new Exception("Warning: insert the schema name");
|
||||
}
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->getting details on the table: " + tableName);
|
||||
|
||||
// recover metadata of the table
|
||||
|
||||
// recover the "show create" statement
|
||||
|
||||
String createTable = null;
|
||||
|
||||
if ((driverInfo.toLowerCase().contains("postgres"))) {
|
||||
|
||||
createTable = mgt.getCreateTable(tableName, schemaName);
|
||||
|
||||
}
|
||||
|
||||
if ((driverInfo.toLowerCase().contains("mysql"))) {
|
||||
|
||||
createTable = mgt.getCreateTable(tableName, databaseName);
|
||||
|
||||
}
|
||||
|
||||
PrimitiveType valCreateTable = new PrimitiveType(
|
||||
String.class.getName(), createTable, PrimitiveTypes.STRING,
|
||||
"Create Table Statement", "Create Table Statement");
|
||||
|
||||
map.put("CreateTable", valCreateTable);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->getting the \"CreateTableStatement\": "
|
||||
+ createTable);
|
||||
|
||||
// to retrieve the column names of a table
|
||||
List<String> listColumnNamesTable = mgt.getListColumnNamesTable();
|
||||
|
||||
String ColumnName = "";
|
||||
|
||||
for (int i = 0; i < listColumnNamesTable.size(); i++) {
|
||||
|
||||
if (i != listColumnNamesTable.size() - 1) {
|
||||
ColumnName = ColumnName + listColumnNamesTable.get(i) + ",";
|
||||
} else {
|
||||
ColumnName = ColumnName + listColumnNamesTable.get(i);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->column names: "
|
||||
+ ColumnName);
|
||||
|
||||
PrimitiveType valListColumnNamesTable = new PrimitiveType(
|
||||
String.class.getName(), ColumnName, PrimitiveTypes.STRING,
|
||||
"Column Name", "Column Name");
|
||||
|
||||
map.put("Column Names", valListColumnNamesTable);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->getting the column names list: "
|
||||
+ ColumnName);
|
||||
|
||||
// recover the number of rows
|
||||
// BigInteger rows = mgt.getNumberOfRows(tableName);
|
||||
|
||||
long rows = 0;
|
||||
|
||||
if ((driverInfo.toLowerCase().contains("postgres"))) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->schemaName:" + schemaName);;
|
||||
|
||||
rows = mgt.getNumberOfRows(tableName, schemaName);
|
||||
|
||||
}
|
||||
|
||||
if ((driverInfo.toLowerCase().contains("mysql"))) {
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->*databasename:" + databaseName);;
|
||||
|
||||
rows = mgt.getNumberOfRows(tableName, databaseName);
|
||||
|
||||
}
|
||||
|
||||
// long rows = mgt.getNumberOfRows(tableName);
|
||||
|
||||
// PrimitiveType valRows = new PrimitiveType(
|
||||
// String.class.getName(), rows.toString(),
|
||||
// PrimitiveTypes.STRING, "NumberRows", "Rows' Number");
|
||||
|
||||
PrimitiveType valRows = new PrimitiveType(String.class.getName(),
|
||||
Long.toString(rows), PrimitiveTypes.STRING, "Number Rows",
|
||||
"Rows' Number");
|
||||
|
||||
map.put("NumberRows", valRows);
|
||||
|
||||
// AnalysisLogger
|
||||
// .getLogger()
|
||||
// .debug("In TableDetails->getting the number of rows: " +
|
||||
// rows.toString());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->getting the number of rows: "
|
||||
+ Long.toString(rows));
|
||||
|
||||
return map;
|
||||
|
||||
}
|
||||
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In GetTableDetails->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
//
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
//
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
//
|
||||
// // e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In GetTableDetails->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
//
|
||||
// throw e;
|
||||
//
|
||||
// }
|
||||
//
|
||||
// }
|
||||
}
|
||||
|
||||
}
|
|
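In getDetails() above, the comma-separated column list is built by repeated String concatenation inside the loop. On the Java 1.7 target declared for this project, a StringBuilder-based helper is the idiomatic equivalent; the class below is a hypothetical sketch with the same output, not part of this commit.

import java.util.Arrays;
import java.util.List;

/** Hypothetical helper equivalent to the column-name loop in GetTableDetails.getDetails(). */
public class ColumnNameJoiner {

    // Joins the column names with commas without creating an intermediate String per iteration.
    public static String join(List<String> columnNames) {
        StringBuilder builder = new StringBuilder();
        for (int i = 0; i < columnNames.size(); i++) {
            if (i > 0) {
                builder.append(",");
            }
            builder.append(columnNames.get(i));
        }
        return builder.toString();
    }

    public static void main(String[] args) {
        // prints: csquarecode,depthmin,depthmax
        System.out.println(join(Arrays.asList("csquarecode", "depthmin", "depthmax")));
    }
}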
@ -0,0 +1,293 @@
|
|||
package org.gcube.dataaccess.algorithms.drmalgorithms;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
|
||||
import org.gcube.dataaccess.databases.resources.DBResource;
|
||||
import org.gcube.dataaccess.databases.resources.DBResource.AccessPoint;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryException;
|
||||
import org.gcube.resources.discovery.client.api.InvalidResultException;
|
||||
|
||||
/** Class that retrieves information about a chosen resource */
|
||||
public class ListDBInfo extends StandardLocalExternalAlgorithm {
|
||||
|
||||
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
|
||||
// database's parameters specified by the user
|
||||
private String resourceName = null;
|
||||
|
||||
// list that contains information about the resource
|
||||
private List<AccessPoint> ap = new ArrayList<AccessPoint>();
|
||||
|
||||
// variable that keeps track of database platform version
|
||||
private String platformVersion = "";
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListDBInfo->Initialization");
|
||||
|
||||
String scope = config.getGcubeScope();
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListDBInfo->scope set by config object: " + scope);
|
||||
|
||||
|
||||
|
||||
if (scope == null || scope.length() == 0) {
|
||||
|
||||
scope = ScopeProvider.instance.get();
|
||||
AnalysisLogger.getLogger().debug("In ListDBInfo->scope set by ScopeProvider: " + scope);
|
||||
|
||||
|
||||
}else{
|
||||
|
||||
ScopeProvider.instance.set(scope);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
// add a simple description for the algorithm
|
||||
return "Algorithm that allows to view information about one chosen resource of Database Type in the Infrastructure";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process() throws Exception, IOException, IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
AnalysisLogger.getLogger().debug("In ListDBInfo->Processing");
|
||||
|
||||
AnalysisLogger.getLogger().debug("Scope: " + ScopeProvider.instance.get());
|
||||
|
||||
|
||||
try{
|
||||
// retrieve information
|
||||
List<AccessPoint> apInfo = retrieveInfo();
|
||||
|
||||
AnalysisLogger.getLogger().debug("access point dimension: " + apInfo.size());
|
||||
|
||||
for (int i = 0; i < apInfo.size(); i++) {
|
||||
String name = "Database name "+(i+1);
|
||||
PrimitiveType DBName = new PrimitiveType(String.class.getName(),
|
||||
apInfo.get(i).getDatabaseName(), PrimitiveTypes.STRING,
|
||||
name
|
||||
, name);
|
||||
|
||||
|
||||
|
||||
map.put(name, DBName);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListDBInfo->Database Name: "
|
||||
+ apInfo.get(i).getDatabaseName());
|
||||
String urlId="URL "+(i+1);
|
||||
PrimitiveType url = new PrimitiveType(String.class.getName(),
|
||||
apInfo.get(i).address(), PrimitiveTypes.STRING, urlId,
|
||||
urlId);
|
||||
|
||||
map.put(urlId, url);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListDBInfo->URL: " + apInfo.get(i).address());
|
||||
|
||||
String driverId="Driver name "+(i+1);
|
||||
PrimitiveType driver = new PrimitiveType(String.class.getName(),
|
||||
apInfo.get(i).getDriver(), PrimitiveTypes.STRING,
|
||||
driverId, driverId);
|
||||
|
||||
map.put(driverId, driver);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListDBInfo->Driver Name: "
|
||||
+ apInfo.get(i).getDriver());
|
||||
|
||||
String dialectId="Dialect name "+(i+1);
|
||||
PrimitiveType dialect = new PrimitiveType(String.class.getName(),
|
||||
apInfo.get(i).getDialect(), PrimitiveTypes.STRING,
|
||||
dialectId, dialectId);
|
||||
|
||||
map.put(dialectId, dialect);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListDBInfo->Dialect Name: "
|
||||
+ apInfo.get(i).getDialect());
|
||||
|
||||
String platformId="Platform Version "+(i+1);
|
||||
PrimitiveType platformVersionValue = new PrimitiveType(String.class.getName(),
|
||||
platformVersion, PrimitiveTypes.STRING,
|
||||
platformId, platformId);
|
||||
|
||||
map.put(platformId, platformVersionValue);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListDBInfo->Platform Version: "
|
||||
+ platformVersion);
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
catch (IllegalStateException e) {
|
||||
// e.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListDBInfo-> ERROR " + e.getMessage());
|
||||
throw e;
|
||||
|
||||
} catch (DiscoveryException e1) {
|
||||
// e1.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListDBInfo-> ERROR " + e1.getMessage());
|
||||
|
||||
throw e1;
|
||||
|
||||
} catch (InvalidResultException e2) {
|
||||
// e2.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListDBInfo-> ERROR " + e2.getMessage());
|
||||
|
||||
|
||||
throw e2;
|
||||
|
||||
}
|
||||
// catch(IOException e3){
|
||||
//// e3.printStackTrace();
|
||||
//
|
||||
// AnalysisLogger.getLogger().debug("In ListDBInfo-> Exception " + e3.getMessage());
|
||||
//
|
||||
// throw e3;
|
||||
// }
|
||||
|
||||
catch(Exception e4){
|
||||
|
||||
// e4.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug("In ListDBInfo-> Exception " + e4.getMessage());
|
||||
|
||||
throw e4;
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setInputParameters() {
|
||||
|
||||
// AnalysisLogger.getLogger().debug("In ListDBInfo->setting inputs");
|
||||
|
||||
// resource name specified by the user
|
||||
addStringInput("ResourceName", "The name of the resource", "");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
AnalysisLogger.getLogger().debug("In ListDBInfo->Shutdown");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListDBInfo->retrieving outputs");
|
||||
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap"+UUID.randomUUID(), "Results Map");
|
||||
|
||||
return output;
|
||||
|
||||
}
|
||||
|
||||
private List<AccessPoint> retrieveInfo() throws Exception, IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
// retrieve information about the chosen resource
|
||||
DatabasesDiscoverer discovery = new DatabasesDiscoverer();
|
||||
|
||||
List<DBResource> resources = discovery.discover();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListDBInfo->number of database resources: "
|
||||
+ resources.size());
|
||||
|
||||
check: for (int i = 0; i < resources.size(); i++) {
|
||||
|
||||
if (resources.get(i).getResourceName().toLowerCase().equals(resourceName.toLowerCase())) {
|
||||
|
||||
platformVersion = resources.get(i).getPlatformVersion();
|
||||
|
||||
// ap = resources.get(i).getAccessPoints();
|
||||
|
||||
normalizeDBInfo(resources.get(i));
|
||||
|
||||
ap = resources.get(i).getAccessPoints();
|
||||
|
||||
break check;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
return ap;
|
||||
|
||||
}
|
||||
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListDBInfo->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
//
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
//
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// }
|
||||
//// catch (IOException e) {
|
||||
// catch (IOException e) {
|
||||
// // TODO Auto-generated catch block
|
||||
//// e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In ListDBInfo->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
// throw e;
|
||||
//
|
||||
// }
|
||||
//
|
||||
// }
|
||||
|
||||
}
|
||||
|
||||
}
|
|
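retrieveInfo() above (like the analogous loops in the other algorithms of this commit) matches the requested resource name by lowering both sides with toLowerCase() and comparing with equals(). String.equalsIgnoreCase() is an equivalent JDK idiom that also avoids the default-locale pitfalls of toLowerCase(); the snippet below only illustrates the equivalence, and the resource names in it are invented.

/** Minimal illustration of the case-insensitive resource-name match; the names are invented. */
public class ResourceNameMatch {

    public static void main(String[] args) {
        String discovered = "ExampleObisDatabase"; // hypothetical resource name
        String requested = "exampleobisdatabase";

        // what retrieveInfo() does today
        boolean lowered = discovered.toLowerCase().equals(requested.toLowerCase());

        // equivalent JDK idiom, independent of the default locale
        boolean ignoreCase = discovered.equalsIgnoreCase(requested);

        System.out.println(lowered + " " + ignoreCase); // prints: true true
    }
}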
@ -0,0 +1,177 @@
|
|||
package org.gcube.dataaccess.algorithms.drmalgorithms;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
|
||||
import org.gcube.dataaccess.databases.resources.DBResource;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryException;
|
||||
import org.gcube.resources.discovery.client.api.InvalidResultException;
|
||||
|
||||
/** Class that retrieves the list of database resources */
|
||||
public class ListNames extends StandardLocalExternalAlgorithm {
|
||||
|
||||
private HashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListNames->Initialization");
|
||||
|
||||
String scope = config.getGcubeScope();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListNames->scope set by config object: " + scope);
|
||||
|
||||
if (scope == null || scope.length() == 0) {
|
||||
|
||||
scope = ScopeProvider.instance.get();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListNames->scope set by ScopeProvider: " + scope);
|
||||
|
||||
} else {
|
||||
|
||||
ScopeProvider.instance.set(scope);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
// add a simple description for the algorithm
|
||||
return "Algorithm that allows to view the available database resources names in the Infrastructure";
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process() throws Exception {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListNames->Processing");
|
||||
|
||||
String scope = ScopeProvider.instance.get();
|
||||
|
||||
if (scope != null) {
|
||||
|
||||
AnalysisLogger.getLogger().debug("getting scope: " + scope);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"getting scope through config: " + config.getGcubeScope());
|
||||
|
||||
}
|
||||
|
||||
try {
|
||||
// retrieve resources
|
||||
List<DBResource> resources = this.retrieveResources();
|
||||
|
||||
// add the name to the list
|
||||
|
||||
// list that contains the resource's names
|
||||
ArrayList<String> listnames = new ArrayList<String>();
|
||||
|
||||
for (int i = 0; i < resources.size(); i++) {
|
||||
String name = "Resource name "+(i+1);
|
||||
PrimitiveType val = new PrimitiveType(String.class.getName(),
|
||||
null, PrimitiveTypes.STRING, name, name);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListNames->Resource's name: "
|
||||
+ resources.get(i).getResourceName());
|
||||
|
||||
listnames.add(resources.get(i).getResourceName());
|
||||
|
||||
val.setContent(listnames.get(i));
|
||||
|
||||
map.put(name, val);
|
||||
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListNames->Output Map Size: " + map.size());
|
||||
|
||||
} catch (IllegalStateException e) {
|
||||
// e.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListNames-> ERROR " + e.getMessage());
|
||||
throw e;
|
||||
|
||||
} catch (DiscoveryException e1) {
|
||||
// e1.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListNames-> ERROR " + e1.getMessage());
|
||||
|
||||
throw e1;
|
||||
|
||||
} catch (InvalidResultException e2) {
|
||||
// e2.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListNames-> ERROR " + e2.getMessage());
|
||||
|
||||
throw e2;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setInputParameters() {
|
||||
|
||||
// AnalysisLogger.getLogger().debug("In ListNames->setting inputs");
|
||||
addStringInput("MaxNumber", "Max Number of Resources (-1 for all)",
|
||||
"-1");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
AnalysisLogger.getLogger().debug("In ListNames->Shutdown");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListNames->retrieving outputs");
|
||||
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(Map.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap"+UUID.randomUUID(), "Results Map");
|
||||
|
||||
return output;
|
||||
|
||||
}
|
||||
|
||||
|
||||
private List<DBResource> retrieveResources() throws IllegalStateException,
|
||||
DiscoveryException, InvalidResultException {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListNames->retrieving resources");
|
||||
List<DBResource> resources = new ArrayList<DBResource>();
|
||||
|
||||
// retrieve the resources
|
||||
DatabasesDiscoverer discovery = new DatabasesDiscoverer();
|
||||
|
||||
resources = discovery.discover();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListNames->number of database resources: "
|
||||
+ resources.size());
|
||||
|
||||
return resources;
|
||||
|
||||
}
|
||||
|
||||
}
|
|
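process() above first creates each PrimitiveType with a null content and then fills it with setContent(); the other algorithms in this commit (ListDBInfo, for example) pass the value straight to the constructor. A minimal sketch of the one-step form, using only the constructor signature already visible in this diff:

import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;

/** Sketch: building an output entry in one step, as ListDBInfo already does. */
public class ResourceNameEntrySketch {

    // Equivalent to "new PrimitiveType(..., null, ...)" followed by setContent(value)
    public static PrimitiveType entry(String name, String value) {
        return new PrimitiveType(String.class.getName(), value,
                PrimitiveTypes.STRING, name, name);
    }
}

Either form produces the same map entry; the constructor form simply avoids the temporary null content.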
@ -0,0 +1,473 @@
|
|||
package org.gcube.dataaccess.algorithms.drmalgorithms;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
|
||||
import org.gcube.dataaccess.databases.resources.DBResource;
|
||||
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryException;
|
||||
import org.gcube.resources.discovery.client.api.InvalidResultException;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
/**
|
||||
* Class that retrieves the schema names of a chosen database. In this
|
||||
* case the database type is "postgresql"
|
||||
*/
|
||||
public class ListSchemas extends StandardLocalExternalAlgorithm {
|
||||
|
||||
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
private DatabaseManagement mgt;
|
||||
|
||||
// database's parameters specified by the user
|
||||
private String resourceName = null;
|
||||
private String databaseName = null;
|
||||
|
||||
private SessionFactory sf;
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
mgt = new DatabaseManagement(config.getConfigPath());
|
||||
AnalysisLogger.getLogger().debug("In ListSchemas->Initialization");
|
||||
|
||||
String scope = config.getGcubeScope();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->scope set by config object: " + scope);
|
||||
|
||||
if (scope == null || scope.length() == 0) {
|
||||
|
||||
scope = ScopeProvider.instance.get();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->scope set by ScopeProvider: " + scope);
|
||||
|
||||
} else {
|
||||
|
||||
ScopeProvider.instance.set(scope);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
// add a simple description for the algorithm
|
||||
|
||||
return "Algorithm that allows to view the schema names of a chosen database for which the type is Postgres";
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process() throws Exception, IOException,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException,
|
||||
HibernateException {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListSchemas->Processing");
|
||||
|
||||
try {
|
||||
|
||||
// retrieve information useful for connection
|
||||
|
||||
List<String> Info = retrieveInfo();
|
||||
|
||||
// create the connection
|
||||
getConnection(Info);
|
||||
|
||||
// get the schema's list
|
||||
|
||||
List<String> listSchemas = new ArrayList<String>();
|
||||
listSchemas = getSchemas();
|
||||
|
||||
if (listSchemas.size() == 0) {
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->Warning: no schema available");
|
||||
|
||||
}
|
||||
|
||||
for (int i = 0; i < listSchemas.size(); i++) {
|
||||
|
||||
PrimitiveType val = new PrimitiveType(String.class.getName(),
|
||||
null, PrimitiveTypes.STRING, "schema's name",
|
||||
"schema's name");
|
||||
|
||||
val.setContent(listSchemas.get(i));
|
||||
|
||||
map.put(String.valueOf(i), val);
|
||||
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In ListSchemas->getting schema's name: "
|
||||
// + val.getContent());
|
||||
|
||||
}
|
||||
|
||||
} catch (HibernateException h) {
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas-> ERROR " + h.getMessage());
|
||||
throw h;
|
||||
}
|
||||
|
||||
catch (IllegalStateException e) {
|
||||
// e.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas-> ERROR " + e.getMessage());
|
||||
|
||||
throw e;
|
||||
|
||||
} catch (DiscoveryException e1) {
|
||||
// e1.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas-> ERROR " + e1.getMessage());
|
||||
|
||||
throw e1;
|
||||
|
||||
} catch (InvalidResultException e2) {
|
||||
// e2.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas-> ERROR " + e2.getMessage());
|
||||
|
||||
throw e2;
|
||||
|
||||
} catch (IOException e3) {
|
||||
// e3.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas-> Exception " + e3.getMessage());
|
||||
|
||||
throw e3;
|
||||
}
|
||||
|
||||
catch (Exception e4) {
|
||||
// e4.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas-> Exception " + e4.getMessage());
|
||||
|
||||
throw e4;
|
||||
|
||||
} finally {
|
||||
// close the connection
|
||||
mgt.closeConnection();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setInputParameters() {
|
||||
|
||||
// AnalysisLogger.getLogger().debug("In ListSchemas->setting inputs");
|
||||
|
||||
// resource and database's name specified by the user
|
||||
|
||||
addStringInput("ResourceName", "The name of the resource", "");
|
||||
addStringInput("DatabaseName", "The name of the database", "");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
AnalysisLogger.getLogger().debug("In ListSchemas->Shutdown");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListSchemas->retrieving outputs");
|
||||
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap" + UUID.randomUUID(),
|
||||
"Results Map");
|
||||
|
||||
return output;
|
||||
|
||||
}
|
||||
|
||||
// method that retrieves the schema's list
|
||||
private List<String> getSchemas() throws Exception {
|
||||
|
||||
List<String> listSchemas = new ArrayList<String>();
|
||||
|
||||
try {
|
||||
|
||||
listSchemas = mgt.getSchemas();
|
||||
|
||||
} catch (Exception e) {
|
||||
|
||||
// e.printStackTrace();
|
||||
|
||||
// System.out.println(e.getMessage());
|
||||
|
||||
throw e;
|
||||
|
||||
}
|
||||
|
||||
// finally {
|
||||
// if (sf.isClosed() == false) {
|
||||
// mgt.closeConnection();
|
||||
// }
|
||||
// }
|
||||
|
||||
return listSchemas;
|
||||
|
||||
}
|
||||
|
||||
// method that retrieves information useful for the connection
|
||||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
// the user specifies the resource and the database's name
|
||||
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
throw new Exception("Warning: insert the database name");
|
||||
}
|
||||
|
||||
// retrieve the chosen resource
|
||||
DatabasesDiscoverer discovery = new DatabasesDiscoverer();
|
||||
|
||||
// list that contains information useful for the connection
|
||||
List<String> info = new ArrayList<String>();
|
||||
|
||||
// try{
|
||||
|
||||
List<DBResource> resources = discovery.discover();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->number of database resources: "
|
||||
+ resources.size());
|
||||
|
||||
for (int i = 0; i < resources.size(); i++) {
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->Resource's name: "
|
||||
+ resources.get(i).getResourceName());
|
||||
|
||||
}
|
||||
|
||||
check: for (int i = 0; i < resources.size(); i++) {
|
||||
|
||||
if (resources.get(i).getResourceName().toLowerCase()
|
||||
.equals(resourceName.toLowerCase())) {
|
||||
|
||||
normalizeDBInfo(resources.get(i));
|
||||
|
||||
for (int j = 0; j < resources.get(i).getAccessPoints().size(); j++) {
|
||||
|
||||
// if (resources.get(i).getAccessPoints().get(j)
|
||||
// .getDatabaseName().equals(databaseName)) {
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getUsername());
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getPassword());
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getDriver());
|
||||
//
|
||||
// // driverInfo =
|
||||
// // resources.get(i).getAccessPoints().get(j)
|
||||
// // .getDriver();
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getDialect());
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .address());
|
||||
//
|
||||
// info.add(databaseName);
|
||||
//
|
||||
// break check;
|
||||
//
|
||||
// }
|
||||
|
||||
// if (resources.get(i).getAccessPoints().get(j)
|
||||
// .address().equals(url)){
|
||||
//
|
||||
// System.out.println("url selezionato");
|
||||
//
|
||||
//
|
||||
//
|
||||
// }
|
||||
|
||||
if (resources.get(i).getAccessPoints().get(j)
|
||||
.getDatabaseName().toLowerCase()
|
||||
.equals(databaseName.toLowerCase())) {
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getUsername());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->username: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getUsername());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getPassword());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->password: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getPassword());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->driver: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDriver());
|
||||
|
||||
// driverInfo =
|
||||
// resources.get(i).getAccessPoints().get(j)
|
||||
// .getDriver();
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDialect());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->dialect: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDialect());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.address());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->url: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).address());
|
||||
|
||||
info.add(databaseName);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->databasename: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDatabaseName());
|
||||
|
||||
break check;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->information useful for connection: retrieved");
|
||||
|
||||
// }
|
||||
// catch(IllegalStateException e)
|
||||
// {
|
||||
// // e.printStackTrace();
|
||||
// throw e;
|
||||
// }
|
||||
// catch(DiscoveryException e1)
|
||||
// {
|
||||
// e1.printStackTrace();
|
||||
// throw e1;
|
||||
// }
|
||||
// catch(InvalidResultException e2)
|
||||
// {
|
||||
// e2.printStackTrace();
|
||||
// throw e2;
|
||||
// }
|
||||
|
||||
return info;
|
||||
|
||||
}
|
||||
|
||||
// method that allows to create the connection
|
||||
private void getConnection(List<String> Info) throws IOException {
|
||||
|
||||
// create the connection
|
||||
Iterator<String> iterator = Info.iterator();
|
||||
|
||||
String DatabaseUserName = iterator.next();
|
||||
String DatabasePassword = iterator.next();
|
||||
String DatabaseDriver = iterator.next();
|
||||
String DatabaseDialect = iterator.next();
|
||||
String DatabaseURL = iterator.next();
|
||||
String DatabaseName = iterator.next();
|
||||
|
||||
mgt.createConnection(DatabaseUserName,
|
||||
DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
|
||||
DatabaseName);
|
||||
|
||||
// if (sf.isClosed()){
|
||||
|
||||
// AnalysisLogger.getLogger().debug("In ListSchemas->database "+DatabaseName+": connected");
|
||||
|
||||
// }
|
||||
}
|
||||
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListSchemas->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
//
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
//
|
||||
//// try {
|
||||
// resource.normalize(i);
|
||||
//// } catch (IOException e) {
|
||||
//
|
||||
// // e.printStackTrace();
|
||||
//// AnalysisLogger.getLogger().debug(
|
||||
//// "In ListTables->: Error in normalization process"
|
||||
//// + e.getMessage());
|
||||
//
|
||||
//// throw e;
|
||||
//
|
||||
//// }
|
||||
//
|
||||
// }
|
||||
}
|
||||
|
||||
}
|
|
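GetTableDetails, ListSchemas and ListTables (below) each repeat the same resource and access-point lookup inside retrieveInfo(). Purely as an illustration, and assuming the DBResource and AccessPoint getters used elsewhere in this diff are all that is needed, the lookup could be factored into a shared helper such as the hypothetical class below, which is not part of this commit.

import java.util.List;

import org.gcube.dataaccess.databases.resources.DBResource;
import org.gcube.dataaccess.databases.resources.DBResource.AccessPoint;

/** Hypothetical shared lookup for the resource/access-point matching repeated across the algorithms. */
public class AccessPointLookup {

    // Returns the access point of the requested database inside the requested resource, or null.
    public static AccessPoint find(List<DBResource> resources, String resourceName,
            String databaseName) throws Exception {
        for (DBResource resource : resources) {
            if (!resource.getResourceName().equalsIgnoreCase(resourceName)) {
                continue;
            }
            // same normalization step the algorithms apply before reading the access points
            for (int i = 0; i < resource.getAccessPoints().size(); i++) {
                resource.normalize(i);
            }
            for (AccessPoint accessPoint : resource.getAccessPoints()) {
                if (accessPoint.getDatabaseName().equalsIgnoreCase(databaseName)) {
                    return accessPoint;
                }
            }
        }
        return null;
    }
}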
@ -0,0 +1,460 @@
|
|||
package org.gcube.dataaccess.algorithms.drmalgorithms;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
|
||||
import org.gcube.dataaccess.databases.resources.DBResource;
|
||||
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryException;
|
||||
import org.gcube.resources.discovery.client.api.InvalidResultException;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
/** Class that retrieves the table names of a chosen database */
|
||||
public class ListTables extends StandardLocalExternalAlgorithm {
|
||||
|
||||
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
private DatabaseManagement mgt;
|
||||
// variable that keeps track of the database's type
|
||||
private String driverInfo;
|
||||
|
||||
// database's parameters specified by the user
|
||||
private String resourceName = null;
|
||||
private String databaseName = null;
|
||||
private String schemaName = null;
|
||||
|
||||
private SessionFactory sf;
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
mgt = new DatabaseManagement(config.getConfigPath());
|
||||
AnalysisLogger.getLogger().debug("In ListTables->Initialization");
|
||||
|
||||
String scope = config.getGcubeScope();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->scope set by config object: " + scope);
|
||||
|
||||
if (scope == null || scope.length() == 0) {
|
||||
|
||||
scope = ScopeProvider.instance.get();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->scope set by ScopeProvider: " + scope);
|
||||
|
||||
} else {
|
||||
|
||||
ScopeProvider.instance.set(scope);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
// add a simple description for the algorithm
|
||||
|
||||
return "Algorithm that allows to view the table names of a chosen database";
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process() throws Exception, IOException,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException,
|
||||
HibernateException {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListTables->Processing");
|
||||
|
||||
try {
|
||||
|
||||
// retrieve information useful for the connection
|
||||
|
||||
List<String> Info = retrieveInfo();
|
||||
|
||||
// create the connection
|
||||
getConnection(Info);
|
||||
|
||||
// get the tables' list
|
||||
|
||||
List<String> listTables = new ArrayList<String>();
|
||||
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
|
||||
schemaName = getInputParameter("SchemaName");
|
||||
if(schemaName != null){
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
}
|
||||
if ((schemaName == null) || (schemaName.equals(""))) {
|
||||
throw new Exception("Warning: insert the schema name");
|
||||
}
|
||||
|
||||
// if (!schemaName.equals("")) {
|
||||
|
||||
listTables = mgt.getTables(databaseName, schemaName);
|
||||
|
||||
AnalysisLogger
|
||||
.getLogger()
|
||||
.debug("In ListTables->getting table's name for database postgres");
|
||||
|
||||
// }
|
||||
|
||||
}
|
||||
|
||||
if (driverInfo.toLowerCase().contains("mysql")) {
|
||||
|
||||
listTables = mgt.getTables(databaseName, null);
|
||||
|
||||
AnalysisLogger
|
||||
.getLogger()
|
||||
.debug("In ListTables->getting table's name for database mysql");
|
||||
|
||||
}
|
||||
|
||||
// if (listTables.size()==0){
|
||||
//
|
||||
// AnalysisLogger.getLogger().debug("In ListTables->Warning: no table available");
|
||||
//
|
||||
// }
|
||||
|
||||
// TODO: handle the Oracle database type as well
|
||||
|
||||
if (listTables == null) {
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->Warning: no tables available");
|
||||
|
||||
} else {
|
||||
|
||||
for (int i = 0; i < listTables.size(); i++) {
|
||||
|
||||
PrimitiveType val = new PrimitiveType(
|
||||
String.class.getName(), null,
|
||||
PrimitiveTypes.STRING, "Table's name",
|
||||
"Table's name");
|
||||
|
||||
val.setContent(listTables.get(i));
|
||||
|
||||
map.put(String.valueOf(i), val);
|
||||
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In ListTables->getting table's name: "
|
||||
// + val.getContent());
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
} catch (HibernateException h) {
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables-> ERROR " + h.getMessage());
|
||||
throw h;
|
||||
} catch (IllegalStateException e) {
|
||||
// e.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables-> ERROR " + e.getMessage());
|
||||
throw e;
|
||||
|
||||
} catch (DiscoveryException e1) {
|
||||
// e1.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables-> ERROR " + e1.getMessage());
|
||||
|
||||
throw e1;
|
||||
|
||||
} catch (InvalidResultException e2) {
|
||||
// e2.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables-> ERROR " + e2.getMessage());
|
||||
|
||||
throw e2;
|
||||
|
||||
} catch (IOException e3) {
|
||||
// e3.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables-> Exception " + e3.getMessage());
|
||||
|
||||
throw e3;
|
||||
}
|
||||
|
||||
catch (Exception e4) {
|
||||
// e4.printStackTrace();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables-> Exception " + e4.getMessage());
|
||||
|
||||
throw e4;
|
||||
|
||||
} finally {
|
||||
// close the connection
|
||||
mgt.closeConnection();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setInputParameters() {
|
||||
|
||||
// AnalysisLogger.getLogger().debug("In ListTables->setting inputs");
|
||||
|
||||
// parameters specified by the user
|
||||
addStringInput("ResourceName", "The name of the resource", "");
|
||||
addStringInput("DatabaseName", "The name of the database", "");
|
||||
addStringInput("SchemaName", "The name of the schema", "");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
AnalysisLogger.getLogger().debug("In ListTables->Shutdown");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In ListTables->retrieving outputs");
|
||||
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap" + UUID.randomUUID(),
|
||||
"Results Map");
|
||||
|
||||
return output;
|
||||
|
||||
}
|
||||
|
||||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
// parameters specified by the user
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
throw new Exception("Warning: insert the database name");
|
||||
}
|
||||
|
||||
// retrieve the chosen resource
|
||||
DatabasesDiscoverer discovery = new DatabasesDiscoverer();
|
||||
|
||||
List<DBResource> resources = discovery.discover();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->number of database resources: "
|
||||
+ resources.size());
|
||||
|
||||
for (int i = 0; i < resources.size(); i++) {
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->Resource's name: "
|
||||
+ resources.get(i).getResourceName());
|
||||
|
||||
}
|
||||
|
||||
// list that contains information useful for the connection
|
||||
List<String> info = new ArrayList<String>();
|
||||
|
||||
check: for (int i = 0; i < resources.size(); i++) {
|
||||
|
||||
if (resources.get(i).getResourceName().toLowerCase()
|
||||
.equals(resourceName.toLowerCase())) {
|
||||
|
||||
normalizeDBInfo(resources.get(i));
|
||||
|
||||
for (int j = 0; j < resources.get(i).getAccessPoints().size(); j++) {
|
||||
|
||||
// if (resources.get(i).getAccessPoints().get(j)
|
||||
// .getDatabaseName().equals(databaseName)) {
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getUsername());
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getPassword());
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getDriver());
|
||||
//
|
||||
// // driverInfo =
|
||||
// // resources.get(i).getAccessPoints().get(j)
|
||||
// // .getDriver();
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .getDialect());
|
||||
//
|
||||
// info.add(resources.get(i).getAccessPoints().get(j)
|
||||
// .address());
|
||||
//
|
||||
// info.add(databaseName);
|
||||
//
|
||||
// break check;
|
||||
//
|
||||
// }
|
||||
|
||||
// if (resources.get(i).getAccessPoints().get(j)
|
||||
// .address().equals(url)){
|
||||
//
|
||||
// System.out.println("url selezionato");
|
||||
//
|
||||
//
|
||||
//
|
||||
// }
|
||||
|
||||
if (resources.get(i).getAccessPoints().get(j)
|
||||
.getDatabaseName().toLowerCase()
|
||||
.equals(databaseName.toLowerCase())) {
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getUsername());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->username: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getUsername());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getPassword());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->password: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getPassword());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver());
|
||||
|
||||
driverInfo = resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->driver: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDriver());
|
||||
|
||||
// driverInfo =
|
||||
// resources.get(i).getAccessPoints().get(j)
|
||||
// .getDriver();
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDialect());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->dialect: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDialect());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.address());
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->url: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).address());
|
||||
|
||||
info.add(databaseName);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->databasename: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDatabaseName());
|
||||
|
||||
break check;
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->information useful for connection: retrieved");
|
||||
|
||||
return info;
|
||||
|
||||
}
|
||||
|
||||
private void getConnection(List<String> Info) throws IOException {
|
||||
|
||||
// create the connection
|
||||
Iterator<String> iterator = Info.iterator();
|
||||
|
||||
String DatabaseUserName = iterator.next();
|
||||
String DatabasePassword = iterator.next();
|
||||
String DatabaseDriver = iterator.next();
|
||||
String DatabaseDialect = iterator.next();
|
||||
String DatabaseURL = iterator.next();
|
||||
String DatabaseName = iterator.next();
|
||||
|
||||
mgt.createConnection(DatabaseUserName,
|
||||
DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
|
||||
DatabaseName);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->database " + DatabaseName + ": connected");
|
||||
}
|
||||
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In ListTables->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
//
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
//
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
// // e.printStackTrace();
|
||||
//
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In ListTables->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
//
|
||||
// throw e;
|
||||
// }
|
||||
//
|
||||
// }
|
||||
}
|
||||
|
||||
}
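// A minimal usage sketch (not part of the algorithm itself): the class is
// normally driven by the Statistical Manager. Assuming the standard
// ecological-engine AlgorithmConfiguration API, it could be exercised
// roughly as follows; the parameter names match setInputParameters above,
// while the resource/database values and the driver calls are assumptions:
//
// AlgorithmConfiguration cfg = new AlgorithmConfiguration();
// cfg.setConfigPath("./cfg/");
// cfg.setGcubeScope("/gcube/devsec");
// cfg.setParam("ResourceName", "someResource");
// cfg.setParam("DatabaseName", "someDatabase");
// cfg.setParam("SchemaName", "public");
// ListTables algorithm = new ListTables();
// algorithm.setConfiguration(cfg);
// algorithm.init();
// algorithm.process();
// StatisticalType result = algorithm.getOutput();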
|
|
@ -0,0 +1,411 @@
|
|||
package org.gcube.dataaccess.algorithms.drmalgorithms;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
|
||||
import org.gcube.dataaccess.databases.resources.DBResource;
|
||||
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryException;
|
||||
import org.gcube.resources.discovery.client.api.InvalidResultException;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
/**
|
||||
* Class that allows performing a random sample operation on a table of a chosen
|
||||
* database. It retrieves 100 rows of a table randomly.
|
||||
*/
|
||||
|
||||
public class RandomSampleOnTable extends StandardLocalExternalAlgorithm {
|
||||
|
||||
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
|
||||
// object that allows to manage some operations on a database
|
||||
private DatabaseManagement mgt;
|
||||
// database's parameters specified by the user
|
||||
private String resourceName = null;
|
||||
private String databaseName = null;
|
||||
private String schemaName = null;
|
||||
private String tableName = null;
|
||||
|
||||
private SessionFactory sf;
|
||||
// variable that keeps track of the driver information
|
||||
private String driverInfo;
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
|
||||
mgt = new DatabaseManagement(config.getConfigPath());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSmartSampleOnTable->Initialization");
|
||||
|
||||
String scope = config.getGcubeScope();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSmartSampleOnTable->scope set by config object: "
|
||||
+ scope);
|
||||
|
||||
if (scope == null || scope.length() == 0) {
|
||||
scope = ScopeProvider.instance.get();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSmartSampleOnTable->scope set by ScopeProvider: "
|
||||
+ scope);
|
||||
} else {
|
||||
ScopeProvider.instance.set(scope);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "Algorithm that allows to perform a sample operation on a table randomly";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process() throws Exception {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSmartSampleOnTable->Processing");
|
||||
|
||||
try {
|
||||
// retrieve information
|
||||
List<String> Info = retrieveInfo();
|
||||
|
||||
// check on table name field
|
||||
tableName = getInputParameter("TableName");
|
||||
if(tableName != null){
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
}
|
||||
if ((tableName == null) || (tableName.equals(""))) {
|
||||
throw new Exception("Warning: insert the table name");
|
||||
}
|
||||
|
||||
// check on schema name field
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
schemaName = getInputParameter("SchemaName");
|
||||
if(schemaName != null){
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
}
|
||||
if ((schemaName == null) || (schemaName.equals(""))) {
|
||||
throw new Exception("Warning: insert the schema name");
|
||||
}
|
||||
}
|
||||
|
||||
// create the connection
|
||||
getConnection(Info);
|
||||
|
||||
// random sample operation on the table
|
||||
map = randomSampleOnTable();
|
||||
|
||||
} catch (HibernateException h) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSmartSampleOnTable-> ERROR " + h.getMessage());
|
||||
throw h;
|
||||
}
|
||||
|
||||
catch (IllegalStateException e) {
|
||||
// e.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSmartSampleOnTable-> ERROR " + e.getMessage());
|
||||
throw e;
|
||||
|
||||
} catch (DiscoveryException e1) {
|
||||
// e1.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSmartSampleOnTable-> ERROR " + e1.getMessage());
|
||||
throw e1;
|
||||
|
||||
} catch (InvalidResultException e2) {
|
||||
// e2.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSmartSampleOnTable-> ERROR " + e2.getMessage());
|
||||
throw e2;
|
||||
|
||||
} catch (IOException e3) {
|
||||
// e3.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSmartSampleOnTable-> ERROR " + e3.getMessage());
|
||||
throw e3;
|
||||
}
|
||||
|
||||
catch (Exception e4) {
|
||||
// e4.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSmartSampleOnTable-> Exception "
|
||||
+ e4.getMessage());
|
||||
throw e4;
|
||||
|
||||
} finally {
|
||||
// close the connection
|
||||
mgt.closeConnection();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setInputParameters() {
|
||||
// parameters specified by the user
|
||||
addStringInput("ResourceName", "The name of the resource", "");
|
||||
addStringInput("DatabaseName", "The name of the database", "");
|
||||
addStringInput("SchemaName", "The name of the schema", "");
|
||||
addStringInput("TableName", "The name of the table", "");
|
||||
}
|
||||
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->retrieving outputs");
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap" + UUID.randomUUID(),
|
||||
"Results Map");
|
||||
return output;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
AnalysisLogger.getLogger().debug("In RandomSampleOnTable->Shutdown");
|
||||
}
|
||||
|
||||
// Method that retrieves the information needed for the connection
|
||||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
throw new Exception("Warning: insert the database name");
|
||||
}
|
||||
|
||||
// retrieve the chosen resource
|
||||
DatabasesDiscoverer discovery = new DatabasesDiscoverer();
|
||||
List<DBResource> resources = discovery.discover();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->number of elements: "
|
||||
+ resources.size());
|
||||
|
||||
// list that contains information useful for the connection
|
||||
List<String> info = new ArrayList<String>();
|
||||
|
||||
check: for (int i = 0; i < resources.size(); i++) {
|
||||
if (resources.get(i).getResourceName().toLowerCase()
|
||||
.equals(resourceName.toLowerCase())) {
|
||||
normalizeDBInfo(resources.get(i));
|
||||
for (int j = 0; j < resources.get(i).getAccessPoints().size(); j++) {
|
||||
|
||||
if (resources.get(i).getAccessPoints().get(j)
|
||||
.getDatabaseName().toLowerCase()
|
||||
.equals(databaseName.toLowerCase())) {
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getUsername());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->username: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getUsername());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getPassword());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->password: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getPassword());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver());
|
||||
driverInfo = resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->driver: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDriver());
|
||||
|
||||
// driverInfo =
|
||||
// resources.get(i).getAccessPoints().get(j)
|
||||
// .getDriver();
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDialect());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->dialect: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDialect());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.address());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->url: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).address());
|
||||
|
||||
info.add(databaseName);
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->databasename: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDatabaseName());
|
||||
break check;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
AnalysisLogger
|
||||
.getLogger()
|
||||
.debug("In RandomSampleOnTable->information useful for connection: retrieved");
|
||||
return info;
|
||||
}
|
||||
|
||||
// to normalize the information related to a database
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
// // e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In RandomSampleOnTable->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
// throw e;
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
// create the database's connection
|
||||
private void getConnection(List<String> Info) throws IOException {
|
||||
|
||||
// create the connection
|
||||
Iterator<String> iterator = Info.iterator();
|
||||
|
||||
String DatabaseUserName = iterator.next();
|
||||
String DatabasePassword = iterator.next();
|
||||
String DatabaseDriver = iterator.next();
|
||||
String DatabaseDialect = iterator.next();
|
||||
String DatabaseURL = iterator.next();
|
||||
String DatabaseName = iterator.next();
|
||||
|
||||
mgt.createConnection(DatabaseUserName,
|
||||
DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
|
||||
DatabaseName);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->database " + DatabaseName
|
||||
+ ": connected");
|
||||
|
||||
}
|
||||
|
||||
// to perform the sample operation on the table randomly
|
||||
private LinkedHashMap<String, StatisticalType> randomSampleOnTable()
|
||||
throws Exception {
|
||||
|
||||
LinkedHashMap<String, StatisticalType> mapResults = new LinkedHashMap<String, StatisticalType>();
|
||||
AnalysisLogger
|
||||
.getLogger()
|
||||
.debug("In RandomSampleOnTable->starting the sample operation on table randomly");
|
||||
|
||||
// sample on table operation
|
||||
// List<Object> resultSet = null;
|
||||
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
// for a postgres database the second parameter is the schema name
|
||||
// resultSet = mgt.randomSampleOnTable(tableName, schemaName,
|
||||
// config.getPersistencePath());
|
||||
mgt.randomSampleOnTable(tableName, schemaName,
|
||||
config.getPersistencePath());
|
||||
}
|
||||
|
||||
if (driverInfo.toLowerCase().contains("mysql")) {
|
||||
// for a mysql database the second parameter is the database name
|
||||
// resultSet = mgt.randomSampleOnTable(tableName, databaseName,
|
||||
// config.getPersistencePath());
|
||||
mgt.randomSampleOnTable(tableName, databaseName,
|
||||
config.getPersistencePath());
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In RandomSampleOnTable->result retrieved");
|
||||
|
||||
// to add the results to the variable map
|
||||
// to add the map
|
||||
|
||||
HashMap<String, String> mapResult = new HashMap<String, String>();
|
||||
mapResult = mgt.getMapSampleTableResult();
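// the returned map is expected to hold the column headers under the
// "HEADERS" key and the sampled rows under the string keys "0", "1", ...;
// the loop below re-encodes each entry as UTF-8 before exposing it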
|
||||
|
||||
String encoded = null;
|
||||
encoded = new String(mapResult.get("HEADERS").getBytes(), "UTF-8");
|
||||
|
||||
// // check the encoded value
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In RandomSampleOnTable->string encoded: " + encoded);
|
||||
|
||||
PrimitiveType val = new PrimitiveType(String.class.getName(), encoded,
|
||||
PrimitiveTypes.STRING, "Row", "Row");
|
||||
mapResults.put("HEADERS", val);
|
||||
|
||||
for (int i = 0; i < mapResult.size() - 1; i++) {
|
||||
encoded = new String(mapResult.get(String.valueOf(i)).getBytes(),
|
||||
"UTF-8");
|
||||
|
||||
// // check the encoded value
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In RandomSampleOnTable->string encoded: " + encoded);
|
||||
|
||||
PrimitiveType val1 = new PrimitiveType(String.class.getName(),
|
||||
encoded, PrimitiveTypes.STRING, "Row", "Row");
|
||||
mapResults.put(String.valueOf(i), val1);
|
||||
// //check value contained in map
|
||||
// String value = (String) val1.getContent();
|
||||
//
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In RandomSampleOnTable->value: " + value);
|
||||
}
|
||||
|
||||
// to add the file
|
||||
PrimitiveType fileResult = new PrimitiveType(File.class.getName(),
|
||||
mgt.getFile(), PrimitiveTypes.FILE, "File",
|
||||
"File");
|
||||
mapResults.put("File", fileResult);
|
||||
|
||||
return mapResults;
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,407 @@
|
|||
package org.gcube.dataaccess.algorithms.drmalgorithms;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLDecoder;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
|
||||
import org.gcube.dataaccess.databases.resources.DBResource;
|
||||
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryException;
|
||||
import org.gcube.resources.discovery.client.api.InvalidResultException;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
/**
|
||||
* Class that allows performing a sample operation on a table of a chosen
|
||||
* database. It retrieves the first 100 rows of a table.
|
||||
*/
|
||||
public class SampleOnTable extends StandardLocalExternalAlgorithm {
|
||||
|
||||
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
|
||||
// object that allows to manage some operations on a database
|
||||
private DatabaseManagement mgt;
|
||||
// database's parameters specified by the user
|
||||
private String resourceName = null;
|
||||
private String databaseName = null;
|
||||
private String schemaName = null;
|
||||
private String tableName = null;
|
||||
|
||||
private SessionFactory sf;
|
||||
// variable that keeps track of the driver information
|
||||
private String driverInfo;
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
|
||||
mgt = new DatabaseManagement(config.getConfigPath());
|
||||
AnalysisLogger.getLogger().debug("In SampleOnTable->Initialization");
|
||||
|
||||
String scope = config.getGcubeScope();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->scope set by config object: " + scope);
|
||||
|
||||
if (scope == null || scope.length() == 0) {
|
||||
scope = ScopeProvider.instance.get();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->scope set by ScopeProvider: " + scope);
|
||||
} else {
|
||||
ScopeProvider.instance.set(scope);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "Algorithm that allows to perform a sample operation on a table";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process() throws Exception {
|
||||
AnalysisLogger.getLogger().debug("In SampleOnTable->Processing");
|
||||
|
||||
try {
|
||||
// retrieve information
|
||||
List<String> Info = retrieveInfo();
|
||||
|
||||
// check on table name field
|
||||
tableName = getInputParameter("TableName");
|
||||
if(tableName != null){
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
}
|
||||
if ((tableName == null) || (tableName.equals(""))) {
|
||||
throw new Exception("Warning: insert the table name");
|
||||
}
|
||||
|
||||
// check on schema name field
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
schemaName = getInputParameter("SchemaName");
|
||||
if(schemaName != null){
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
}
|
||||
if ((schemaName == null) || (schemaName.equals(""))) {
|
||||
throw new Exception("Warning: insert the schema name");
|
||||
}
|
||||
}
|
||||
|
||||
// create the connection
|
||||
getConnection(Info);
|
||||
|
||||
// sample operation on table
|
||||
map = sampleOnTable();
|
||||
|
||||
} catch (HibernateException h) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable-> ERROR " + h.getMessage());
|
||||
throw h;
|
||||
}
|
||||
|
||||
catch (IllegalStateException e) {
|
||||
// e.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable-> ERROR " + e.getMessage());
|
||||
throw e;
|
||||
|
||||
} catch (DiscoveryException e1) {
|
||||
// e1.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable-> ERROR " + e1.getMessage());
|
||||
throw e1;
|
||||
|
||||
} catch (InvalidResultException e2) {
|
||||
// e2.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable-> ERROR " + e2.getMessage());
|
||||
throw e2;
|
||||
|
||||
} catch (IOException e3) {
|
||||
// e3.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable-> ERROR " + e3.getMessage());
|
||||
throw e3;
|
||||
}
|
||||
|
||||
catch (Exception e4) {
|
||||
// e4.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable-> Exception " + e4.getMessage());
|
||||
throw e4;
|
||||
|
||||
} finally {
|
||||
// close the connection
|
||||
mgt.closeConnection();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setInputParameters() {
|
||||
// parameters specified by the user
|
||||
addStringInput("ResourceName", "The name of the resource", "");
|
||||
addStringInput("DatabaseName", "The name of the database", "");
|
||||
addStringInput("SchemaName", "The name of the schema", "");
|
||||
addStringInput("TableName", "The name of the table", "");
|
||||
}
|
||||
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
AnalysisLogger.getLogger()
|
||||
.debug("In SampleOnTable->retrieving outputs");
|
||||
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap"+UUID.randomUUID(), "Results Map");
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
AnalysisLogger.getLogger().debug("In SampleOnTable->Shutdown");
|
||||
|
||||
}
|
||||
|
||||
// Method that retrieves the information needed for the connection
|
||||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
throw new Exception("Warning: insert the database name");
|
||||
}
|
||||
|
||||
// retrieve the chosen resource
|
||||
DatabasesDiscoverer discovery = new DatabasesDiscoverer();
|
||||
List<DBResource> resources = discovery.discover();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->number of elements: " + resources.size());
|
||||
|
||||
// list that contains information useful for the connection
|
||||
List<String> info = new ArrayList<String>();
|
||||
|
||||
check: for (int i = 0; i < resources.size(); i++) {
|
||||
if (resources.get(i).getResourceName().toLowerCase()
|
||||
.equals(resourceName.toLowerCase())) {
|
||||
normalizeDBInfo(resources.get(i));
|
||||
for (int j = 0; j < resources.get(i).getAccessPoints().size(); j++) {
|
||||
|
||||
if (resources.get(i).getAccessPoints().get(j)
|
||||
.getDatabaseName().toLowerCase()
|
||||
.equals(databaseName.toLowerCase())) {
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getUsername());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->username: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getUsername());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getPassword());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->password: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getPassword());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver());
|
||||
driverInfo = resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->driver: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDriver());
|
||||
|
||||
// driverInfo =
|
||||
// resources.get(i).getAccessPoints().get(j)
|
||||
// .getDriver();
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDialect());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->dialect: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDialect());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.address());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->url: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).address());
|
||||
|
||||
info.add(databaseName);
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->databasename: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDatabaseName());
|
||||
break check;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
AnalysisLogger
|
||||
.getLogger()
|
||||
.debug("In SampleOnTable->information useful for connection: retrieved");
|
||||
|
||||
return info;
|
||||
}
|
||||
|
||||
// to normalize the information related to a database
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
// // e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SampleOnTable->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
// throw e;
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
// create the database's connection
|
||||
private void getConnection(List<String> Info) throws IOException {
|
||||
|
||||
// create the connection
|
||||
Iterator<String> iterator = Info.iterator();
|
||||
|
||||
String DatabaseUserName = iterator.next();
|
||||
String DatabasePassword = iterator.next();
|
||||
String DatabaseDriver = iterator.next();
|
||||
String DatabaseDialect = iterator.next();
|
||||
String DatabaseURL = iterator.next();
|
||||
String DatabaseName = iterator.next();
|
||||
|
||||
mgt.createConnection(DatabaseUserName,
|
||||
DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
|
||||
DatabaseName);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->database " + DatabaseName + ": connected");
|
||||
}
|
||||
|
||||
// to perform the sample operation on the table
|
||||
private LinkedHashMap<String, StatisticalType> sampleOnTable()
|
||||
throws Exception {
|
||||
|
||||
LinkedHashMap<String, StatisticalType> mapResults = new LinkedHashMap<String, StatisticalType>();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SampleOnTable->starting the sample operation on table");
|
||||
|
||||
// sample on table operation
|
||||
// List<Object> resultSet = null;
|
||||
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
// for a postgres database the second parameter is the schema name
|
||||
// resultSet = mgt.sampleOnTable(tableName, schemaName,
|
||||
// config.getPersistencePath());
|
||||
mgt.sampleOnTable(tableName, schemaName,
|
||||
config.getPersistencePath());
|
||||
}
|
||||
|
||||
if (driverInfo.toLowerCase().contains("mysql")) {
|
||||
// for a mysql database the second parameter is the database name
|
||||
// resultSet = mgt.sampleOnTable(tableName, databaseName,
|
||||
// config.getPersistencePath());
|
||||
mgt.sampleOnTable(tableName, databaseName,
|
||||
config.getPersistencePath());
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug("In SampleOnTable->result retrieved");
|
||||
|
||||
// to add the results to the variable map
|
||||
// to add the map
|
||||
|
||||
HashMap<String, String> mapResult = new HashMap<String, String>();
|
||||
mapResult = mgt.getMapSampleTableResult();
|
||||
|
||||
String encoded = null;
|
||||
encoded = new String(mapResult.get("HEADERS").getBytes(), "UTF-8");
|
||||
|
||||
// // check encoded value
|
||||
//
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SampleOnTable->string encoded: " + encoded);
|
||||
|
||||
PrimitiveType val = new PrimitiveType(String.class.getName(), encoded,
|
||||
PrimitiveTypes.STRING, "Row", "Row");
|
||||
mapResults.put("HEADERS", val);
|
||||
|
||||
for (int i = 0; i < mapResult.size() - 1; i++) {
|
||||
encoded = new String(mapResult.get(String.valueOf(i)).getBytes(),
|
||||
"UTF-8");
|
||||
|
||||
// // check encoded value
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SampleOnTable->string encoded: " + encoded);
|
||||
|
||||
PrimitiveType val1 = new PrimitiveType(String.class.getName(),
|
||||
encoded, PrimitiveTypes.STRING, "Row", "Row");
|
||||
mapResults.put(String.valueOf(i), val1);
|
||||
// //check value contained in the map
|
||||
// String value = (String) val1.getContent();
|
||||
//
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SampleOnTable->value: " + value);
|
||||
|
||||
}
|
||||
|
||||
// to add the file
|
||||
PrimitiveType fileResult = new PrimitiveType(File.class.getName(),
|
||||
mgt.getFile(), PrimitiveTypes.FILE, "File",
|
||||
"File");
|
||||
mapResults.put("File", fileResult);
|
||||
|
||||
return mapResults;
|
||||
|
||||
}
|
||||
}
|
|
@ -0,0 +1,7 @@
|
|||
package org.gcube.dataaccess.algorithms.drmalgorithms;
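/**
 * Enumeration of the SQL dialects handled by the smart-correction option
 * (no correction, PostgreSQL, MySQL). The association with the
 * "Apply Smart Correction" parameter of the SubmitQuery algorithm is an
 * assumption based on that parameter's name.
 */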
|
||||
|
||||
public enum SmartCorrectionEnum {
|
||||
NONE,
|
||||
POSTGRES,
|
||||
MYSQL,
|
||||
}
|
|
@ -0,0 +1,409 @@
|
|||
package org.gcube.dataaccess.algorithms.drmalgorithms;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
|
||||
import org.gcube.dataaccess.databases.resources.DBResource;
|
||||
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryException;
|
||||
import org.gcube.resources.discovery.client.api.InvalidResultException;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
/**
|
||||
* Class that allows performing a smart sample operation on a table of a chosen
|
||||
* database. It randomly retrieves 100 rows of the table that have the maximum
|
||||
* number of non-null columns.
|
||||
*/
|
||||
|
||||
public class SmartSampleOnTable extends StandardLocalExternalAlgorithm {
|
||||
|
||||
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
|
||||
// object that allows to manage some operations on a database
|
||||
private DatabaseManagement mgt;
|
||||
// database's parameters specified by the user
|
||||
private String resourceName = null;
|
||||
private String databaseName = null;
|
||||
private String schemaName = null;
|
||||
private String tableName = null;
|
||||
|
||||
private SessionFactory sf;
|
||||
// variable that keeps track of the driver information
|
||||
private String driverInfo;
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
|
||||
mgt = new DatabaseManagement(config.getConfigPath());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->Initialization");
|
||||
|
||||
String scope = config.getGcubeScope();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->scope set by config object: " + scope);
|
||||
|
||||
if (scope == null || scope.length() == 0) {
|
||||
scope = ScopeProvider.instance.get();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->scope set by ScopeProvider: "
|
||||
+ scope);
|
||||
} else {
|
||||
ScopeProvider.instance.set(scope);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "Algorithm that allows to perform a smart sample operation on a table";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process() throws Exception {
|
||||
AnalysisLogger.getLogger().debug("In SmartSampleOnTable->Processing");
|
||||
|
||||
try {
|
||||
|
||||
// retrieve information
|
||||
List<String> Info = retrieveInfo();
|
||||
|
||||
// check on table name field
|
||||
tableName = getInputParameter("TableName");
|
||||
if(tableName != null){
|
||||
tableName = getInputParameter("TableName").trim();
|
||||
}
|
||||
if ((tableName == null) || (tableName.equals(""))) {
|
||||
throw new Exception("Warning: insert the table name");
|
||||
}
|
||||
|
||||
// check on schema name field
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
schemaName = getInputParameter("SchemaName");
|
||||
if(schemaName != null){
|
||||
schemaName = getInputParameter("SchemaName").trim();
|
||||
}
|
||||
if ((schemaName == null) || (schemaName.equals(""))) {
|
||||
throw new Exception("Warning: insert the schema name");
|
||||
}
|
||||
}
|
||||
|
||||
// create the connection
|
||||
getConnection(Info);
|
||||
|
||||
// smart sample operation on table
|
||||
map = smartSampleOnTable();
|
||||
|
||||
} catch (HibernateException h) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable-> ERROR " + h.getMessage());
|
||||
throw h;
|
||||
}
|
||||
|
||||
catch (IllegalStateException e) {
|
||||
// e.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable-> ERROR " + e.getMessage());
|
||||
throw e;
|
||||
|
||||
} catch (DiscoveryException e1) {
|
||||
// e1.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable-> ERROR " + e1.getMessage());
|
||||
throw e1;
|
||||
|
||||
} catch (InvalidResultException e2) {
|
||||
// e2.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable-> ERROR " + e2.getMessage());
|
||||
throw e2;
|
||||
|
||||
} catch (IOException e3) {
|
||||
// e3.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable-> ERROR " + e3.getMessage());
|
||||
throw e3;
|
||||
}
|
||||
|
||||
catch (Exception e4) {
|
||||
// e4.printStackTrace();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable-> Exception " + e4.getMessage());
|
||||
throw e4;
|
||||
|
||||
} finally {
|
||||
mgt.closeConnection();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setInputParameters() {
|
||||
// parameters specified by the user
|
||||
addStringInput("ResourceName", "The name of the resource", "");
|
||||
addStringInput("DatabaseName", "The name of the database", "");
|
||||
addStringInput("SchemaName", "The name of the schema", "");
|
||||
addStringInput("TableName", "The name of the table", "");
|
||||
}
|
||||
|
||||
public StatisticalType getOutput() {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->retrieving outputs");
|
||||
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap"+UUID.randomUUID(), "Results Map");
|
||||
return output;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
AnalysisLogger.getLogger().debug("In SmartSampleOnTable->Shutdown");
|
||||
}
|
||||
|
||||
// Method that retrieves the information needed for the connection
|
||||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
throw new Exception("Warning: insert the database name");
|
||||
}
|
||||
|
||||
// retrieve the chosen resource
|
||||
DatabasesDiscoverer discovery = new DatabasesDiscoverer();
|
||||
List<DBResource> resources = discovery.discover();
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->number of elements: "
|
||||
+ resources.size());
|
||||
|
||||
// list that contains information useful for the connection
|
||||
List<String> info = new ArrayList<String>();
|
||||
|
||||
check: for (int i = 0; i < resources.size(); i++) {
|
||||
|
||||
if (resources.get(i).getResourceName().toLowerCase()
|
||||
.equals(resourceName.toLowerCase())) {
|
||||
|
||||
normalizeDBInfo(resources.get(i));
|
||||
|
||||
for (int j = 0; j < resources.get(i).getAccessPoints().size(); j++) {
|
||||
|
||||
if (resources.get(i).getAccessPoints().get(j)
|
||||
.getDatabaseName().toLowerCase()
|
||||
.equals(databaseName.toLowerCase())) {
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getUsername());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->username: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getUsername());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getPassword());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->password: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getPassword());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver());
|
||||
driverInfo = resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->driver: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDriver());
|
||||
|
||||
// driverInfo =
|
||||
// resources.get(i).getAccessPoints().get(j)
|
||||
// .getDriver();
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDialect());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->dialect: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDialect());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.address());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->url: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).address());
|
||||
|
||||
info.add(databaseName);
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->databasename: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDatabaseName());
|
||||
break check;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
AnalysisLogger
|
||||
.getLogger()
|
||||
.debug("In SmartSampleOnTable->information useful for connection: retrieved");
|
||||
return info;
|
||||
}
|
||||
|
||||
// to normalize the information related to a database
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
// // e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SmartSampleOnTable->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
// throw e;
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
// create the database's connection
|
||||
private void getConnection(List<String> Info) throws IOException {
|
||||
|
||||
// create the connection
|
||||
Iterator<String> iterator = Info.iterator();
|
||||
|
||||
String DatabaseUserName = iterator.next();
|
||||
String DatabasePassword = iterator.next();
|
||||
String DatabaseDriver = iterator.next();
|
||||
String DatabaseDialect = iterator.next();
|
||||
String DatabaseURL = iterator.next();
|
||||
String DatabaseName = iterator.next();
|
||||
|
||||
mgt.createConnection(DatabaseUserName,
|
||||
DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
|
||||
DatabaseName);
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->database " + DatabaseName
|
||||
+ ": connected");
|
||||
}
|
||||
|
||||
// to perform the smart sample operation on the table
|
||||
private LinkedHashMap<String, StatisticalType> smartSampleOnTable()
|
||||
throws Exception {
|
||||
|
||||
LinkedHashMap<String, StatisticalType> mapResults = new LinkedHashMap<String, StatisticalType>();
|
||||
|
||||
AnalysisLogger
|
||||
.getLogger()
|
||||
.debug("In SmartSampleOnTable->starting the smart sample operation on table");
|
||||
|
||||
// sample on table operation
|
||||
// List<Object> resultSet = null;
|
||||
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
// for a postgres database the second parameter is the schema name
|
||||
// resultSet = mgt.smartSampleOnTable(tableName, schemaName,
|
||||
// config.getPersistencePath());
|
||||
mgt.smartSampleOnTable(tableName, schemaName,
|
||||
config.getPersistencePath());
|
||||
}
|
||||
|
||||
if (driverInfo.toLowerCase().contains("mysql")) {
|
||||
// for a mysql database the second parameter is the database name
|
||||
// resultSet = mgt.smartSampleOnTable(tableName, databaseName,
|
||||
// config.getPersistencePath());
|
||||
mgt.smartSampleOnTable(tableName, databaseName,
|
||||
config.getPersistencePath());
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SmartSampleOnTable->result retrieved");
|
||||
|
||||
// to add the results to the variable map
|
||||
// to add the map
|
||||
HashMap<String, String> mapResult = new HashMap<String, String>();
|
||||
mapResult = mgt.getMapSampleTableResult();
|
||||
|
||||
String encoded = null;
|
||||
encoded = new String(mapResult.get("HEADERS").getBytes(), "UTF-8");
|
||||
|
||||
// //check encoded value
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SmartSampleOnTable->string encoded: " + encoded);
|
||||
|
||||
PrimitiveType val = new PrimitiveType(String.class.getName(), encoded,
|
||||
PrimitiveTypes.STRING, "Row", "Row");
|
||||
mapResults.put("HEADERS", val);
|
||||
|
||||
for (int i = 0; i < mapResult.size() - 1; i++) {
|
||||
encoded = new String(mapResult.get(String.valueOf(i)).getBytes(),
|
||||
"UTF-8");
|
||||
|
||||
// // check encoded value
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SmartSampleOnTable->string encoded: " + encoded);
|
||||
|
||||
PrimitiveType val1 = new PrimitiveType(String.class.getName(),
|
||||
encoded, PrimitiveTypes.STRING, "Row", "Row");
|
||||
mapResults.put(String.valueOf(i), val1);
|
||||
// //check value
|
||||
// String value = (String) val1.getContent();
|
||||
//
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SmartSampleOnTable->value: " + value);
|
||||
|
||||
}
|
||||
|
||||
// to add the file
|
||||
PrimitiveType fileResult = new PrimitiveType(File.class.getName(),
|
||||
mgt.getFile(), PrimitiveTypes.FILE, "File",
|
||||
"File");
|
||||
mapResults.put("File", fileResult);
|
||||
|
||||
return mapResults;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,691 @@
|
|||
package org.gcube.dataaccess.algorithms.drmalgorithms;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.sql.Connection;
|
||||
import java.sql.DriverManager;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Timer;
|
||||
import java.util.TimerTask;
|
||||
import java.util.UUID;
|
||||
|
||||
import org.apache.commons.httpclient.UsernamePasswordCredentials;
|
||||
import org.gcube.common.scope.api.ScopeProvider;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
|
||||
import org.gcube.dataaccess.databases.lexer.MySQLLexicalAnalyzer;
|
||||
import org.gcube.dataaccess.databases.lexer.PostgresLexicalAnalyzer;
|
||||
import org.gcube.dataaccess.databases.resources.DBResource;
|
||||
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
|
||||
import org.gcube.resources.discovery.client.api.DiscoveryException;
|
||||
import org.gcube.resources.discovery.client.api.InvalidResultException;
|
||||
import org.hibernate.HibernateException;
|
||||
import org.hibernate.SessionFactory;
|
||||
|
||||
import com.adventnet.swissqlapi.sql.exception.ConvertException;
|
||||
import com.adventnet.swissqlapi.sql.parser.ParseException;
|
||||
|
||||
/**
|
||||
* Class that allows submitting a query. It retrieves the results in a file and in a
|
||||
* map.
|
||||
*/
|
||||
public class SubmitQuery extends StandardLocalExternalAlgorithm {
|
||||
|
||||
static long maximum_execution_time = 30 * 60 * 1000; // maximum query execution time (30 minutes)
|
||||
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
// object that allows to manage some operations on a database
|
||||
private DatabaseManagement mgt;
|
||||
// variable that keeps track of the database's type
|
||||
private String driverInfo;
|
||||
|
||||
private Connection dbconnection;
|
||||
// database's parameters specified by the user
|
||||
private String resourceName = null;
|
||||
private String databaseName = null;
|
||||
private String schemaName = null;
|
||||
private String tableName = null;
|
||||
private String query = null;
|
||||
|
||||
private String valueReadOnly = "Read-Only Query";
|
||||
private String smartCorrection = "Apply Smart Correction";
|
||||
private String dialect = "Language";
|
||||
|
||||
private String valueRO;
|
||||
private String valueSC;
|
||||
private String valueDialect = "";
|
||||
|
||||
// variable used to filter the disallowed queries
|
||||
private boolean NotAllowedQuery = false;
|
||||
|
||||
// timer task that stops the query execution when the maximum execution time is exceeded
|
||||
private class ExecutionStopper extends TimerTask {
|
||||
@Override
|
||||
public void run() {
|
||||
AnalysisLogger.getLogger().debug("ExecutionStopper: Stopping execution");
|
||||
shutdown();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
|
||||
mgt = new DatabaseManagement(config.getConfigPath());
|
||||
AnalysisLogger.getLogger().debug("In SubmitQuery->Initialization");
|
||||
|
||||
String scope = config.getGcubeScope();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->scope set by config object: " + scope);
|
||||
|
||||
if (scope == null || scope.length() == 0) {
|
||||
scope = ScopeProvider.instance.get();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->scope set by ScopeProvider: " + scope);
|
||||
} else {
|
||||
ScopeProvider.instance.set(scope);
|
||||
}
|
||||
|
||||
valueRO = config.getParam(valueReadOnly);
|
||||
valueSC = config.getParam(smartCorrection);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "Algorithm that allows to submit a query";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process() throws Exception, IOException,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException,
|
||||
HibernateException {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In SubmitQuery->Processing");
|
||||
|
||||
Timer stopper = new Timer();
|
||||
stopper.schedule(new ExecutionStopper(),maximum_execution_time);
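// when the timer fires, ExecutionStopper calls shutdown(), which closes the connection so that the running query is interrupted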
|
||||
|
||||
try {
|
||||
// retrieve information
|
||||
List<String> Info = retrieveInfo();
|
||||
|
||||
// create the connection
|
||||
dbconnection = getConnection(Info);
|
||||
|
||||
// submit a query
|
||||
map = submitQuery();
|
||||
|
||||
// // close the connection
|
||||
// dbconnection.close();
|
||||
|
||||
} catch (HibernateException h) {
|
||||
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SubmitQuery-> ERROR " + h.getMessage());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery-> " + h.getMessage());
|
||||
throw h;
|
||||
}
|
||||
|
||||
catch (IllegalStateException e) {
|
||||
// e.printStackTrace();
|
||||
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SubmitQuery-> ERROR " + e.getMessage());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery-> " + e.getMessage());
|
||||
throw e;
|
||||
|
||||
} catch (DiscoveryException e1) {
|
||||
// e1.printStackTrace();
|
||||
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SubmitQuery-> ERROR " + e1.getMessage());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery-> " + e1.getMessage());
|
||||
throw e1;
|
||||
|
||||
} catch (InvalidResultException e2) {
|
||||
// e2.printStackTrace();
|
||||
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SubmitQuery-> ERROR " + e2.getMessage());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery-> " + e2.getMessage());
|
||||
throw e2;
|
||||
|
||||
} catch (IOException e3) {
|
||||
// e3.printStackTrace();
|
||||
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SubmitQuery-> ERROR " + e3.getMessage());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery-> " + e3.getMessage());
|
||||
throw e3;
|
||||
}
|
||||
|
||||
catch (Exception e4) {
|
||||
// e4.printStackTrace();
|
||||
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SubmitQuery-> Exception " + e4.getMessage());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery-> " + e4.getMessage());
|
||||
throw e4;
|
||||
|
||||
} finally {
|
||||
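// the connection is also closed here on the normal path; dbconnection is set to null so that shutdown() does not try to close it a second time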
if (dbconnection!=null) {
|
||||
dbconnection.close();
|
||||
AnalysisLogger.getLogger().debug("In SubmitQuery-> Connection closed");
|
||||
dbconnection=null;
|
||||
}
|
||||
// cancel the timer if the query execution has already terminated
|
||||
if (stopper!=null){
|
||||
try{
|
||||
stopper.cancel();
|
||||
stopper.purge();
|
||||
AnalysisLogger.getLogger().debug("In SubmitQuery-> Execution stopper terminated");
|
||||
}catch(Exception e){
|
||||
AnalysisLogger.getLogger().debug("In SubmitQuery-> Could not stop execution stopper "+e.getMessage() );
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In SubmitQuery->retrieving outputs");
|
||||
// generate a primitive type for the collection
|
||||
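// the map name is suffixed with a random UUID so that each invocation produces a distinct output identifier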
PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
|
||||
map, PrimitiveTypes.MAP, "ResultsMap"+UUID.randomUUID(), "Results Map");
|
||||
return output;
|
||||
}
|
||||
|
||||
public List<StatisticalType> getInputParameters() {
|
||||
List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
|
||||
// parameters specified by the user
|
||||
PrimitiveType p0 = new PrimitiveType(String.class.getName(), "",
|
||||
PrimitiveTypes.STRING, "ResourceName",
|
||||
"The name of the resource");
|
||||
|
||||
PrimitiveType p1 = new PrimitiveType(String.class.getName(), "",
|
||||
PrimitiveTypes.STRING, "DatabaseName",
|
||||
"The name of the database");
|
||||
|
||||
PrimitiveType p2 = new PrimitiveType(Boolean.class.getName(), null,
|
||||
PrimitiveTypes.BOOLEAN, valueReadOnly,
|
||||
"Check the box if the query must be read-only", "true");
|
||||
|
||||
PrimitiveType p3 = new PrimitiveType(Boolean.class.getName(), null,
|
||||
PrimitiveTypes.BOOLEAN, smartCorrection,
|
||||
"Check the box for smart correction", "true");
|
||||
|
||||
// addEnumerateInput(SmartCorrectionEnum.values(), dialect, "Language",
|
||||
// "");
|
||||
PrimitiveType p4 = new PrimitiveType(Enum.class.getName(),
|
||||
SmartCorrectionEnum.values(), PrimitiveTypes.ENUMERATED,
|
||||
dialect, "Language", SmartCorrectionEnum.NONE.name());
|
||||
|
||||
PrimitiveType p5 = new PrimitiveType(String.class.getName(), "",
|
||||
PrimitiveTypes.STRING, "Query", "query");
|
||||
|
||||
parameters.add(p0);
|
||||
parameters.add(p1);
|
||||
parameters.add(p2);
|
||||
parameters.add(p3);
|
||||
parameters.add(p4);
|
||||
parameters.add(p5);
|
||||
|
||||
return parameters;
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setInputParameters() {
|
||||
|
||||
// AnalysisLogger.getLogger().debug("In SubmitQuery->setting inputs");
|
||||
|
||||
// parameters specified by the user
|
||||
// addStringInput("ResourceName", "The name of the resource", "");
|
||||
// addStringInput("DatabaseName", "The name of the database", "");
|
||||
|
||||
// PrimitiveType p2 = new PrimitiveType(Boolean.class.getName(), null,
|
||||
// PrimitiveTypes.BOOLEAN, valueReadOnly,
|
||||
// "Check the box if the query must be read-only","true");
|
||||
|
||||
// addEnumerateInput(SubmitQueryEnum.values(), valueReadOnly,
|
||||
// "Check the box if the query must be read-only",
|
||||
// SubmitQueryEnum.TRUE.name());
|
||||
// addStringInput("SchemaName", "The name of the schema", "");
|
||||
// addStringInput("TableName", "The name of the table", "");
|
||||
|
||||
// PrimitiveType p3 = new PrimitiveType(Boolean.class.getName(), null,
|
||||
// PrimitiveTypes.BOOLEAN, smartCorrection,
|
||||
// "Check the box for smart correction","true");
|
||||
// List<StatisticalType> parameters = new ArrayList<StatisticalType>();
|
||||
// parameters.add(p2);
|
||||
// parameters.add(p3);
|
||||
|
||||
// addEnumerateInput(
|
||||
// SubmitQueryEnum.values(),
|
||||
// smartCorrection,
|
||||
// "Check the box for smart correction",
|
||||
// SubmitQueryEnum.TRUE.name());
|
||||
|
||||
// addEnumerateInput(SmartCorrectionEnum.values(), dialect, "Language)",
|
||||
// SmartCorrectionEnum.POSTGRES.name());
|
||||
|
||||
// addEnumerateInput(SmartCorrectionEnum.values(), dialect, "Language",
|
||||
// "");
|
||||
// addEnumerateInput(SmartCorrectionEnum.values(), dialect, "Language",
|
||||
// "");
|
||||
|
||||
// addStringInput("Query", "query", "");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In SubmitQuery->Shutdown");
|
||||
try{
|
||||
if (dbconnection!=null) {
|
||||
dbconnection.close();
|
||||
AnalysisLogger.getLogger().debug("In SubmitQuery-> Connection closed");
|
||||
dbconnection=null;
|
||||
}
|
||||
}catch(Exception e){
|
||||
AnalysisLogger.getLogger().debug("In SubmitQuery->Unable to close connection "+e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// Method that retrieves the information needed to open the connection
|
||||
private List<String> retrieveInfo() throws Exception,
|
||||
IllegalStateException, DiscoveryException, InvalidResultException {
|
||||
|
||||
resourceName = getInputParameter("ResourceName");
|
||||
|
||||
if (resourceName != null) {
|
||||
resourceName = getInputParameter("ResourceName").trim();
|
||||
}
|
||||
|
||||
if ((resourceName == null) || (resourceName.equals(""))) {
|
||||
throw new Exception("Warning: insert the resource name");
|
||||
}
|
||||
|
||||
databaseName = getInputParameter("DatabaseName");
|
||||
|
||||
if (databaseName != null) {
|
||||
databaseName = getInputParameter("DatabaseName").trim();
|
||||
}
|
||||
if ((databaseName == null) || (databaseName.equals(""))) {
|
||||
throw new Exception("Warning: insert the database name");
|
||||
}
|
||||
|
||||
// retrieve the chosen resource
|
||||
DatabasesDiscoverer discovery = new DatabasesDiscoverer();
|
||||
List<DBResource> resources = discovery.discover();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->number of elements: " + resources.size());
|
||||
|
||||
// list that contains information useful for the connection
|
||||
List<String> info = new ArrayList<String>();
|
||||
|
||||
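// scan the discovered resources and their access points; the labeled break exits both loops as soon as the requested database is found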
check: for (int i = 0; i < resources.size(); i++) {
|
||||
if (resources.get(i).getResourceName().toLowerCase()
|
||||
.equals(resourceName.toLowerCase())) {
|
||||
normalizeDBInfo(resources.get(i));
|
||||
for (int j = 0; j < resources.get(i).getAccessPoints().size(); j++) {
|
||||
|
||||
if (resources.get(i).getAccessPoints().get(j)
|
||||
.getDatabaseName().toLowerCase()
|
||||
.equals(databaseName.toLowerCase())) {
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getUsername());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->username: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getUsername());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getPassword());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->password: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getPassword());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver());
|
||||
driverInfo = resources.get(i).getAccessPoints().get(j)
|
||||
.getDriver();
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->driver: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDriver());
|
||||
|
||||
// driverInfo =
|
||||
// resources.get(i).getAccessPoints().get(j)
|
||||
// .getDriver();
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.getDialect());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->dialect: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDialect());
|
||||
|
||||
info.add(resources.get(i).getAccessPoints().get(j)
|
||||
.address());
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->url: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).address());
|
||||
|
||||
info.add(databaseName);
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->databasename: "
|
||||
+ resources.get(i).getAccessPoints()
|
||||
.get(j).getDatabaseName());
|
||||
|
||||
break check;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->information useful for connection: retrieved");
|
||||
return info;
|
||||
}
|
||||
|
||||
// create the database's connection
|
||||
private Connection getConnection(List<String> Info) throws Exception {
|
||||
|
||||
// create the connection
|
||||
Iterator<String> iterator = Info.iterator();
|
||||
|
||||
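// the values must be read in the same order in which retrieveInfo() added them: username, password, driver, dialect, URL, database name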
String DatabaseUserName = iterator.next();
|
||||
String DatabasePassword = iterator.next();
|
||||
String DatabaseDriver = iterator.next();
|
||||
String DatabaseDialect = iterator.next();
|
||||
String DatabaseURL = iterator.next();
|
||||
String DatabaseName = iterator.next();
|
||||
|
||||
// Load the database driver
|
||||
Class.forName(DatabaseDriver) ;
|
||||
// Get a connection to the database
|
||||
Connection conn = DriverManager.getConnection(DatabaseURL,DatabaseUserName,DatabasePassword) ;
|
||||
if (conn!=null){
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->database " + DatabaseName + ": connected");
|
||||
}
|
||||
return conn;
|
||||
}
|
||||
|
||||
// Method that submits the query and collects the results
|
||||
private LinkedHashMap<String, StatisticalType> submitQuery()
|
||||
throws Exception, ParseException, ConvertException {
|
||||
|
||||
// LinkedHashMap<String, StatisticalType> results = new
|
||||
// LinkedHashMap<String, StatisticalType>();
|
||||
|
||||
LinkedHashMap<String, StatisticalType> mapResults = new LinkedHashMap<String, StatisticalType>();
|
||||
|
||||
query = getInputParameter("Query");
|
||||
if ((query == null) || (query.equals(""))) {
|
||||
throw new Exception("Warning: insert the query");
|
||||
}
|
||||
|
||||
// analyze the query to filter it if it is not read-only compliant
|
||||
// String valueRO = getInputParameter(valueReadOnly);
|
||||
|
||||
// //print check
|
||||
AnalysisLogger.getLogger().debug("In SubmitQuery->valueRO: " + valueRO);
|
||||
|
||||
if (valueRO.equals("true")) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->Analyzing the query: " + query);
|
||||
NotAllowedQuery = analyzeQuery(query);
|
||||
|
||||
// //print check
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SubmitQuery->NotAllowedQuery valueRO: " + NotAllowedQuery);
|
||||
}
|
||||
|
||||
if (NotAllowedQuery == false) {
|
||||
|
||||
// formatWithQuotes(query);
|
||||
|
||||
// submit query
|
||||
List<Object> result = new ArrayList<Object>();
|
||||
|
||||
// path file
|
||||
// AnalysisLogger.getLogger()
|
||||
// .debug("In SubmitQuery->path file: "
|
||||
// + config.getPersistencePath());
|
||||
|
||||
// if the user enables smart correction, a translation is
|
||||
// applied to the query
|
||||
// String valueSC = getInputParameter(smartCorrection);
|
||||
|
||||
// //print check
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->valueSC: " + valueSC);
|
||||
|
||||
// dialect to which a query is converted
|
||||
// String valueDialect = getInputParameter(dialect);
|
||||
valueDialect = getInputParameter(dialect);
|
||||
|
||||
// //print check
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->valueDialect: " + valueDialect);
|
||||
|
||||
if ((valueSC.equals("true")) && (!(valueDialect.equals("NONE")))) {
|
||||
String smartCorrectedQuery = "";
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->applying smart correction on the query: "
|
||||
+ query);
|
||||
|
||||
if (valueDialect.equals("POSTGRES")) {
|
||||
|
||||
// //print check
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SubmitQuery->query: " + query);
|
||||
//
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SubmitQuery->dialect: " +
|
||||
// DatabaseManagement.POSTGRESQLDialect);
|
||||
|
||||
// call the SwisSQL library functionality
|
||||
smartCorrectedQuery = mgt.smartCorrectionOnQuery(query,
|
||||
DatabaseManagement.POSTGRESQLDialect);
|
||||
}
|
||||
|
||||
if (valueDialect.equals("MYSQL")) {
|
||||
// call the SwisSQL library functionality
|
||||
smartCorrectedQuery = mgt.smartCorrectionOnQuery(query,
|
||||
DatabaseManagement.MYSQLDialect);
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery-> query converted: "
|
||||
+ smartCorrectedQuery);
|
||||
query = smartCorrectedQuery;
|
||||
|
||||
if (!(smartCorrectedQuery.equals(""))) {
|
||||
PrimitiveType valQuery = new PrimitiveType(
|
||||
String.class.getName(), smartCorrectedQuery,
|
||||
PrimitiveTypes.STRING, "Converted Query",
|
||||
"Query Converted");
|
||||
|
||||
mapResults.put("Query Converted", valQuery);
|
||||
}
|
||||
}
|
||||
|
||||
// else if ((valueSC.equals("true")) &&
|
||||
// (valueDialect.equals("NONE"))) {
|
||||
//
|
||||
// throw new Exception("Warning: specify the language");
|
||||
//
|
||||
// }
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->Submitting the query: " + query);
|
||||
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
|
||||
// for a postgres database the second parameter is the
|
||||
// schema
|
||||
// name
|
||||
|
||||
// result = mgt.submitQuery(query, tableName, schemaName,
|
||||
// sf, config.getPersistencePath());
|
||||
|
||||
result = mgt
|
||||
.submitQuery(query, dbconnection, config.getPersistencePath());
|
||||
}
|
||||
|
||||
if (driverInfo.toLowerCase().contains("mysql")) {
|
||||
// for a mysql database the second parameter is the database
|
||||
// name
|
||||
|
||||
// result = mgt.submitQuery(query, tableName, databaseName,
|
||||
// sf, config.getPersistencePath());
|
||||
|
||||
result = mgt
|
||||
.submitQuery(query, dbconnection, config.getPersistencePath());
|
||||
}
|
||||
|
||||
if (result == null) {
|
||||
throw new Exception("Warning: the table has not rows");
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->Query's Result retrieved");
|
||||
|
||||
HashMap<String, String> mapResult = new HashMap<String, String>();
|
||||
mapResult = mgt.getMapQueryResult();
|
||||
// System.out.println("map size alg with header: " + mapResult.size());
|
||||
|
||||
String encoded = null;
|
||||
encoded = new String(mapResult.get("HEADERS").getBytes(), "UTF-8");
|
||||
|
||||
// // check the encoded value
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In RandomSampleOnTable->string encoded: " + encoded);
|
||||
|
||||
PrimitiveType val = new PrimitiveType(String.class.getName(),
|
||||
encoded, PrimitiveTypes.STRING, "Row", "Row");
|
||||
|
||||
mapResults.put("HEADERS", val);
|
||||
|
||||
// to add the rows (result of the query)
|
||||
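// mapResult holds the "HEADERS" entry plus one entry per row keyed by its index, hence the size() - 1 iterations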
for (int i = 0; i < mapResult.size() - 1; i++) {
|
||||
encoded = new String(mapResult.get(String.valueOf(i))
|
||||
.getBytes(), "UTF-8");
|
||||
|
||||
// // check the encoded value
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In RandomSampleOnTable->string encoded: " + encoded);
|
||||
|
||||
PrimitiveType val1 = new PrimitiveType(String.class.getName(),
|
||||
encoded, PrimitiveTypes.STRING, "Row", "Row");
|
||||
mapResults.put(String.valueOf(i), val1);
|
||||
// //check value contained in map
|
||||
// String value = (String) val1.getContent();
|
||||
//
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In RandomSampleOnTable->value: " + value);
|
||||
}
|
||||
|
||||
// to add the file
|
||||
PrimitiveType fileResult = new PrimitiveType(File.class.getName(),
|
||||
mgt.getFile(), PrimitiveTypes.FILE, "File",
|
||||
"File");
|
||||
mapResults.put("File", fileResult);
|
||||
|
||||
// add the total number of rows returned by the submit query operation
|
||||
PrimitiveType totalRows = new PrimitiveType(String.class.getName(),
|
||||
String.valueOf(mgt.getSubmitQueryTotalRows()), PrimitiveTypes.STRING, "Total Rows",
|
||||
"Total Rows");
|
||||
mapResults.put("Total Rows", totalRows);
|
||||
|
||||
}
|
||||
return mapResults;
|
||||
}
|
||||
|
||||
// Method that analyzes the query and filters it if it is
|
||||
// not read-only compliant
|
||||
private boolean analyzeQuery(String query) throws Exception {
|
||||
boolean NotAllowed = false;
|
||||
// check if the query is allowed
|
||||
|
||||
// TODO: check also the oracle case
|
||||
if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
PostgresLexicalAnalyzer obj = new PostgresLexicalAnalyzer();
|
||||
NotAllowed = obj.analyze(query);
|
||||
}
|
||||
if (driverInfo.toLowerCase().contains("mysql")) {
|
||||
MySQLLexicalAnalyzer obj = new MySQLLexicalAnalyzer();
|
||||
NotAllowed = obj.analyze(query);
|
||||
}
|
||||
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->Warning: query filtered: " + NotAllowed);
|
||||
return NotAllowed;
|
||||
}
|
||||
|
||||
private void normalizeDBInfo(DBResource resource) throws Exception {
|
||||
|
||||
|
||||
try{
|
||||
int ap = resource.getAccessPoints().size();
|
||||
|
||||
for (int i = 0; i < ap; i++) {
|
||||
resource.normalize(i);
|
||||
}
|
||||
|
||||
}catch (Exception e) {
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"In SubmitQuery->: Error in normalization process"
|
||||
+ e.getMessage());
|
||||
|
||||
throw e;
|
||||
}
|
||||
|
||||
// int ap = resource.getAccessPoints().size();
|
||||
// for (int i = 0; i < ap; i++) {
|
||||
// try {
|
||||
// resource.normalize(i);
|
||||
// } catch (IOException e) {
|
||||
// // e.printStackTrace();
|
||||
// AnalysisLogger.getLogger().debug(
|
||||
// "In SubmitQuery->: Error in normalization process"
|
||||
// + e.getMessage());
|
||||
// throw e;
|
||||
// }
|
||||
// }
|
||||
}
|
||||
|
||||
// private void formatWithQuotes(String Query) {
|
||||
// if (driverInfo.toLowerCase().contains("postgres")) {
|
||||
// if (Query.contains(tableName)) {
|
||||
// query = Query.replaceAll(tableName, "\"" + tableName + "\"");
|
||||
// }
|
||||
// if (driverInfo.toLowerCase().contains("mysql")) {
|
||||
// query = Query.replaceAll(tableName, "\"" + tableName + "\"");
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
}
|
|
@ -0,0 +1,6 @@
|
|||
package org.gcube.dataaccess.algorithms.drmalgorithms;
|
||||
|
||||
public enum SubmitQueryEnum {
|
||||
TRUE,
|
||||
FALSE
|
||||
}
|
|
@ -0,0 +1,358 @@
|
|||
package org.gcube.dataaccess.algorithms.examples;
|
||||
|
||||
import java.awt.Image;
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.File;
|
||||
import java.io.FileWriter;
|
||||
import java.io.IOException;
|
||||
import java.sql.Connection;
|
||||
import java.sql.DriverManager;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.sql.Statement;
|
||||
import java.util.HashMap;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
|
||||
import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph;
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
|
||||
import org.jfree.chart.JFreeChart;
|
||||
import org.jfree.data.category.DefaultCategoryDataset;
|
||||
|
||||
public class AbsoluteSpeciesBarChartsAlgorithm extends
|
||||
StandardLocalExternalAlgorithm {
|
||||
|
||||
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
|
||||
static String databaseName = "DatabaseName";
|
||||
static String userParameterName = "DatabaseUserName";
|
||||
static String passwordParameterName = "DatabasePassword";
|
||||
static String urlParameterName = "DatabaseURL";
|
||||
|
||||
// static String databaseName = "Obis2Repository";
|
||||
// static String userParameterName = "postgres";
|
||||
// static String passwordParameterName = "0b1s@d4sc13nc3";
|
||||
// static String urlParameterName =
|
||||
// "jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis";
|
||||
|
||||
protected String fileName;
|
||||
BufferedWriter out;
|
||||
private String firstSpeciesNumber = " SpeciesNumber :";
|
||||
private String yearStart = "Start year :";
|
||||
private String yearEnd = "End year :";
|
||||
private int speciesNumber;
|
||||
String databaseJdbc;
|
||||
String year_start;
|
||||
String year_end;
|
||||
String databaseUser;
|
||||
String databasePwd;
|
||||
private Connection connection = null;
|
||||
private DefaultCategoryDataset defaultcategorydataset;
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
AnalysisLogger.getLogger().debug("Initialization");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return ": A transducer algorithm that produces the list of top n most observed taxa, i.e. the species taxa having the largest number of occurrence records, in the OBIS database in a given time interval";
|
||||
}
|
||||
|
||||
public void fulfilParameters() {
|
||||
|
||||
AnalysisLogger.getLogger().debug("fulfilParameters method");
|
||||
|
||||
// String tmp = getInputParameter(firstSpeciesNumber);
|
||||
|
||||
List<StatisticalType> list = getInputParameters();
|
||||
|
||||
System.out.println("size: " + list.size());
|
||||
|
||||
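// the parameter values are read from the default values of the declared inputs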
for (int i = 0; i < list.size(); i++) {
|
||||
|
||||
System.out.println(list.get(i).getName()+" "+list.get(i).getDefaultValue());
|
||||
|
||||
if (list.get(i).getName().equals(firstSpeciesNumber)) {
|
||||
// System.out.println(list.get(i).getName());
|
||||
String tmp = list.get(i).getDefaultValue();
|
||||
speciesNumber = Integer.parseInt(tmp);
|
||||
|
||||
}
|
||||
|
||||
if (list.get(i).getName().equals(yearStart)) {
|
||||
|
||||
year_start = list.get(i).getDefaultValue();
|
||||
|
||||
}
|
||||
|
||||
if (list.get(i).getName().equals(yearEnd)) {
|
||||
|
||||
year_end = list.get(i).getDefaultValue();
|
||||
|
||||
}
|
||||
// if (list.get(i).getName().equals(urlParameterName)) {
|
||||
//
|
||||
// databaseJdbc = list.get(i).getDefaultValue();
|
||||
//
|
||||
// }
|
||||
// if (list.get(i).getName().equals(userParameterName)) {
|
||||
//
|
||||
// databaseUser = list.get(i).getDefaultValue();
|
||||
//
|
||||
// }
|
||||
// if (list.get(i).getName().equals(passwordParameterName)) {
|
||||
//
|
||||
// databasePwd = list.get(i).getDefaultValue();
|
||||
//
|
||||
// }
|
||||
|
||||
databaseJdbc = getInputParameter("DatabaseURL");
|
||||
databaseUser= getInputParameter("DatabaseUserName");
|
||||
databasePwd= getInputParameter("DatabasePassword");
|
||||
|
||||
|
||||
}
|
||||
|
||||
// System.out.println(tmp);
|
||||
|
||||
// databaseJdbc = getInputParameter(urlParameterName);
|
||||
// year_start = getInputParameter(yearStart);
|
||||
// year_end = getInputParameter(yearEnd);
|
||||
// databaseUser = getInputParameter(userParameterName);
|
||||
// databasePwd = getInputParameter(passwordParameterName);
|
||||
|
||||
// fileName = super.config.getPersistencePath() + "results.csv";
|
||||
|
||||
fileName = config.getConfigPath() + "results.csv";
|
||||
|
||||
// fileName = "./cfg/" + "results.csv";
|
||||
|
||||
|
||||
AnalysisLogger.getLogger().debug("Percorso file: " + fileName);
|
||||
|
||||
AnalysisLogger.getLogger().debug("fulfilParameters method");
|
||||
}
|
||||
|
||||
private ResultSet performeQuery() throws SQLException {
|
||||
|
||||
connection = DriverManager.getConnection(databaseJdbc, databaseUser,
|
||||
databasePwd);
|
||||
|
||||
Statement stmt = connection.createStatement();
|
||||
String query = "SELECT tname, sum(count)AS count FROM public.count_species_per_year WHERE year::integer >="
|
||||
+ year_start
|
||||
+ "AND year::integer <="
|
||||
+ year_end
|
||||
+ "GROUP BY tname ORDER BY count desc;";
|
||||
return stmt.executeQuery(query);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process() throws Exception {
|
||||
|
||||
|
||||
|
||||
System.out.println("In the process");
|
||||
|
||||
defaultcategorydataset = new DefaultCategoryDataset();
|
||||
// String driverName = "org.postgresql.Driver";
|
||||
// Class driverClass = Class.forName(driverName);
|
||||
// Driver driver = (Driver) driverClass.newInstance();
|
||||
|
||||
System.out.println("pre fulfill");
|
||||
fulfilParameters();
|
||||
System.out.println("post fulfill");
|
||||
|
||||
// String tmp = getInputParameter(firstSpeciesNumber);
|
||||
// System.out.println("process-> speciesnumber value: " + tmp);
|
||||
|
||||
// String tmp="10";
|
||||
// speciesNumber = Integer.parseInt(tmp);
|
||||
|
||||
// year_start = getInputParameter(yearStart);
|
||||
// year_start="1800";
|
||||
|
||||
// year_end = getInputParameter(yearEnd);
|
||||
// year_end="2020";
|
||||
|
||||
// fileName = super.config.getPersistencePath() + "results.csv";
|
||||
|
||||
// fileName = "results.csv";
|
||||
|
||||
out = new BufferedWriter(new FileWriter(fileName));
|
||||
|
||||
System.out.println("pre query");
|
||||
ResultSet rs = performeQuery();
|
||||
System.out.println("post query");
|
||||
|
||||
// connection =
|
||||
// DriverManager.getConnection("jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis",
|
||||
// "postgres",
|
||||
// "0b1s@d4sc13nc3");
|
||||
// Statement stmt = connection.createStatement();
|
||||
// String query =
|
||||
// "SELECT tname, sum(count)AS count FROM public.count_species_per_year WHERE year::integer >="
|
||||
// + year_start
|
||||
// + "AND year::integer <="
|
||||
// + year_end
|
||||
// + "GROUP BY tname ORDER BY count desc;";
|
||||
|
||||
//
|
||||
// System.out.println("pre query");
|
||||
// ResultSet rs=stmt.executeQuery(query);
|
||||
//
|
||||
// System.out.println("post query");
|
||||
//
|
||||
int i = 0;
|
||||
String s = "Species";
|
||||
while (rs.next() && i < speciesNumber) {
|
||||
|
||||
System.out.println(rs.toString());
|
||||
|
||||
String tname = rs.getString("tname");
|
||||
String count = rs.getString("count");
|
||||
|
||||
System.out.println("tname:" + tname);
|
||||
|
||||
System.out.println("count:" + count);
|
||||
|
||||
write(tname + "," + count);
|
||||
int countOcc = Integer.parseInt(count);
|
||||
|
||||
PrimitiveType val = new PrimitiveType(String.class.getName(),
|
||||
count, PrimitiveTypes.STRING, tname, tname);
|
||||
|
||||
|
||||
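// keep at most 100 species in the output map and chart only the first 16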
if (i < 100)
|
||||
map.put(tname, val);
|
||||
if (i < 16)
|
||||
defaultcategorydataset.addValue(countOcc, s, tname);
|
||||
i++;
|
||||
|
||||
}
|
||||
out.close();
|
||||
connection.close();
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setInputParameters() {
|
||||
|
||||
System.out.println("Sono in SetParameters");
|
||||
|
||||
addStringInput(
|
||||
firstSpeciesNumber,
|
||||
"Number of species to report (max 17 will be visualized on the chart)",
|
||||
"10");
|
||||
|
||||
// System.out.println(firstSpeciesNumber);
|
||||
|
||||
addStringInput(yearStart, "Starting year of the analysis", "1800");
|
||||
// System.out.println(yearStart);
|
||||
|
||||
addStringInput(yearEnd, "Ending year of the analysis", "2020");
|
||||
// System.out.println(yearEnd);
|
||||
|
||||
// addRemoteDatabaseInput("Obis2Repository", urlParameterName,
|
||||
// userParameterName, passwordParameterName, "driver", "dialect");
|
||||
|
||||
// addRemoteDatabaseInput("Obis2Repository", urlParameterName,
|
||||
// userParameterName, passwordParameterName, "org.postgresql.Driver",
|
||||
// "org.hibernate.dialect.PostgreSQLDialect");
|
||||
|
||||
System.out.println("pre addRemoteDB");
|
||||
// addRemoteDatabaseInput(
|
||||
// "Obis2Repository",
|
||||
// "jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis",
|
||||
// "postgres", "0b1s@d4sc13nc3", "org.postgresql.Driver",
|
||||
// "org.hibernate.dialect.PostgreSQLDialect");
|
||||
|
||||
// addRemoteDatabaseInput("Obis2Repository", urlParameterName,
|
||||
// userParameterName, passwordParameterName, "driver", "dialect");
|
||||
|
||||
|
||||
System.out.println("post addRemoteDB");
|
||||
|
||||
// super.config.setConfigPath("./cfg/");
|
||||
// config.setConfigPath();
|
||||
|
||||
// super.config.setParam("DatabaseUserName","gcube");
|
||||
// super.config.setParam("DatabasePassword","d4science2");
|
||||
// super.config.setParam("DatabaseURL","jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis");
|
||||
// super.config.setParam("DatabaseDriver","org.postgresql.Driver");
|
||||
|
||||
// System.out.println("URL: "+ super.config.getDatabaseURL());
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
AnalysisLogger.getLogger().debug("Shutdown");
|
||||
}
|
||||
|
||||
@Override
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
AnalysisLogger.getLogger().debug("In getOutput");
|
||||
|
||||
PrimitiveType p = new PrimitiveType(Map.class.getName(),
|
||||
PrimitiveType.stringMap2StatisticalMap(outputParameters),
|
||||
PrimitiveTypes.MAP, "Discrepancy Analysis", "");
|
||||
|
||||
|
||||
AnalysisLogger
|
||||
.getLogger()
|
||||
.debug("MapsComparator: Producing Gaussian Distribution for the errors");
|
||||
// build image:
|
||||
HashMap<String, Image> producedImages = new HashMap<String, Image>();
|
||||
|
||||
JFreeChart chart = HistogramGraph
|
||||
.createStaticChart(defaultcategorydataset);
|
||||
Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
|
||||
|
||||
producedImages.put("Species Observations", image);
|
||||
|
||||
PrimitiveType images = new PrimitiveType(HashMap.class.getName(),
|
||||
producedImages, PrimitiveTypes.IMAGES, "ErrorRepresentation",
|
||||
"Graphical representation of the error spread");
|
||||
// PrimitiveType images = new PrimitiveType("Species Observations",
|
||||
// producedImages, PrimitiveTypes.IMAGES, "ErrorRepresentation",
|
||||
// "Graphical representation of the error spread");
|
||||
PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
|
||||
fileName), PrimitiveTypes.FILE, "OccFile", "OccFile");
|
||||
// end build image
|
||||
AnalysisLogger.getLogger().debug(
|
||||
"Bar Charts Species Occurrences Produced");
|
||||
// collect all the outputs
|
||||
map.put("File", f);
|
||||
map.put("Result", p);
|
||||
map.put("Images", images);
|
||||
|
||||
// generate a primitive type for the collection
|
||||
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map,
|
||||
PrimitiveTypes.MAP, "ResultsMap", "Results Map");
|
||||
|
||||
// PrimitiveType output=null;
|
||||
|
||||
return output;
|
||||
}
|
||||
|
||||
public void write(String writeSt) {
|
||||
try {
|
||||
out.write(writeSt);
|
||||
out.newLine();
|
||||
} catch (IOException e) {
|
||||
// TODO Auto-generated catch block
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,107 @@
|
|||
package org.gcube.dataaccess.algorithms.examples;
|
||||
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
|
||||
|
||||
|
||||
public class SimpleAlg extends
|
||||
StandardLocalExternalAlgorithm{
|
||||
|
||||
|
||||
@Override
|
||||
public void init() throws Exception {
|
||||
AnalysisLogger.getLogger().debug("Initialization");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getDescription() {
|
||||
return "An algorithm for testing";
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void process() throws Exception {
|
||||
|
||||
AnalysisLogger.getLogger().debug("in process");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void setInputParameters() {
|
||||
addStringInput("Name","name","");
|
||||
|
||||
addStringInput("Surname","surname","Liccardo");
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public void shutdown() {
|
||||
AnalysisLogger.getLogger().debug("Shutdown");
|
||||
|
||||
// closes database's connection
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public StatisticalType getOutput() {
|
||||
|
||||
|
||||
AnalysisLogger.getLogger().debug("retrieving results");
|
||||
|
||||
String name= getInputParameter("Name");
|
||||
|
||||
String surname= getInputParameter("Surname");
|
||||
|
||||
|
||||
|
||||
|
||||
List<StatisticalType> list = getInputParameters();
|
||||
|
||||
System.out.println("size: " + list.size());
|
||||
|
||||
for (int i = 0; i < list.size(); i++) {
|
||||
|
||||
System.out.println(list.get(i).getName()+" "+list.get(i).getDefaultValue());
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
PrimitiveType n = new PrimitiveType(
|
||||
String.class.getName(),
|
||||
getInputParameter("Name") ,
|
||||
PrimitiveTypes.STRING,
|
||||
"Name",
|
||||
"name");
|
||||
|
||||
PrimitiveType s = new PrimitiveType(
|
||||
String.class.getName(),
|
||||
getInputParameter("Surname") ,
|
||||
PrimitiveTypes.STRING,
|
||||
"Surname",
|
||||
"surname");
|
||||
|
||||
|
||||
|
||||
|
||||
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
|
||||
|
||||
map.put("Name", n);
|
||||
|
||||
map.put("Surname", s);
|
||||
|
||||
|
||||
AnalysisLogger.getLogger().debug("name: " + name);
|
||||
|
||||
AnalysisLogger.getLogger().debug("surname: " + surname);
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,38 @@
|
|||
package org.gcube.dataaccess.algorithms.examples;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
|
||||
public class TestSimpleAlg {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
System.out.println("TEST 1");
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(testConfigLocal());
|
||||
trans.get(0).init();
|
||||
|
||||
|
||||
Regressor.process(trans.get(0));
|
||||
trans.get(0).getOutput();
|
||||
|
||||
trans = null;
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testConfigLocal() {
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("TEST_ALG");
|
||||
|
||||
// AlgorithmConfiguration config=new AlgorithmConfiguration();
|
||||
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,67 @@
|
|||
package org.gcube.dataaccess.algorithms.examples;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
|
||||
public class TestTransducers {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
System.out.println("TEST 1");
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(testConfigLocal());
|
||||
trans.get(0).init();
|
||||
|
||||
|
||||
Regressor.process(trans.get(0));
|
||||
trans.get(0).getOutput();
|
||||
|
||||
trans = null;
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testConfigLocal() {
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
// AlgorithmConfiguration config=new AlgorithmConfiguration();
|
||||
|
||||
|
||||
config.setConfigPath("./cfg");
|
||||
|
||||
config.setParam("DatabaseName", "Obis2Repository");
|
||||
config.setParam("DatabaseUserName","postgres");
|
||||
config.setParam("DatabasePassword","0b1s@d4sc13nc3");
|
||||
config.setParam("databaseNamebaseDriver","org.postgresql.Driver");
|
||||
config.setParam("DatabaseURL", "jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis");
|
||||
|
||||
|
||||
System.out.println("config: " + config.getParam("DatabaseUserName"));
|
||||
|
||||
|
||||
|
||||
|
||||
// System.out.println(config.getDatabaseURL());
|
||||
|
||||
config.setAgent("LISTNAMES_TABLES");
|
||||
|
||||
config.setParam("longitudeColumn", "decimallongitude");
|
||||
config.setParam("latitudeColumn", "decimallatitude");
|
||||
config.setParam("recordedByColumn", "recordedby");
|
||||
config.setParam("scientificNameColumn", "scientificname");
|
||||
config.setParam("eventDateColumn", "eventdate");
|
||||
config.setParam("lastModificationColumn", "modified");
|
||||
config.setParam("OccurrencePointsTableName", "whitesharkoccurrences2");
|
||||
config.setParam("finalTableName", "whitesharkoccurrencesnoduplicates");
|
||||
config.setParam("spatialTolerance", "0.5");
|
||||
config.setParam("confidence", "80");
|
||||
|
||||
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,38 @@
|
|||
package org.gcube.dataaccess.algorithms.test;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
|
||||
public class TestListDBInfo {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
System.out.println("TEST 1");
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(testConfigLocal());
|
||||
trans.get(0).init();
|
||||
|
||||
|
||||
Regressor.process(trans.get(0));
|
||||
trans.get(0).getOutput();
|
||||
|
||||
trans = null;
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testConfigLocal() {
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTDBINFO");
|
||||
config.setParam("ResourceName", "TrendyLyzerObis");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devNext/NextNext");
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,36 @@
|
|||
package org.gcube.dataaccess.algorithms.test;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
|
||||
public class TestListNames {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
System.out.println("TEST 1");
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(testConfigLocal());
|
||||
trans.get(0).init();
|
||||
|
||||
|
||||
Regressor.process(trans.get(0));
|
||||
StatisticalType output = trans.get(0).getOutput();
|
||||
System.out.println(output);
|
||||
trans = null;
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testConfigLocal() {
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTDBNAMES");
|
||||
config.setGcubeScope("/gcube/devNext/NextNext");
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,71 @@
|
|||
package org.gcube.dataaccess.algorithms.test;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
|
||||
public class TestListSchemas {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
System.out.println("TEST 1");
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(testConfigLocal());
|
||||
trans.get(0).init();
|
||||
|
||||
Regressor.process(trans.get(0));
|
||||
trans.get(0).getOutput();
|
||||
|
||||
trans = null;
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testConfigLocal() {
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTDBSCHEMA");
|
||||
|
||||
//A test with a database postgres
|
||||
config.setParam("ResourceName", "GP DB");
|
||||
|
||||
|
||||
|
||||
|
||||
//connection's parameters for a database postgres
|
||||
|
||||
//// config.setParam("databaseName", "aquamapsdb");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("userName", "postgres");
|
||||
// config.setParam("password", "d4science2");
|
||||
// config.setParam("driverName", "org.postgresql.Driver");
|
||||
// config.setParam("URL",
|
||||
// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb");
|
||||
|
||||
|
||||
// config.setParam("DatabaseName", "mysql");
|
||||
|
||||
|
||||
|
||||
|
||||
// Another test with database postgres
|
||||
|
||||
// config.setParam("ResourceName", "TabularData Database");
|
||||
|
||||
// config.setParam("DatabaseName", "tabulardata");
|
||||
|
||||
// config.setParam("userName", "tabulardataadmin");
|
||||
// config.setParam("password", "gcube2010");
|
||||
// config.setParam("driverName", "org.postgresql.Driver");
|
||||
// config.setParam("URL",
|
||||
// "jdbc:postgresql://node7.d.d4science.research-infrastructures.eu:5432/tabulardata");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube");
|
||||
// config.setGcubeScope("/d4science.research-infrastructures.eu");
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,56 @@
|
|||
package org.gcube.dataaccess.algorithms.test;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
|
||||
public class TestListTables {
|
||||
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
System.out.println("TEST 1");
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(testConfigLocal());
|
||||
trans.get(0).init();
|
||||
|
||||
|
||||
Regressor.process(trans.get(0));
|
||||
trans.get(0).getOutput();
|
||||
|
||||
trans = null;
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testConfigLocal() {
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTTABLES");
|
||||
|
||||
//A test with a database postgres
|
||||
config.setParam("ResourceName", "GP DB");
|
||||
|
||||
//connection's parameters for a database postgres
|
||||
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
|
||||
// config.setParam("userName", "postgres");
|
||||
// config.setParam("password", "d4science2");
|
||||
// config.setParam("driverName", "org.postgresql.Driver");
|
||||
// config.setParam("URL",
|
||||
// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb");
|
||||
|
||||
|
||||
//a test with a database mysql
|
||||
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube");
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,92 @@
|
|||
package org.gcube.dataaccess.algorithms.test;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
|
||||
public class TestSubmitQuery {
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
System.out.println("TEST 1");
|
||||
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(testConfigLocal());
|
||||
trans.get(0).init();
|
||||
|
||||
Regressor.process(trans.get(0));
|
||||
trans.get(0).getOutput();
|
||||
|
||||
trans = null;
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testConfigLocal() {
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTSUBMITQUERY");
|
||||
|
||||
// // //A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
//
|
||||
// //connection's parameters for a database postgres
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName","area" );
|
||||
// config.setParam("Query","select * from area limit 3" );
|
||||
|
||||
|
||||
// // config.setParam("TableName", "Divisions");
|
||||
// config.setParam("TableName", "all_world");
|
||||
// config.setParam("Query", "select * from all_world");
|
||||
|
||||
// config.setParam("userName", "postgres");
|
||||
// config.setParam("password", "d4science2");
|
||||
// config.setParam("driverName", "org.postgresql.Driver");
|
||||
// config.setParam("URL",
|
||||
// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb");
|
||||
// config.setParam("dialect",
|
||||
// "org.hibernate.dialect.PostgreSQLDialect");
|
||||
|
||||
|
||||
// a test with a database mysql
|
||||
// config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
// config.setParam("DatabaseName", "col2oct2010");
|
||||
// config.setParam("TableName", "common_names");
|
||||
//// config.setParam("Query", "select record_id, name_code from common_names limit 3");
|
||||
//// config.setParam("Query", "select record_id as a, name_code as b from common_names limit 3");
|
||||
//
|
||||
//// config.setParam("Query", "select name_code, record_id from common_names limit 3");
|
||||
//
|
||||
// config.setParam("Query", "select record_id, name_code from common_names limit 3");
|
||||
|
||||
config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("DatabaseName", "testdb");
|
||||
config.setParam("Query", "select count (*)from (select csquarecode from hcaf_d)");
|
||||
|
||||
|
||||
|
||||
// // a test with postgis
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
//
|
||||
// config.setParam("TableName", "Divisions");
|
||||
// config.setParam("Query", "select gid, area from \"Divisions\" limit 30");
|
||||
//
|
||||
//// config.setParam("Query", "select the_geom from Divisions limit 30");
|
||||
//
|
||||
//// config.setParam("Query", "select text(the_geom) from Divisions limit 30");
|
||||
//
|
||||
// config.setParam("Query", "EXPLAIN ANALYZE select gid from \"Divisions\" limit 30");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
// config.setGcubeScope("/gcube");
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,106 @@
|
|||
package org.gcube.dataaccess.algorithms.test;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
|
||||
public class TestTableDetails {
|
||||
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
System.out.println("TEST 1");
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(testConfigLocal());
|
||||
trans.get(0).init();
|
||||
|
||||
|
||||
Regressor.process(trans.get(0));
|
||||
trans.get(0).getOutput();
|
||||
|
||||
trans = null;
|
||||
}
|
||||
|
||||
|
||||
private static AlgorithmConfiguration testConfigLocal() {
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTTABLEDETAILS");
|
||||
|
||||
// //A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
//
|
||||
// //connection's parameters for a database postgres
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
//// config.setParam("TableName", "Divisions");
|
||||
//
|
||||
//// config.setParam("TableName", "all_world");
|
||||
//
|
||||
// config.setParam("TableName", "biodiversity_lme");
|
||||
|
||||
|
||||
//
|
||||
// config.setParam("userName", "postgres");
|
||||
// config.setParam("password", "d4science2");
|
||||
// config.setParam("driverName", "org.postgresql.Driver");
|
||||
// config.setParam("URL",
|
||||
// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb");
|
||||
// config.setParam("dialect", "org.hibernate.dialect.PostgreSQLDialect");
|
||||
|
||||
|
||||
// //A test with a database postgres
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
//
|
||||
// //connection's parameters for a database postgres
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
//// config.setParam("TableName", "Divisions");
|
||||
//
|
||||
//// config.setParam("TableName", "all_world");
|
||||
//
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
////
|
||||
//// config.setParam("userName", "utente");
|
||||
//// config.setParam("password", "d4science");
|
||||
//// config.setParam("driverName", "org.postgresql.Driver");
|
||||
//// config.setParam("URL",
|
||||
//// "jdbc:postgresql://geoserver-dev.d4science-ii.research-infrastructures.eu:5432/aquamapsdb");
|
||||
//// config.setParam("dialect", "org.hibernate.dialect.PostgreSQLDialect");
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
// //a test with a database mysql
|
||||
// config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
// config.setParam("DatabaseName", "col2oct2010");
|
||||
// config.setParam("TableName", "Common_names");
|
||||
|
||||
|
||||
//a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "divisions");
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,88 @@
|
|||
package org.gcube.dataaccess.algorithms.test.regressiontest;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
|
||||
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
|
||||
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
|
||||
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
|
||||
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
|
||||
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
|
||||
|
||||
public class RegressionListDBInfo {
|
||||
|
||||
// static String[] algorithms = { "Postgres", "NullInputValue"};
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(),
|
||||
// testPostgres2(), testPostgis(), Mysql(), NullInputValue(),
|
||||
// Postgres3() };
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(), NullInputValue()};
|
||||
static String[] algorithms = { "Postgres"};
|
||||
static AlgorithmConfiguration[] configs = { testPostgres1()};
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
// System.out.println("TEST 1");
|
||||
|
||||
for (int i = 0; i < algorithms.length; i++) {
|
||||
AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);
|
||||
|
||||
// ComputationalAgent trans = new WPSProcess(wps, algorithms[i]);
|
||||
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(configs[i]);
|
||||
trans.get(0).init();
|
||||
|
||||
// trans.setConfiguration(configs[i]);
|
||||
// trans.init();
|
||||
Regressor.process(trans.get(0));
|
||||
StatisticalType st = trans.get(0).getOutput();
|
||||
|
||||
// Print Result
|
||||
AnalysisLogger.getLogger().debug("ST:" + st);
|
||||
trans = null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testPostgres1() {
|
||||
|
||||
System.out.println("TEST 1: Postgres");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTDBINFO");
|
||||
|
||||
// A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("ResourceName", "TabularData Database");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue() {
|
||||
|
||||
System.out.println("TEST 2: NullInputValue");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTDBINFO");
|
||||
|
||||
// A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
}
@@ -0,0 +1,75 @@
package org.gcube.dataaccess.algorithms.test.regressiontest;

import java.util.LinkedHashMap;
import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionListNames {
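// Regression entry point for the LISTDBNAMES transducer: it runs the transducer
// in the /gcube scope and logs the resulting StatisticalType. The commented-out
// block below shows how the returned LinkedHashMap could be iterated to print
// each value.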
|
||||
|
||||
static String[] algorithms = { "Postgres1", "Postgres2", "Postgis",
|
||||
"Mysql", "NullInputValue" };
|
||||
|
||||
static AlgorithmConfiguration config;
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
System.out.println("TEST 1");
|
||||
|
||||
// for (int i = 0; i < algorithms.length; i++) {
|
||||
AnalysisLogger.getLogger().debug("Executing:" + "test");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTDBNAMES");
|
||||
config.setGcubeScope("/gcube");
|
||||
|
||||
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(config);
|
||||
trans.get(0).init();
|
||||
|
||||
// trans.setConfiguration(configs[i]);
|
||||
// trans.init();
|
||||
Regressor.process(trans.get(0));
|
||||
StatisticalType st = trans.get(0).getOutput();
|
||||
AnalysisLogger.getLogger().debug("ST:" + st);
|
||||
|
||||
|
||||
// // Print Result
|
||||
// PrimitiveType obj= (PrimitiveType)st;
|
||||
// Object result=(Object) (obj.getContent());
|
||||
// LinkedHashMap map=new LinkedHashMap<String, String>();
|
||||
//
|
||||
// map= (LinkedHashMap) result;
|
||||
//
|
||||
// for(int j=0;j<map.size();j++){
|
||||
//
|
||||
// AnalysisLogger.getLogger().debug("value:" + map.get(j));
|
||||
//
|
||||
// }
|
||||
|
||||
trans = null;
|
||||
// }
|
||||
|
||||
}
|
||||
|
||||
// private static AlgorithmConfiguration test() {
|
||||
//
|
||||
// AlgorithmConfiguration config = Regressor.getConfig();
|
||||
//
|
||||
// config.setAgent("LISTDBNAMES");
|
||||
// config.setGcubeScope("/gcube");
|
||||
// return config;
|
||||
//
|
||||
// }
|
||||
|
||||
|
||||
|
||||
}
@@ -0,0 +1,129 @@
package org.gcube.dataaccess.algorithms.test.regressiontest;

import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionListSchemas {
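// Regression entry point for the LISTDBSCHEMA transducer: it lists the schemas
// of the configured database and logs the resulting StatisticalType. The active
// configuration assumes that the "TimeSeriesDatabase" resource (database
// "timeseries") is registered in the /gcube/devsec scope; the NullInputValue*
// configurations exercise missing-parameter handling.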
|
||||
|
||||
// static String[] algorithms = { "Postgres1", "NullInputValue1", "NullInputValue2"};
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(),
|
||||
// testPostgres2(), testPostgis(), Mysql(), NullInputValue(),
|
||||
// Postgres3() };
static AlgorithmConfiguration[] configs = {testPostgres1()};
|
||||
static String[] algorithms = { "Postgres1"};
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
// System.out.println("TEST 1");
|
||||
|
||||
for (int i = 0; i < algorithms.length; i++) {
|
||||
AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);
|
||||
|
||||
// ComputationalAgent trans = new WPSProcess(wps, algorithms[i]);
|
||||
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(configs[i]);
|
||||
trans.get(0).init();
|
||||
|
||||
// trans.setConfiguration(configs[i]);
|
||||
// trans.init();
|
||||
Regressor.process(trans.get(0));
|
||||
StatisticalType st = trans.get(0).getOutput();
|
||||
|
||||
// Print Result
|
||||
AnalysisLogger.getLogger().debug("ST:" + st);
|
||||
trans = null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
private static AlgorithmConfiguration testPostgres1() {
|
||||
|
||||
System.out.println("TEST 1: Postgres table without rows");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTDBSCHEMA");
|
||||
|
||||
// A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
|
||||
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
|
||||
// config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
// config.setParam("DatabaseName", "col2oct2010");
|
||||
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
|
||||
config.setParam("ResourceName", "TimeSeriesDatabase");
|
||||
config.setParam("DatabaseName", "timeseries");
|
||||
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue1() {
|
||||
|
||||
System.out.println("TEST 2: NullInputValue1");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTDBSCHEMA");
|
||||
|
||||
// A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue2() {
|
||||
|
||||
System.out.println("TEST 3: NullInputValue2");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTDBSCHEMA");
|
||||
|
||||
// A test with a database postgres
|
||||
config.setParam("ResourceName", "GP DB");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
@@ -0,0 +1,165 @@
package org.gcube.dataaccess.algorithms.test.regressiontest;

import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionListTables {
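// Regression entry point for the LISTTABLES transducer: it lists the tables of
// the configured database and logs the resulting StatisticalType. The active
// configuration is Mysql(), which assumes that the "CatalogOfLife2010" resource
// (database "col2oct2010") is registered in the /gcube/devsec scope.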
|
||||
|
||||
// static String[] algorithms = { "Postgres1", "Mysql", "NullInputValue1", "NullInputValue2", "NullInputValue3" };
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(),Mysql(), NullInputValue1(), NullInputValue2(), NullInputValue3()};
|
||||
|
||||
|
||||
static String[] algorithms = { "Postgres1"};
|
||||
static AlgorithmConfiguration[] configs = { Mysql()};
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
// System.out.println("TEST 1");
|
||||
|
||||
for (int i = 0; i < algorithms.length; i++) {
|
||||
AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);
|
||||
|
||||
// ComputationalAgent trans = new WPSProcess(wps, algorithms[i]);
|
||||
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(configs[i]);
|
||||
trans.get(0).init();
|
||||
|
||||
// trans.setConfiguration(configs[i]);
|
||||
// trans.init();
|
||||
Regressor.process(trans.get(0));
|
||||
StatisticalType st = trans.get(0).getOutput();
|
||||
|
||||
// Print Result
|
||||
AnalysisLogger.getLogger().debug("ST:" + st);
|
||||
trans = null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testPostgres1() {
|
||||
|
||||
System.out.println("TEST 1: Postgres table without rows");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTTABLES");
|
||||
|
||||
// A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
//
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
|
||||
config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("DatabaseName", "testdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
// config.setParam("SchemaName", "aquamapsvre");
|
||||
|
||||
|
||||
// config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
// config.setParam("DatabaseName", "col2oct2010");
|
||||
//// config.setParam("SchemaName", "");
|
||||
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration Mysql() {
|
||||
|
||||
System.out.println("TEST 2: Mysql");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTTABLES");
|
||||
|
||||
// a test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("SchemaName", "");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue1() {
|
||||
|
||||
System.out.println("TEST 3: Postgis NullInputValue1");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTTABLES");
|
||||
|
||||
// a test with postgis
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue2() {
|
||||
|
||||
System.out.println("TEST 4: Postgis NullInputValue2");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTTABLES");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue3() {
|
||||
|
||||
System.out.println("TEST 5: Postgis NullInputValue3");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTTABLES");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
}
@@ -0,0 +1,273 @@
package org.gcube.dataaccess.algorithms.test.regressiontest;

import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionRandomSampleOnTable {
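// Regression entry point for the RANDOMSAMPLEONTABLE transducer: it extracts a
// random sample of rows from the configured table and logs the resulting
// StatisticalType. The active configuration is testMysql1(), which assumes that
// the "CatalogOfLife2010" resource exposes the "common_names" table in the
// /gcube/devsec scope.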
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(),
|
||||
// testPostgis(), testMysql1(),testMysql2(), NullInputValue1(),
|
||||
// NullInputValue2(), NullInputValue3(), NullInputValue4()};
|
||||
|
||||
// static String[] algorithms = { "Postgres1", "Postgis", "Mysql1",
|
||||
// "Mysql2", "NullInputValue1", "NullInputValue2", "NullInputValue3",
|
||||
// "NullInputValue4"};
|
||||
|
||||
static AlgorithmConfiguration[] configs = { testMysql1() };
|
||||
static String[] algorithms = { "Postgres1" };
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
// System.out.println("TEST 1");
|
||||
|
||||
for (int i = 0; i < algorithms.length; i++) {
|
||||
AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);
|
||||
|
||||
// ComputationalAgent trans = new WPSProcess(wps, algorithms[i]);
|
||||
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(configs[i]);
|
||||
trans.get(0).init();
|
||||
|
||||
// trans.setConfiguration(configs[i]);
|
||||
// trans.init();
|
||||
Regressor.process(trans.get(0));
|
||||
StatisticalType st = trans.get(0).getOutput();
|
||||
|
||||
// Print Result
|
||||
AnalysisLogger.getLogger().debug("ST:" + st);
|
||||
trans = null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// PostgreSQL database
|
||||
|
||||
private static AlgorithmConfiguration testPostgres1() {
|
||||
|
||||
System.out.println("TEST 1: Postgres table without rows");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("RANDOMSAMPLEONTABLE");
|
||||
|
||||
// A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
//
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "area"); // it has not rows
|
||||
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
// // config.setParam("TableName",
|
||||
// "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
|
||||
// config.setParam("TableName",
|
||||
// "hspec_id_c8e87e16_a0b4_4f9b_b48e_f1cf60ab104c");
|
||||
// // config.setParam("TableName",
|
||||
// "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
|
||||
// // config.setParam("TableName", "bionymoutlevfaked2csvpreprcsv");
|
||||
//
|
||||
// // config.setParam("TableName",
|
||||
// "timeseries_id08b3abb9_c7b0_4b82_8117_64b69055416f");
|
||||
// // config.setParam("TableName",
|
||||
// "occurrence_species_idaf35d737_fb3e_43a7_b13a_611dfa97b064");
|
||||
// config.setParam("TableName",
|
||||
// "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// //
|
||||
// //
|
||||
// //
|
||||
// // config.setParam("TableName", "hspec2012_07_11_12_33_05_526");
|
||||
//
|
||||
// // config.setParam("TableName", "custom2013_12_04_15_27_16_493_cet");
|
||||
//
|
||||
// config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
//// config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");
|
||||
// config.setParam("TableName", "hspen");
|
||||
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "SeaVoX_sea_areas_polygons_v14");
|
||||
|
||||
// Obis
|
||||
config.setParam("ResourceName", "Obis2Repository");
|
||||
config.setParam("DatabaseName", "obis");
|
||||
// config.setParam("SchemaName", "calc");
|
||||
// config.setParam("TableName", "map1d");
|
||||
config.setParam("SchemaName", "newd20110525");
|
||||
config.setParam("TableName", "edc");
|
||||
|
||||
// config.setParam("Query", "select * from area limit 3");
|
||||
|
||||
// config.setParam("Query",
|
||||
// "select text(the_geom) from \"Divisions\" limit 1");
|
||||
|
||||
// config.setParam("TableName", "all_world");
|
||||
// config.setParam("TableName", "biodiversity_lme");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testPostgis() {
|
||||
|
||||
System.out.println("TEST 2: Postgis");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("RANDOMSAMPLEONTABLE");
|
||||
|
||||
// A test with a database postgres
|
||||
config.setParam("ResourceName", "GP DB");
|
||||
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testMysql1() {
|
||||
|
||||
System.out.println("TEST 3: Mysql1");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("RANDOMSAMPLEONTABLE");
|
||||
|
||||
// A test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("TableName", "common_names");
|
||||
// config.setParam("TableName", "Common_names"); // mysql is not case
|
||||
// sensitive
|
||||
// config.setParam("TableName", "databases");
|
||||
// config.setParam("TableName", "simple_search");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testMysql2() {
|
||||
|
||||
System.out.println("TEST 4: Mysql2");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("RANDOMSAMPLEONTABLE");
|
||||
|
||||
// A test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("TableName", "example"); // the table does not exist
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue1() {
|
||||
|
||||
System.out.println("TEST 5: Postgis NullInputValue1");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("RANDOMSAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue2() {
|
||||
System.out.println("TEST 6: Postgis NullInputValue2");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("RANDOMSAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue3() {
|
||||
|
||||
System.out.println("TEST 7: Postgis NullInputValue3");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("RANDOMSAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue4() {
|
||||
System.out.println("TEST 8: Postgis NullInputValue4");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("RANDOMSAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
}
@@ -0,0 +1,266 @@
package org.gcube.dataaccess.algorithms.test.regressiontest;

import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionSampleOnTable {
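// Regression entry point for the SAMPLEONTABLE transducer: it retrieves a
// sample of rows from the configured table and logs the resulting
// StatisticalType. The active configuration is testMysql1(), which assumes that
// the "CatalogOfLife2010" resource exposes the "common_names" table in the
// /gcube/devsec scope.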
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(), testPostgis(), testMysql1(),testMysql2(), NullInputValue1(), NullInputValue2(), NullInputValue3(), NullInputValue4()};
|
||||
|
||||
// static String[] algorithms = { "Postgres1", "Postgis", "Mysql1", "Mysql2", "NullInputValue1", "NullInputValue2", "NullInputValue3", "NullInputValue4"};
|
||||
|
||||
static AlgorithmConfiguration[] configs = { testMysql1()};
|
||||
|
||||
static String[] algorithms = { "Postgres1"};
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
// System.out.println("TEST 1");
|
||||
|
||||
for (int i = 0; i < algorithms.length; i++) {
|
||||
AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);
|
||||
|
||||
// ComputationalAgent trans = new WPSProcess(wps, algorithms[i]);
|
||||
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(configs[i]);
|
||||
trans.get(0).init();
|
||||
|
||||
// trans.setConfiguration(configs[i]);
|
||||
// trans.init();
|
||||
Regressor.process(trans.get(0));
|
||||
StatisticalType st = trans.get(0).getOutput();
|
||||
|
||||
// Print Result
|
||||
AnalysisLogger.getLogger().debug("ST:" + st);
|
||||
trans = null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testPostgres1() {
|
||||
|
||||
System.out.println("TEST 1: Postgres table without rows");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SAMPLEONTABLE");
|
||||
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "biodiversity");
|
||||
|
||||
config.setParam("TableName", "SeaVoX_sea_areas_polygons_v14");
|
||||
// config.setParam("TableName", "");
|
||||
|
||||
// A test with a database postgres Geoserver
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
//// config.setParam("TableName", "area"); // it has not rows
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
// config.setParam("Query", "select * from area limit 3");
|
||||
|
||||
// config.setParam("Query", "select text(the_geom) from \"Divisions\" limit 1");
|
||||
|
||||
// config.setParam("TableName", "all_world");
|
||||
// config.setParam("TableName", "biodiversity_lme");
|
||||
|
||||
|
||||
// config.setParam("ResourceName", "DionysusDB");
|
||||
// config.setParam("DatabaseName", "World");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableNamefcatalog", "countrylanguage"); //mysql is not case sensitive
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "speciesoccursum");
|
||||
//// config.setParam("TableName", "hspec2012_07_11_12_33_05_526");
|
||||
// config.setParam("TableName", "hspen");
|
||||
|
||||
//Statistical
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
//// config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
//// config.setParam("TableName", "taxamatchinput");
|
||||
//// config.setParam("TableName", "Divisions");
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
|
||||
//Obis
|
||||
// config.setParam("ResourceName", "Obis2Repository");
|
||||
// config.setParam("DatabaseName", "obis");
|
||||
// config.setParam("SchemaName", "calc");
|
||||
// config.setParam("TableName", "map1d");
|
||||
|
||||
//Obis
|
||||
// config.setParam("ResourceName", "Obis2Repository");
|
||||
// config.setParam("DatabaseName", "obis");
|
||||
// config.setParam("SchemaName", "newd20110525");
|
||||
//// config.setParam("TableName", "map1d");
|
||||
// config.setParam("TableName", "edc");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testPostgis() {
|
||||
|
||||
System.out.println("TEST 2: Postgis");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SAMPLEONTABLE");
|
||||
|
||||
// A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
config.setParam("DatabaseName", "testdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "hcaf_d");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testMysql1() {
|
||||
|
||||
System.out.println("TEST 3: Mysql1");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SAMPLEONTABLE");
|
||||
|
||||
// A test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
// config.setParam("TableName", "Common_names"); //mysql is not case sensitive
|
||||
// config.setParam("TableName", "databases");
|
||||
// config.setParam("TableName", "families");
|
||||
config.setParam("TableName", "common_names");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testMysql2() {
|
||||
|
||||
System.out.println("TEST 4: Mysql2");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SAMPLEONTABLE");
|
||||
|
||||
// A test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("TableName", "example"); //the table does not exist
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
private static AlgorithmConfiguration NullInputValue1() {
|
||||
|
||||
System.out.println("TEST 5: Postgis NullInputValue1");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue2() {
|
||||
System.out.println("TEST 6: Postgis NullInputValue2");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
private static AlgorithmConfiguration NullInputValue3() {
|
||||
|
||||
System.out.println("TEST 7: Postgis NullInputValue3");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
private static AlgorithmConfiguration NullInputValue4() {
|
||||
System.out.println("TEST 8: Postgis NullInputValue4");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
@@ -0,0 +1,305 @@
package org.gcube.dataaccess.algorithms.test.regressiontest;

import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionSmartSampleOnTable {
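// Regression entry point for the SMARTSAMPLEONTABLE transducer: it retrieves a
// sample of rows from the configured table and logs the resulting
// StatisticalType. The active configuration is testMysql1(), which assumes that
// the "CatalogOfLife2010" resource exposes the "common_names" table in the
// /gcube/devsec scope.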
|
||||
|
||||
//static AlgorithmConfiguration[] configs = { testPostgres1(), testPostgis(), testMysql1(),testMysql2(), NullInputValue1(), NullInputValue2(), NullInputValue3(), NullInputValue4()};
|
||||
|
||||
// static String[] algorithms = { "Postgres1", "Postgis", "Mysql1", "Mysql2", "NullInputValue1", "NullInputValue2", "NullInputValue3", "NullInputValue4"};
|
||||
|
||||
static AlgorithmConfiguration[] configs = { testMysql1()};
|
||||
static String[] algorithms = { "Postgres1"};
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
// System.out.println("TEST 1");
|
||||
|
||||
for (int i = 0; i < algorithms.length; i++) {
|
||||
AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);
|
||||
|
||||
// ComputationalAgent trans = new WPSProcess(wps, algorithms[i]);
|
||||
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(configs[i]);
|
||||
trans.get(0).init();
|
||||
|
||||
// trans.setConfiguration(configs[i]);
|
||||
// trans.init();
|
||||
Regressor.process(trans.get(0));
|
||||
StatisticalType st = trans.get(0).getOutput();
|
||||
|
||||
// Print Result
|
||||
AnalysisLogger.getLogger().debug("ST:" + st);
|
||||
trans = null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testPostgres1() {
|
||||
|
||||
System.out.println("TEST 1: Postgres table without rows");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SMARTSAMPLEONTABLE");
|
||||
|
||||
// // A test with a database postgres
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
//
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "area"); // it has not rows
|
||||
|
||||
// config.setParam("Query", "select * from area limit 3");
|
||||
|
||||
// config.setParam("Query", "select text(the_geom) from \"Divisions\" limit 1");
|
||||
|
||||
// config.setParam("TableName", "all_world");
|
||||
// config.setParam("TableName", "biodiversity_lme");
|
||||
|
||||
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
//// config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
//
|
||||
//// config.setParam("TableName", "hcaf_d");
|
||||
//
|
||||
// config.setParam("TableName", "hspec_id_c8e87e16_a0b4_4f9b_b48e_f1cf60ab104c");
|
||||
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
|
||||
// config.setParam("TableName", "generic_id037d302d_2ba0_4e43_b6e4_1a797bb91728");
|
||||
// config.setParam("TableName", "bionymoutlevfaked2csvpreprcsv");
|
||||
|
||||
// config.setParam("TableName", "timeseries_id08b3abb9_c7b0_4b82_8117_64b69055416f");
|
||||
// config.setParam("TableName", "occurrence_species_idaf35d737_fb3e_43a7_b13a_611dfa97b064");
|
||||
|
||||
// config.setParam("TableName", "processedoccurrences_id_e3b82f7f_6bd6_493a_bd2c_552cd486004a");
|
||||
// config.setParam("TableName", "hspen_mini_100");
|
||||
// config.setParam("TableName", "hspec_id_c8e87e16_a0b4_4f9b_b48e_f1cf60ab104c");
|
||||
|
||||
// config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
//// config.setParam("TableName", "hspec2012_07_11_12_33_05_526");
|
||||
// config.setParam("TableName", "hspen");
|
||||
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "SeaVoX_sea_areas_polygons_v14");
|
||||
|
||||
|
||||
////// config.setParam("TableName", "hspec2012_07_11_12_33_05_483");
|
||||
// config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");
|
||||
//// config.setParam("TableName", "hspec_suitable_executor_1");
|
||||
// config.setParam("TableName", "custom2013_12_04_15_27_16_493_cet");
|
||||
// config.setParam("TableName", "hspec2012_07_11_12_33_05_483");
|
||||
|
||||
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hspec2012_07_11_12_33_05_526");
|
||||
|
||||
//// //statistical
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
////// config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
|
||||
|
||||
//Obis
|
||||
config.setParam("ResourceName", "Obis2Repository");
|
||||
config.setParam("DatabaseName", "obis");
|
||||
config.setParam("SchemaName", "newd20110525");
|
||||
// config.setParam("TableName", "map1d");
|
||||
config.setParam("TableName", "_positions1");
|
||||
// config.setParam("TableName", "map1d");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testPostgis() {
|
||||
|
||||
System.out.println("TEST 2: Postgis");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SMARTSAMPLEONTABLE");
|
||||
|
||||
// A test with a database postgres
|
||||
config.setParam("ResourceName", "GP DB");
|
||||
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testMysql1() {
|
||||
|
||||
System.out.println("TEST 3: Mysql1");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SMARTSAMPLEONTABLE");
|
||||
|
||||
// A test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("TableName", "common_names"); //mysql is not case sensitive
|
||||
// config.setParam("TableName", "databases");
|
||||
// config.setParam("TableName", "simple_search");
|
||||
// config.setParam("TableName", "scientific_names");
|
||||
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testMysql2() {
|
||||
|
||||
System.out.println("TEST 4: Mysql2");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SMARTSAMPLEONTABLE");
|
||||
|
||||
// A test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("TableName", "example"); //the table does not exist
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
//private static AlgorithmConfiguration testMysql3() { // wrong data
|
||||
//
|
||||
// System.out.println("TEST 4.1: Mysql3");
|
||||
//
|
||||
// AlgorithmConfiguration config = Regressor.getConfig();
|
||||
//
|
||||
// config.setAgent("SMARTSAMPLEONTABLE");
|
||||
//
|
||||
// // A test with a database mysql
|
||||
// config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
// config.setParam("DatabaseName", "aquamaps");
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
//
|
||||
// config.setGcubeScope("/gcube/devsec");
|
||||
//
|
||||
// return config;
|
||||
//
|
||||
//}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue1() {
|
||||
|
||||
System.out.println("TEST 5: Postgis NullInputValue1");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SMARTSAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue2() {
|
||||
System.out.println("TEST 6: Postgis NullInputValue2");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SMARTSAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
private static AlgorithmConfiguration NullInputValue3() {
|
||||
|
||||
System.out.println("TEST 7: Postgis NullInputValue3");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SMARTSAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
private static AlgorithmConfiguration NullInputValue4() {
|
||||
System.out.println("TEST 8: Postgis NullInputValue4");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("SMARTSAMPLEONTABLE");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
@@ -0,0 +1,373 @@
package org.gcube.dataaccess.algorithms.test.regressiontest;

import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionSubmitQuery {
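// Regression entry point for the LISTSUBMITQUERY transducer: it submits the
// configured read-only query and logs the resulting StatisticalType. The active
// configuration is testPostgres1(), which assumes that the "Geoserver database "
// resource (database "aquamapsdb") is registered in the /gcube/devsec scope and
// contains the "SeaVoX_sea_areas_polygons_v14" table.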
|
||||
|
||||
// static String[] algorithms = { "Postgres1", "Mysql", "Postgres3", "Postgres4", "NullInputValue1", "NullInputValue2", "NullInputValue3", "NullInputValue4", "NullInputValue5", "Postgis"};
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(),
|
||||
// testPostgres2(), testPostgis(), Mysql(), NullInputValue(),
|
||||
// Postgres3() };
|
||||
|
||||
// static AlgorithmConfiguration[] configs = { testPostgres1(), Mysql(), Postgres3(), Postgres4(), NullInputValue1(), NullInputValue2(), NullInputValue3(), NullInputValue4(), NullInputValue5(), Postgis()};
|
||||
|
||||
static String[] algorithms = {"Postgres1"};
|
||||
static AlgorithmConfiguration[] configs = { testPostgres1() };
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
|
||||
// System.out.println("TEST 1");
|
||||
|
||||
for (int i = 0; i < algorithms.length; i++) {
|
||||
AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);
|
||||
|
||||
// ComputationalAgent trans = new WPSProcess(wps, algorithms[i]);
|
||||
|
||||
List<ComputationalAgent> trans = null;
|
||||
trans = TransducerersFactory.getTransducerers(configs[i]);
|
||||
trans.get(0).init();
|
||||
|
||||
// trans.setConfiguration(configs[i]);
|
||||
// trans.init();
|
||||
Regressor.process(trans.get(0));
|
||||
StatisticalType st = trans.get(0).getOutput();
|
||||
|
||||
// Print Result
|
||||
AnalysisLogger.getLogger().debug("ST:" + st);
|
||||
trans = null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration testPostgres1() {
|
||||
|
||||
System.out.println("TEST 1: Postgres table without rows");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTSUBMITQUERY");
|
||||
|
||||
// // A test with a database postgres
|
||||
// config.setParam("ResourceName", "StatisticalManagerDataBase");
|
||||
// config.setParam("DatabaseName", "testdb");
|
||||
////// config.setParam("", "TRUE");
|
||||
// config.setParam("Read-Only Query", "true");
|
||||
// config.setParam("Apply Smart Correction", "FALSE");
|
||||
// config.setParam("Language", "NONE");
|
||||
//// config.setParam("Query", "DELETE from test_gsay_03217cfda4244870b4d11f9e0eca58fe");
|
||||
// config.setParam("Query", "select * from hcaf_d limit 20");
|
||||
|
||||
// config.setParam("Apply Smart Correction", "TRUE");
|
||||
// config.setParam("Language", "POSTGRES");
|
||||
|
||||
|
||||
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "hcaf_d");
|
||||
|
||||
|
||||
//
|
||||
//
|
||||
// config.setParam("Query", "select * from hcaf_d limit 1");
|
||||
// config.setParam("Query", "select * from hspen limit 6");
|
||||
|
||||
// config.setParam("Query", "select * from bionymfaked18csvpreprcsv limit 2");
|
||||
|
||||
// config.setParam("Query", "select count (*)from (select csquarecode from hcaf_d)");
|
||||
|
||||
// config.setParam("Query", "select csquarecode,months,sum(effort) as effort, sum(total_yft_catch) as total_yft_catch from (select csquarecode,to_char(time,'MM') months,sum(effort) as effort,sum(total_yft_catch) as total_yft_catch from timeseries_idacdbb646_7500_4920_8e0d_aa38cc99a4a6 group by csquarecode,time order by time ASC) as a group by csquarecode,months order by csquarecode");
|
||||
|
||||
// config.setParam("Query", "select csquarecode, total_yft_catch from timeseries_idacdbb646_7500_4920_8e0d_aa38cc99a4a6 limit 3");
|
||||
|
||||
// config.setParam("ResourceName", "DionysusDB");
|
||||
// config.setParam("DatabaseName", "World");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "countrylanguage"); //mysql is not case sensitive
|
||||
//// config.setParam("Query", "select * from countrylanguage limit 10");
|
||||
// config.setParam("Query", "SELECT * FROM information_schema.COLUMNS WHERE table_name ='countrylanguage' and table_schema='public'");
|
||||
|
||||
// config.setParam("ResourceName", "GP DB");
|
||||
//
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("", "TRUE");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
// config.setParam("Query","select gid, area, perimeter, nafo_, nafo_id, zone from \"Divisions\" limit 100");
|
||||
|
||||
// config.setParam("TableName", "area"); // it has not rows
|
||||
|
||||
// config.setParam("Query", "select * from area limit 3");
|
||||
// config.setParam("Query", "select gid, area, perimeter, CAST(the_geom as text) from \"Divisions\" limit 10");
|
||||
|
||||
// config.setParam("Query", "select text(the_geom) from \"Divisions\" limit 1");
|
||||
|
||||
// config.setParam("Query", "select perimeter,zone from \"Divisions\" where gid='7'");
|
||||
|
||||
// config.setParam("Query", "select area, CAST(perimeter as text) from \"Divisions\" order by random() limit 2");
|
||||
|
||||
// config.setParam("TableName", "all_world");
|
||||
// config.setParam("TableName", "biodiversity_lme");
|
||||
|
||||
// //Obis
|
||||
// config.setParam("ResourceName", "Obis2Repository");
|
||||
// config.setParam("DatabaseName", "obis");
|
||||
// config.setParam("Read-Only Query", "trie");
|
||||
// config.setParam("Apply Smart Correction", "FALSE");
|
||||
// config.setParam("Language", "NONE");
|
||||
//// config.setParam("Query", "select id from fmap.randomdrs limit 1");
|
||||
//// config.setParam("Query", "select lifestage from randomdrs");
|
||||
// config.setParam("Query", "select * from newd20110525."+"\""+"edc"+ "\""+" where id='76864082'");
|
||||
|
||||
|
||||
// config.setParam("ResourceName", "AquaMaps Service DataBase");
|
||||
// config.setParam("DatabaseName", "aquamapsorgupdated");
|
||||
// config.setParam("Read-Only Query", "TRUE");
|
||||
// config.setParam("Apply Smart Correction", "FALSE");
|
||||
// config.setParam("Language", "NONE");
|
||||
//// config.setParam("Query", "select id from fmap.randomdrs limit 1");
|
||||
//// config.setParam("Query", "select * from (select * from maxminlat_hspen2012_02_28_17_45_49_572 as a join maxminlat_hspen2011_09_23_15_31_47_530 as b on a.maxclat=b.maxclat limit 2");
|
||||
//
|
||||
//// config.setParam("Query", "select * from maxminlat_hspen2012_02_28_17_45_49_572 as a join maxminlat_hspen2011_09_23_15_31_47_530 as b on a.maxclat=b.maxclat limit 2");
|
||||
//
|
||||
//// config.setParam("Query", "select * from hcaf_d_2018_linear_01341919234605 as a join hcaf_d_2024_linear_11341919235343 as b on a.csquarecode = b.csquarecode limit 1");
|
||||
//
|
||||
// config.setParam("Query", "select * from hcaf_d_2018_linear_01341919234605 as a, hcaf_d_2024_linear_11341919235343 as b where a.csquarecode = b.csquarecode limit 1");
|
||||
//
|
||||
|
||||
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("Read-Only Query", "true");
|
||||
config.setParam("Apply Smart Correction", "FALSE");
|
||||
config.setParam("Language", "NONE");
|
||||
config.setParam("Query", "select st_astext(the_geom) from" +"\""+"SeaVoX_sea_areas_polygons_v14"+"\""+"limit 1");
|
||||
// config.setParam("Query", "select * from public.depthmean limit 10");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration Mysql() {
|
||||
|
||||
System.out.println("TEST 2: Mysql");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTSUBMITQUERY");
|
||||
|
||||
// // a test with postgis
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("SchemaName", "public");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("TableName", "Divisions");
|
||||
|
||||
|
||||
|
||||
//a test with a database mysql
|
||||
config.setParam("ResourceName", "CatalogOfLife2010");
|
||||
config.setParam("DatabaseName", "col2oct2010");
|
||||
config.setParam("TableName", "Common_names"); //mysql is not case sensitive
|
||||
// config.setParam("Query", "select * from common_names limit 10");
|
||||
config.setParam("Read-Only Query", "TRUE");
|
||||
config.setParam("Apply Smart Correction", "FALSE");
|
||||
config.setParam("Language", "NONE");
|
||||
|
||||
// config.setParam("Query", "select a.name_code as uno, b.name_code as due from common_names as a join distribution as b on a.name_code=b.name_code limit 2");
|
||||
config.setParam("Query", "select * from common_names as a join distribution as b on a.name_code=b.name_code");
|
||||
// config.setParam("TableName", "specialists");
|
||||
|
||||
// config.setParam("Query", "select * from specialists limit 3");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration Postgres3() {
|
||||
|
||||
System.out.println("TEST 3: Postgis");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTSUBMITQUERY");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions"); //postgres is case sensitive
|
||||
config.setParam("Query", "select the_geom from Divisions limit 3");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration Postgres4() {
|
||||
|
||||
System.out.println("TEST 4: Postgis filter query");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTSUBMITQUERY");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions"); //postgres is case sensitive
|
||||
// config.setParam("Query", "select the_geom from Divisions limit 30");
|
||||
config.setParam("Query", "EXPLAIN ANALYZE select gid from Divisions limit 3");
|
||||
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue1() {
|
||||
|
||||
System.out.println("TEST 5: Postgis NullInputValue1");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTSUBMITQUERY");
|
||||
|
||||
// a test with postgis
|
||||
// config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
config.setParam("Query", "EXPLAIN ANALYZE select gid from Divisions limit 3");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue2() {
|
||||
|
||||
System.out.println("TEST 5: Postgis NullInputValue2");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTSUBMITQUERY");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
// config.setParam("DatabaseName", "aquamapsdb");
|
||||
config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
config.setParam("Query", "EXPLAIN ANALYZE select gid from Divisions limit 3");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
private static AlgorithmConfiguration NullInputValue3() {
|
||||
|
||||
System.out.println("TEST 5: Postgis NullInputValue3");
|
||||
|
||||
AlgorithmConfiguration config = Regressor.getConfig();
|
||||
|
||||
config.setAgent("LISTSUBMITQUERY");
|
||||
|
||||
// a test with postgis
|
||||
config.setParam("ResourceName", "Geoserver database ");
|
||||
config.setParam("DatabaseName", "aquamapsdb");
|
||||
// config.setParam("SchemaName", "public");
|
||||
config.setParam("TableName", "Divisions");
|
||||
config.setParam("Query", "EXPLAIN ANALYZE select gid from Divisions limit 3");
|
||||
|
||||
config.setGcubeScope("/gcube/devsec");
|
||||
|
||||
return config;
|
||||
|
||||
}
|
||||
|
||||
    private static AlgorithmConfiguration NullInputValue4() {

        System.out.println("TEST 5: Postgis NullInputValue4");

        AlgorithmConfiguration config = Regressor.getConfig();

        config.setAgent("LISTSUBMITQUERY");

        // a test with postgis: TableName is left unset to check the handling of a missing input value
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        // config.setParam("TableName", "Divisions");
        config.setParam("Query", "EXPLAIN ANALYZE select gid from Divisions limit 3");

        config.setGcubeScope("/gcube/devsec");

        return config;
    }

    private static AlgorithmConfiguration NullInputValue5() {

        System.out.println("TEST 6: Postgis NullInputValue5");

        AlgorithmConfiguration config = Regressor.getConfig();

        config.setAgent("LISTSUBMITQUERY");

        // a test with postgis: Query is left unset to check the handling of a missing input value
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        config.setParam("TableName", "Divisions");
        // config.setParam("Query", "EXPLAIN ANALYZE select gid from Divisions limit 3");

        config.setGcubeScope("/gcube/devsec");

        return config;
    }

    private static AlgorithmConfiguration Postgis() {

        System.out.println("TEST 7: Postgis");

        AlgorithmConfiguration config = Regressor.getConfig();

        config.setAgent("LISTSUBMITQUERY");

        // a test with postgis
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        config.setParam("TableName", "Divisions"); // postgres is case sensitive for quoted table names like "Divisions"
        config.setParam("Query", "select * from \"Divisions\" limit 1");

        config.setGcubeScope("/gcube/devsec");

        return config;
    }
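
    // A minimal helper sketch, not used by the tests above: it factors out the LISTSUBMITQUERY
    // set-up that every configuration in this class repeats. The method name and the
    // null-skipping behaviour are assumptions added here for illustration; the parameter keys,
    // the scope and the Regressor/AlgorithmConfiguration calls are the same ones the tests use.
    private static AlgorithmConfiguration buildSubmitQueryConfig(String resourceName,
            String databaseName, String schemaName, String tableName, String query) {

        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("LISTSUBMITQUERY");

        // parameters passed as null are simply not set, which mirrors the NullInputValue tests
        if (resourceName != null)
            config.setParam("ResourceName", resourceName);
        if (databaseName != null)
            config.setParam("DatabaseName", databaseName);
        if (schemaName != null)
            config.setParam("SchemaName", schemaName);
        if (tableName != null)
            config.setParam("TableName", tableName);
        if (query != null)
            config.setParam("Query", query);

        config.setGcubeScope("/gcube/devsec");
        return config;
    }

    // Example (hypothetical): Postgis() above is equivalent to
    // buildSubmitQueryConfig("Geoserver database ", "aquamapsdb", "public", "Divisions",
    //         "select * from \"Divisions\" limit 1");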

}
@ -0,0 +1,227 @@

package org.gcube.dataaccess.algorithms.test.regressiontest;

import java.util.List;

import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;

public class RegressionTableDetails {

    // static String[] algorithms = { "Postgres1", "Postgres2", "Postgis",
    // "Mysql", "NullInputValue", "Postgres3" };

    // static AlgorithmConfiguration[] configs = { testPostgres1(),
    // testPostgres2(), testPostgis(), Mysql(), NullInputValue(), Postgres3() };

    static String[] algorithms = { "Postgres1" };
    static AlgorithmConfiguration[] configs = { testPostgis() };
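
    // Note: the "Postgres1" label above is currently paired with the testPostgis() configuration,
    // so the run logged as "Executing:Postgres1" actually runs TEST 3 (Postgis); pair the label
    // with testPostgres1() if the Postgres test is the one meant to run.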

    public static void main(String[] args) throws Exception {

        // System.out.println("TEST 1");

        for (int i = 0; i < algorithms.length; i++) {
            AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);

            // ComputationalAgent trans = new WPSProcess(wps, algorithms[i]);

            List<ComputationalAgent> trans = TransducerersFactory.getTransducerers(configs[i]);
            trans.get(0).init();

            // trans.setConfiguration(configs[i]);
            // trans.init();
            Regressor.process(trans.get(0));
            StatisticalType st = trans.get(0).getOutput();

            // Print Result
            AnalysisLogger.getLogger().debug("ST:" + st);
            trans = null;
        }
    }

    private static AlgorithmConfiguration testPostgres1() {

        System.out.println("TEST 1: Postgres");

        AlgorithmConfiguration config = Regressor.getConfig();

        config.setAgent("GETTABLEDETAILS");

        // A test with a database postgres
        // config.setParam("ResourceName", "GP DB");
        // config.setParam("DatabaseName", "aquamapsdb");
        // config.setParam("SchemaName", "public");
        // config.setParam("TableName", "Divisions");
        // config.setParam("TableName", "all_world");
        // config.setParam("TableName", "biodiversity_lme");

        // StatisticalManager
        // config.setParam("ResourceName", "StatisticalManagerDataBase");
        // config.setParam("DatabaseName", "testdb");
        // config.setParam("SchemaName", "publicd");
        // config.setParam("SchemaName", "public");
        // config.setParam("TableName", "hcaf_d");
        // config.setParam("TableName", "hspec_id_c8e87e16_a0b4_4f9b_b48e_f1cf60ab104c");
        // config.setParam("TableName", "timeseries_id08b3abb9_c7b0_4b82_8117_64b69055416f");
        // config.setParam("TableName", "occurrence_species_idaf35d737_fb3e_43a7_b13a_611dfa97b064");
        // config.setParam("TableName", "occcluster_id_15271993_5129_4eda_92a2_fe8d22737007");
        // config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");

        // AquaMaps
        config.setParam("ResourceName", "AquaMaps Service DataBase");
        config.setParam("DatabaseName", "aquamapsorgupdated");
        config.setParam("SchemaName", "public");
        // config.setParam("TableName", "geometry_columns");
        config.setParam("TableName", "source_generation_requests");
        // config.setParam("TableName", "hspec2012_07_11_12_33_05_483");
        // config.setParam("TableName", "hspec_suitable_peng_test_tbsp_1");

        // Obis
        // config.setParam("ResourceName", "Obis2Repository");
        // config.setParam("DatabaseName", "obis");
        // config.setParam("SchemaName", "calc");
        // config.setParam("TableName", "map1d");

        config.setGcubeScope("/gcube/devsec");

        return config;
    }
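
    // A sketch (not part of the original tests) of a parameterised form of the GETTABLEDETAILS
    // set-up above: the method name is an assumption, while the parameter keys, the scope and
    // the resource/database/schema/table values it would receive are the ones already listed,
    // active or commented out, in testPostgres1().
    private static AlgorithmConfiguration tableDetailsConfig(String resourceName,
            String databaseName, String schemaName, String tableName) {

        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("GETTABLEDETAILS");

        config.setParam("ResourceName", resourceName);
        config.setParam("DatabaseName", databaseName);
        config.setParam("SchemaName", schemaName);
        config.setParam("TableName", tableName);

        config.setGcubeScope("/gcube/devsec");
        return config;
    }

    // e.g. tableDetailsConfig("AquaMaps Service DataBase", "aquamapsorgupdated", "public",
    //         "source_generation_requests");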

    private static AlgorithmConfiguration testPostgres2() {

        System.out.println("TEST 2: Postgres");

        AlgorithmConfiguration config = Regressor.getConfig();

        config.setAgent("TableDetails");

        // A test with a database postgres
        config.setParam("ResourceName", "GP DB");

        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        config.setParam("TableName", "area"); // the table has no rows

        // config.setParam("TableName", "all_world");
        // config.setParam("TableName", "biodiversity_lme");

        config.setGcubeScope("/gcube/devsec");

        return config;
    }

    private static AlgorithmConfiguration testPostgis() {

        System.out.println("TEST 3: Postgis");

        AlgorithmConfiguration config = Regressor.getConfig();

        config.setAgent("GETTABLEDETAILS");

        // a test with postgis
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        // config.setParam("TableName", "Divisions");
        // config.setParam("TableName", "ContinentalMargins");
        // config.setParam("TableName", "SeaVoX_sea_areas_polygons_v14");
        config.setParam("TableName", "laldrovandiaoleosa20130718230308233cest "); // note the trailing space in the table name

        config.setGcubeScope("/gcube/devsec");

        return config;
    }

    private static AlgorithmConfiguration Mysql() {

        System.out.println("TEST 4: Mysql");

        AlgorithmConfiguration config = Regressor.getConfig();

        config.setAgent("GETTABLEDETAILS");

        // a test with postgis
        // config.setParam("ResourceName", "Geoserver database ");
        // config.setParam("SchemaName", "public");
        // config.setParam("DatabaseName", "aquamapsdb");
        // config.setParam("TableName", "Divisions");

        // a test with a database mysql
        // config.setParam("ResourceName", "CatalogOfLife2010");
        config.setParam("ResourceName", "CatalogOfLife2010");
        config.setParam("DatabaseName", "col2oct2010");
        // config.setParam("TableName", "Common_names"); // mysql is not case sensitive
        config.setParam("TableName", "databases");

        // config.setParam("Query", "select * from common_names limit 3");

        config.setGcubeScope("/gcube/devsec");

        return config;
    }

    private static AlgorithmConfiguration NullInputValue() {

        System.out.println("TEST 5: Postgis NullInputValue");

        AlgorithmConfiguration config = Regressor.getConfig();

        config.setAgent("LISTTABLEDETAILS");

        // a test with postgis: SchemaName is left unset to check the handling of a missing input value
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        // config.setParam("SchemaName", "public");
        config.setParam("TableName", "Divisions");

        config.setGcubeScope("/gcube/devsec");

        return config;
    }

    private static AlgorithmConfiguration Postgres3() {

        System.out.println("TEST 6: Postgres");

        AlgorithmConfiguration config = Regressor.getConfig();

        config.setAgent("LISTTABLEDETAILS");

        // a test with postgis
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        config.setParam("TableName", "divisions"); // postgres is case sensitive, so lowercase "divisions" is a different name from "Divisions"

        config.setGcubeScope("/gcube/devsec");

        return config;
    }
}