project on algorithm moved to dataaccess

git-svn-id: https://svn.d4science.research-infrastructures.eu/gcube/trunk/data-access/DatabasesResourcesManagerAlgorithms@96621 82a268e6-3cf1-43bd-a215-b396298e98cf
This commit is contained in:
Loredana Liccardo 2014-06-04 14:31:03 +00:00
parent 4a73641630
commit da3697efa1
68 changed files with 935175 additions and 0 deletions

27
.classpath Normal file
View File

@ -0,0 +1,27 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src/main/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="src" output="target/test-classes" path="src/test/java">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="var" path="GLOBUS_LOCATION"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>

23
.project Normal file
View File

@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
<name>DatabasesResourcesManagerAlgorithms</name>
<comment></comment>
<projects>
</projects>
<buildSpec>
<buildCommand>
<name>org.eclipse.jdt.core.javabuilder</name>
<arguments>
</arguments>
</buildCommand>
<buildCommand>
<name>org.eclipse.m2e.core.maven2Builder</name>
<arguments>
</arguments>
</buildCommand>
</buildSpec>
<natures>
<nature>org.eclipse.jdt.core.javanature</nature>
<nature>org.eclipse.m2e.core.maven2Nature</nature>
</natures>
</projectDescription>

View File

@ -0,0 +1,4 @@
eclipse.preferences.version=1
encoding//src/main/java=UTF-8
encoding//src/test/java=UTF-8
encoding/<project>=UTF-8

View File

@ -0,0 +1,12 @@
eclipse.preferences.version=1
org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
org.eclipse.jdt.core.compiler.codegen.unusedLocal=preserve
org.eclipse.jdt.core.compiler.compliance=1.6
org.eclipse.jdt.core.compiler.debug.lineNumber=generate
org.eclipse.jdt.core.compiler.debug.localVariable=generate
org.eclipse.jdt.core.compiler.debug.sourceFile=generate
org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
org.eclipse.jdt.core.compiler.source=1.6

View File

@ -0,0 +1,4 @@
activeProfiles=
eclipse.preferences.version=1
resolveWorkspaceProjects=true
version=1

462919
Analysis.log.1 Normal file

File diff suppressed because it is too large Load Diff

461334
Analysis.log.2 Normal file

File diff suppressed because it is too large Load Diff

11
QueryResult.csv Normal file
View File

@ -0,0 +1,11 @@
record_id, name_code, common_name, language, country, reference_id, database_id, is_infraspecies
"1","AFD-Pul-1838","pinhead spot","English","USA","52836","7","0"
"16","Alg-111","Piedra de mar","Spanish","Spain","52834","7","0"
"17","Alg-111","Pierre vermiculée","French","France","52834","7","0"
"18","Alg-111","Stone weed","English","UK","52834","7","0"
"20","Alg-11694","Neptunes Necklace","English","New Zealand","0","2","0"
"1111710","IOP-171","long-style rush","","USA","52836","7","0"
"66","Alg-18","Gracilaire commune","French","France","52834","7","0"
"67","Alg-18","Gracilaria común","Spanish","Spain","52834","7","0"
"69","Alg-18","Warty gracilaria","English","UK","52834","7","0"
"405527","MOS-35163276","Mexican homomallium moss","","USA","52836","7","0"
1 record_id name_code common_name language country reference_id database_id is_infraspecies
2 1 AFD-Pul-1838 pinhead spot English USA 52836 7 0
3 16 Alg-111 Piedra de mar Spanish Spain 52834 7 0
4 17 Alg-111 Pierre vermiculée French France 52834 7 0
5 18 Alg-111 Stone weed English UK 52834 7 0
6 20 Alg-11694 Neptune’s Necklace English New Zealand 0 2 0
7 1111710 IOP-171 long-style rush USA 52836 7 0
8 66 Alg-18 Gracilaire commune French France 52834 7 0
9 67 Alg-18 Gracilaria común Spanish Spain 52834 7 0
10 69 Alg-18 Warty gracilaria English UK 52834 7 0
11 405527 MOS-35163276 Mexican homomallium moss USA 52836 7 0

0
QueryResult.txt Normal file
View File

101
SampleResult.csv Normal file
View File

@ -0,0 +1,101 @@
record_id, name_code, common_name, language, country, reference_id, database_id, is_infraspecies
"2121604","ITS-175610","Aplomado Falcon","English","","2214020","17","0"
"2163895","Fis-128361","&#31179;&#22993;","Mandarin Chinese","China Main","2541296","10","0"
"2154291","Fis-11737","&#28145;&#26420;&#20029;&#40060;","Mandarin Chinese","China Main","2541296","10","0"
"1081773","ILD-2243","Urd (Hi)","","","86318","15","0"
"179632","Fle-131470","blokha","Russian","Russia","0","22","1"
"2151134","Fis-114720","Notched triplefin","English","Australia","2537694","10","0"
"2262331","Fis-24976","Redstripe rockfish","English","Alaska","2535589","10","0"
"2390952","Fis-46978","Clarence River Cod","English","UK","2539030","10","0"
"2396602","Fis-50441","Lizz ta' lvant","Maltese","Malta","2538156","10","0"
"2160650","Fis-125159","&#26234;&#21033;&#28145;&#28023;&#39977;","Mandarin Chinese","China Main","2538555","10","0"
"2222763","Fis-23191","Cazon","Spanish","Nicaragua","2537779","10","0"
"2385189","Fis-35123","&#40644;&#20116;&#26840;&#40119;","Mandarin Chinese","China Main","2541296","10","0"
"1114713","Rep-252","Rodhain's Purple-glossed Snake","","","","8","0"
"2433898","ITS-30890","southern dawnflower","unspecified","","","17","1"
"2369600","Fis-32417","&#22810;&#26001;&#40154;","Mandarin Chinese","China Main","2541296","10","0"
"2459470","Con-4280","Mexican yew","Eng","GBR, USA","2570458","45","0"
"2426904","ITS-184237","thymeleaf sandwort","unspecified","","","17","1"
"2319090","Fis-28983","Morena","Spanish","Ecuador","2539421","10","0"
"2157206","Fis-122117","Leiltaos Zwergfächerfisch","German","Germany","2537780","10","0"
"179986","Fle-135900","blokha","Russian","Russia","0","22","0"
"2141580","ITS-76551","deepwater pondsnail","unspecified","","2213296","17","0"
"2141881","ITS-81503","chalky pitar","English","","2211683","17","0"
"2134159","ITS-561834","Forty-spotted Pardalote","English","","2214020","17","0"
"2308351","Fis-27223","Kirjotilapia","Finnish","Finland","2540442","10","0"
"2256344","Fis-24442","&#48533;&#46041;&#44040;&#52824;","Korean","Korea Rep","2541912","10","0"
"2267729","Fis-25192","Gelama","Javanese","Indonesia","2536447","10","0"
"2211690","Fis-22909","Tomkod atlantycki","Polish","Poland","2536482","10","0"
"2258740","Fis-24686","&#28784;&#40141;&#22823;&#30524;&#28023;&#40107;","Mandarin Chinese","China Main","2541296","10","0"
"2297412","Fis-26287","&#40657;&#28784;&#26420;&#20029;&#40060;","Mandarin Chinese","China Main","2541296","10","0"
"2167724","Fis-132534","&#19996;&#28023;&#26080;&#40141;&#40144;","Mandarin Chinese","China Main","2541296","10","0"
"2307967","Fis-27168","Lyretail panchax","English","USA","2535977","10","0"
"2180725","Fis-146448","&#30343;&#58127;","Mandarin Chinese","China Main","2538555","10","0"
"2307086","Fis-27070","Indang","Visayan","Philippines","2535272","10","0"
"2457623","Con-2620","Wilson spruce","Eng","GBR, USA","2570450","45","0"
"2279614","Fis-25608","Halfbeak","English","Mozambique","2536167","10","0"
"2312717","Fis-27853","Kokuten Hagi","Japanese","Japan","2537563","10","0"
"2368129","Fis-32293","&#22810;&#39035;&#39035;&#40748;&#40154;","Mandarin Chinese","China Main","2541296","10","0"
"2210079","Fis-22888","Jin mu lu","Mandarin Chinese","Singapore","2538772","10","0"
"178337","Fle-113820","blokha","Russian","Russia","0","22","1"
"2315532","Fis-28284","&#38647;&#27663;&#37329;&#32709;&#38592;&#39899;","Mandarin Chinese","China Main","2538555","10","0"
"2412229","Fis-60060","Huaycuya","Spanish","Spain","2536419","10","1"
"2187966","Fis-155686","Chalamathi","Tamil","India","2536709","10","0"
"2346423","Fis-30588","Olievis","Afrikaans","Namibia","2539068","10","0"
"2365722","Fis-31855","Shadowfin soldier","English","South Africa","2535830","10","0"
"2393963","Fis-48802","&#3611;&#3621;&#3634;&#3618;&#3629;&#3609;","Thai","Thailand","2538225","10","0"
"2152801","Fis-116136","Barbudo","Spanish","Ecuador","2536930","10","0"
"2230329","Fis-23397","Papagaio","Portuguese","Brazil","2536757","10","0"
"2194815","Fis-21693","Two-tone wrasse","English","UK","2537104","10","0"
"2240303","Fis-23658","Yapot","Tagalog","Philippines","2541391","10","0"
"2424944","ITS-179715","Brown Jay","English","","2214989","17","0"
"2274801","Fis-25388","Latilus gwinejski","Polish","Poland","2536482","10","0"
"2188736","Fis-157561","&#24085;&#27663;&#27874;&#39770;","Mandarin Chinese","China Main","2538555","10","0"
"2438603","ITS-42261","desert fescue","unspecified","","","17","1"
"1085149","ILD-3708","Kitsonakoho (Madagascar)","","","87036","15","0"
"2228767","Fis-23339","Tamban","Cebuano","Philippines","2538544","10","0"
"2256680","Fis-24462","Streamer searobin","English","USA","2540555","10","0"
"2119080","ITS-104991","lucerne plant bug","English","","2215183","17","0"
"2395229","Fis-49418","Korop","Ukrainian","Ukraine","2534972","10","1"
"2233860","Fis-23491","Mero negro","Spanish","Mexico","2540555","10","0"
"2279046","Fis-25596","Clouded eel","English","Australia","2541630","10","0"
"2321818","Fis-28979","&#37648;&#38957;&#31895;&#29313;&#40786;&#28023;&#39995;","Mandarin Chinese","China Main","2538555","10","0"
"2443009","ITS-528708","beach pinweed","unspecified","","","17","1"
"2300388","Fis-26559","Bia","Tagalog","Philippines","2536141","10","0"
"2121714","ITS-175673","Slaty-backed Forest Falcon","English","","2214020","17","0"
"2424814","ITS-179573","Greater 'Amakihi","English","","2214589","17","0"
"2338105","Fis-29907","Marut","Other","Philippines","2541391","10","0"
"1077205","ILD-1621","Fula Criqua","","","86686","15","0"
"2194962","Fis-21801","Adavalan-tiriki","Tamil","India","2536709","10","0"
"2175314","Fis-140816","Corvina","Portuguese","Brazil","2539090","10","0"
"2433483","ITS-30496","day flowering jassamine","unspecified","","","17","1"
"2231732","Fis-23424","&#32701;&#40131;&#40121;","Mandarin Chinese","China Main","2541296","10","0"
"2145007","ITS-505084","Chihuahuan fishhook cactus","unspecified","","2214989","17","0"
"2130268","ITS-558399","Collared Myna","English","","2214020","17","0"
"2125700","ITS-179826","Olomao","English","","2214020","17","0"
"2171635","Fis-136714","&#37509;&#39770;","Mandarin Chinese","China Main","2538555","10","0"
"2167169","Fis-132642","Marinka Waltonova","Czech","Czech Rep","2540431","10","0"
"2152681","Fis-116088","apretador","Spanish","Argentina","2540546","10","0"
"2383124","Fis-34758","Susulu","Chokwe","Angola","2537500","10","0"
"2315341","Fis-28265","Schreitmüllers Scheibensalmler","German","Germany","2537730","10","0"
"2366548","Fis-31903","Vuur-dikkop","Afrikaans","South Africa","2535248","10","0"
"1089932","ILD-7149","Arapati","","","85681","15","0"
"2214095","Fis-22951","Ivitaruk","Inuktitut","Canada","2538249","10","0"
"2305261","Fis-26906","Naithatte","Kannada","India","2540202","10","0"
"2287052","Fis-25847","Te bubunabanaba","Kiribati","Kiribati","2538253","10","0"
"2256430","Fis-24447","Bacalhau","Portuguese","Brazil","2539686","10","0"
"2168289","Fis-132793","Kaboiya","Misima-Paneati","Papua N Guin","2539589","10","0"
"2321897","Fis-28992","Snouted mullet","English","UK","2542291","10","0"
"2121369","ITS-175367","buse de Swainson","French","","2215127","17","0"
"2221182","Fis-23136","Rosada","Spanish","Spain","2535372","10","0"
"2156082","Fis-120664","Férit","French","Switzerland","2537804","10","0"
"2294894","Fis-26147","Common lizardfish","English","Philippines","2540195","10","0"
"2352881","Fis-31035","&#38271;&#21103;&#28023;&#34558;&#40060;","Mandarin Chinese","China Main","2541296","10","0"
"1086634","ILD-5478","Konyushina Poliova (Ukr)","","","87805","15","0"
"2425510","ITS-180035","Mariana flying fox","English","","","17","0"
"2222925","Fis-23192","Sucker","English","Trinidad Tob","2538855","10","0"
"2282798","Fis-25718","Képaara","Carolinian","N Marianas","2536481","10","0"
"2360741","Fis-31473","Isdang bato","Tagalog","Philippines","2541391","10","0"
"1666160","Sol-905","Anamama","Malgache","","","64","0"
"2308978","Fis-27318","&#22885;&#27931;&#33609;&#40151;","Mandarin Chinese","China Main","2541296","10","0"
"2380079","Fis-34138","Längsband-Ziersalmler","German","Germany","2537730","10","0"
1 record_id name_code common_name language country reference_id database_id is_infraspecies
2 2121604 ITS-175610 Aplomado Falcon English 2214020 17 0
3 2163895 Fis-128361 &#31179;&#22993; Mandarin Chinese China Main 2541296 10 0
4 2154291 Fis-11737 &#28145;&#26420;&#20029;&#40060; Mandarin Chinese China Main 2541296 10 0
5 1081773 ILD-2243 Urd (Hi) 86318 15 0
6 179632 Fle-131470 blokha Russian Russia 0 22 1
7 2151134 Fis-114720 Notched triplefin English Australia 2537694 10 0
8 2262331 Fis-24976 Redstripe rockfish English Alaska 2535589 10 0
9 2390952 Fis-46978 Clarence River Cod English UK 2539030 10 0
10 2396602 Fis-50441 Lizz ta' lvant Maltese Malta 2538156 10 0
11 2160650 Fis-125159 &#26234;&#21033;&#28145;&#28023;&#39977; Mandarin Chinese China Main 2538555 10 0
12 2222763 Fis-23191 Cazon Spanish Nicaragua 2537779 10 0
13 2385189 Fis-35123 &#40644;&#20116;&#26840;&#40119; Mandarin Chinese China Main 2541296 10 0
14 1114713 Rep-252 Rodhain's Purple-glossed Snake 8 0
15 2433898 ITS-30890 southern dawnflower unspecified 17 1
16 2369600 Fis-32417 &#22810;&#26001;&#40154; Mandarin Chinese China Main 2541296 10 0
17 2459470 Con-4280 Mexican yew Eng GBR, USA 2570458 45 0
18 2426904 ITS-184237 thymeleaf sandwort unspecified 17 1
19 2319090 Fis-28983 Morena Spanish Ecuador 2539421 10 0
20 2157206 Fis-122117 Leiltaos Zwergfächerfisch German Germany 2537780 10 0
21 179986 Fle-135900 blokha Russian Russia 0 22 0
22 2141580 ITS-76551 deepwater pondsnail unspecified 2213296 17 0
23 2141881 ITS-81503 chalky pitar English 2211683 17 0
24 2134159 ITS-561834 Forty-spotted Pardalote English 2214020 17 0
25 2308351 Fis-27223 Kirjotilapia Finnish Finland 2540442 10 0
26 2256344 Fis-24442 &#48533;&#46041;&#44040;&#52824; Korean Korea Rep 2541912 10 0
27 2267729 Fis-25192 Gelama Javanese Indonesia 2536447 10 0
28 2211690 Fis-22909 Tomkod atlantycki Polish Poland 2536482 10 0
29 2258740 Fis-24686 &#28784;&#40141;&#22823;&#30524;&#28023;&#40107; Mandarin Chinese China Main 2541296 10 0
30 2297412 Fis-26287 &#40657;&#28784;&#26420;&#20029;&#40060; Mandarin Chinese China Main 2541296 10 0
31 2167724 Fis-132534 &#19996;&#28023;&#26080;&#40141;&#40144; Mandarin Chinese China Main 2541296 10 0
32 2307967 Fis-27168 Lyretail panchax English USA 2535977 10 0
33 2180725 Fis-146448 &#30343;&#58127; Mandarin Chinese China Main 2538555 10 0
34 2307086 Fis-27070 Indang Visayan Philippines 2535272 10 0
35 2457623 Con-2620 Wilson spruce Eng GBR, USA 2570450 45 0
36 2279614 Fis-25608 Halfbeak English Mozambique 2536167 10 0
37 2312717 Fis-27853 Kokuten Hagi Japanese Japan 2537563 10 0
38 2368129 Fis-32293 &#22810;&#39035;&#39035;&#40748;&#40154; Mandarin Chinese China Main 2541296 10 0
39 2210079 Fis-22888 Jin mu lu Mandarin Chinese Singapore 2538772 10 0
40 178337 Fle-113820 blokha Russian Russia 0 22 1
41 2315532 Fis-28284 &#38647;&#27663;&#37329;&#32709;&#38592;&#39899; Mandarin Chinese China Main 2538555 10 0
42 2412229 Fis-60060 Huaycuya Spanish Spain 2536419 10 1
43 2187966 Fis-155686 Chalamathi Tamil India 2536709 10 0
44 2346423 Fis-30588 Olievis Afrikaans Namibia 2539068 10 0
45 2365722 Fis-31855 Shadowfin soldier English South Africa 2535830 10 0
46 2393963 Fis-48802 &#3611;&#3621;&#3634;&#3618;&#3629;&#3609; Thai Thailand 2538225 10 0
47 2152801 Fis-116136 Barbudo Spanish Ecuador 2536930 10 0
48 2230329 Fis-23397 Papagaio Portuguese Brazil 2536757 10 0
49 2194815 Fis-21693 Two-tone wrasse English UK 2537104 10 0
50 2240303 Fis-23658 Yapot Tagalog Philippines 2541391 10 0
51 2424944 ITS-179715 Brown Jay English 2214989 17 0
52 2274801 Fis-25388 Latilus gwinejski Polish Poland 2536482 10 0
53 2188736 Fis-157561 &#24085;&#27663;&#27874;&#39770; Mandarin Chinese China Main 2538555 10 0
54 2438603 ITS-42261 desert fescue unspecified 17 1
55 1085149 ILD-3708 Kitsonakoho (Madagascar) 87036 15 0
56 2228767 Fis-23339 Tamban Cebuano Philippines 2538544 10 0
57 2256680 Fis-24462 Streamer searobin English USA 2540555 10 0
58 2119080 ITS-104991 lucerne plant bug English 2215183 17 0
59 2395229 Fis-49418 Korop Ukrainian Ukraine 2534972 10 1
60 2233860 Fis-23491 Mero negro Spanish Mexico 2540555 10 0
61 2279046 Fis-25596 Clouded eel English Australia 2541630 10 0
62 2321818 Fis-28979 &#37648;&#38957;&#31895;&#29313;&#40786;&#28023;&#39995; Mandarin Chinese China Main 2538555 10 0
63 2443009 ITS-528708 beach pinweed unspecified 17 1
64 2300388 Fis-26559 Bia Tagalog Philippines 2536141 10 0
65 2121714 ITS-175673 Slaty-backed Forest Falcon English 2214020 17 0
66 2424814 ITS-179573 Greater 'Amakihi English 2214589 17 0
67 2338105 Fis-29907 Marut Other Philippines 2541391 10 0
68 1077205 ILD-1621 Fula Criqua 86686 15 0
69 2194962 Fis-21801 Adavalan-tiriki Tamil India 2536709 10 0
70 2175314 Fis-140816 Corvina Portuguese Brazil 2539090 10 0
71 2433483 ITS-30496 day flowering jassamine unspecified 17 1
72 2231732 Fis-23424 &#32701;&#40131;&#40121; Mandarin Chinese China Main 2541296 10 0
73 2145007 ITS-505084 Chihuahuan fishhook cactus unspecified 2214989 17 0
74 2130268 ITS-558399 Collared Myna English 2214020 17 0
75 2125700 ITS-179826 Olomao English 2214020 17 0
76 2171635 Fis-136714 &#37509;&#39770; Mandarin Chinese China Main 2538555 10 0
77 2167169 Fis-132642 Marinka Waltonova Czech Czech Rep 2540431 10 0
78 2152681 Fis-116088 apretador Spanish Argentina 2540546 10 0
79 2383124 Fis-34758 Susulu Chokwe Angola 2537500 10 0
80 2315341 Fis-28265 Schreitmüllers Scheibensalmler German Germany 2537730 10 0
81 2366548 Fis-31903 Vuur-dikkop Afrikaans South Africa 2535248 10 0
82 1089932 ILD-7149 Arapati 85681 15 0
83 2214095 Fis-22951 Ivitaruk Inuktitut Canada 2538249 10 0
84 2305261 Fis-26906 Naithatte Kannada India 2540202 10 0
85 2287052 Fis-25847 Te bubunabanaba Kiribati Kiribati 2538253 10 0
86 2256430 Fis-24447 Bacalhau Portuguese Brazil 2539686 10 0
87 2168289 Fis-132793 Kaboiya Misima-Paneati Papua N Guin 2539589 10 0
88 2321897 Fis-28992 Snouted mullet English UK 2542291 10 0
89 2121369 ITS-175367 buse de Swainson French 2215127 17 0
90 2221182 Fis-23136 Rosada Spanish Spain 2535372 10 0
91 2156082 Fis-120664 Férit French Switzerland 2537804 10 0
92 2294894 Fis-26147 Common lizardfish English Philippines 2540195 10 0
93 2352881 Fis-31035 &#38271;&#21103;&#28023;&#34558;&#40060; Mandarin Chinese China Main 2541296 10 0
94 1086634 ILD-5478 Konyushina Poliova (Ukr) 87805 15 0
95 2425510 ITS-180035 Mariana flying fox English 17 0
96 2222925 Fis-23192 Sucker English Trinidad Tob 2538855 10 0
97 2282798 Fis-25718 Képaara Carolinian N Marianas 2536481 10 0
98 2360741 Fis-31473 Isdang bato Tagalog Philippines 2541391 10 0
99 1666160 Sol-905 Anamama Malgache 64 0
100 2308978 Fis-27318 &#22885;&#27931;&#33609;&#40151; Mandarin Chinese China Main 2541296 10 0
101 2380079 Fis-34138 Längsband-Ziersalmler German Germany 2537730 10 0

101
SampleResult.txt Normal file
View File

@ -0,0 +1,101 @@
percentage, isofficial, language, countrycode
52.4,true,Pashto,AFG
95.6,true,Dutch,NLD
86.2,true,Papiamento,ANT
97.9,true,Albaniana,ALB
86,true,Arabic,DZA
90.6,true,Samoan,ASM
44.6,false,Spanish,AND
37.2,false,Ovimbundu,AGO
0,true,English,AIA
95.7,false,Creole English,ATG
42,true,Arabic,ARE
96.8,true,Spanish,ARG
93.4,true,Armenian,ARM
76.7,false,Papiamento,ABW
81.2,true,English,AUS
89,true,Azerbaijani,AZE
89.7,false,Creole English,BHS
67.7,true,Arabic,BHR
97.7,true,Bengali,BGD
95.1,false,Bajan,BRB
59.2,true,Dutch,BEL
50.8,true,English,BLZ
39.8,false,Fon,BEN
100,true,English,BMU
50,true,Dzongkha,BTN
87.7,true,Spanish,BOL
99.2,true,Serbo-Croatian,BIH
75.5,false,Tswana,BWA
97.5,true,Portuguese,BRA
97.3,true,English,GBR
0,true,English,VGB
45.5,true,Malay,BRN
83.2,true,Bulgariana,BGR
50.2,false,Mossi,BFA
98.1,true,Kirundi,BDI
0,true,English,CYM
89.7,true,Spanish,CHL
0,true,Maori,COK
97.5,true,Spanish,CRI
43.9,false,Somali,DJI
100,false,Creole English,DMA
98,true,Spanish,DOM
93,true,Spanish,ECU
98.8,true,Arabic,EGY
100,true,Spanish,SLV
49.1,true,Tigrinja,ERI
74.4,true,Spanish,ESP
22.7,true,Zulu,ZAF
31,false,Oromo,ETH
0,true,English,FLK
50.8,true,Fijian,FJI
29.3,true,Pilipino,PHL
100,true,Faroese,FRO
35.8,false,Fang,GAB
34.1,false,Malinke,GMB
71.7,true,Georgiana,GEO
52.4,false,Akan,GHA
88.9,true,English,GIB
100,false,Creole English,GRD
87.5,true,Greenlandic,GRL
95,false,Creole French,GLP
37.5,true,English,GUM
64.7,true,Spanish,GTM
38.6,false,Ful,GIN
36.4,false,Crioulo,GNB
96.4,false,Creole English,GUY
100,false,Haiti Creole,HTI
97.2,true,Spanish,HND
88.7,false,Canton Chinese,HKG
0,true,Norwegian,SJM
39.4,false,Javanese,IDN
39.9,true,Hindi,IND
77.2,true,Arabic,IRQ
45.7,true,Persian,IRN
98.4,true,English,IRL
95.7,true,Icelandic,ISL
63.1,true,Hebrew,ISR
94.1,true,Italian,ITA
0,false,Sunda,TMP
92,true,German,AUT
94.2,false,Creole English,JAM
99.1,true,Japanese,JPN
99.6,true,Arabic,YEM
97.9,true,Arabic,JOR
0,false,Chinese,CXR
75.2,true,Serbo-Croatian,YUG
88.6,true,Khmer,KHM
19.7,false,Fang,CMR
60.4,true,English,CAN
100,false,Crioulo,CPV
46,true,Kazakh,KAZ
20.9,false,Kikuyu,KEN
23.8,false,Gbaya,CAF
92,true,Chinese,CHN
59.7,true,Kirgiz,KGZ
98.9,true,Kiribati,KIR
99,true,Spanish,COL
75,true,Comorian,COM
51.5,false,Kongo,COG
18,false,Luba,COD

Binary file not shown.

View File

@ -0,0 +1 @@
TEST_ALGORITHM=org.gcube.test.algorithm.SimpleAlg

30
cfg/ALog.properties Normal file
View File

@ -0,0 +1,30 @@
#### Use two appenders, one to log to console, another to log to a file
log4j.rootCategory= R
#### First appender writes to console
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
#log4j.appender.stdout.layout.ConversionPattern=%m%n
#log4j.appender.stdout.File=Analysis.log
#### Second appender writes to a file
log4j.logger.AnalysisLogger=trace, stdout,R
log4j.appender.R=org.apache.log4j.RollingFileAppender
log4j.appender.R.File=Analysis.log
log4j.appender.R.MaxFileSize=50000KB
log4j.appender.R.MaxBackupIndex=2
log4j.appender.R.layout=org.apache.log4j.PatternLayout
log4j.appender.R.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n
#log4j.appender.R.layout.ConversionPattern=%m%n
#### Third appender writes to a file
log4j.logger.org.hibernate=H
#log4j.appender.H=org.apache.log4j.RollingFileAppender
log4j.appender.H=org.apache.log4j.AsyncAppender
#log4j.appender.H.File=HibernateLog.log
#log4j.appender.H.MaxFileSize=1024KB
#log4j.appender.H.MaxBackupIndex=2
log4j.appender.H.layout=org.apache.log4j.PatternLayout
log4j.appender.H.layout.ConversionPattern=%d{dd/MM/yyyy HH:mm:ss} %p %t %c - %m%n

View File

@ -0,0 +1,20 @@
<?xml version='1.0' encoding='UTF-8'?>
<hibernate-configuration>
<session-factory>
<property name="connection.driver_class">org.postgresql.Driver</property>
<property name="connection.provider_class">org.hibernate.connection.C3P0ConnectionProvider</property>
<!-- <property name="connection.url">jdbc:postgresql://localhost/testdb</property> -->
<!-- <property name="connection.url">jdbc:postgresql://146.48.87.169/testdb</property> -->
<property name="connection.url">jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis</property>
<property name="connection.username">gcube</property> -->
<property name="connection.password">d4science2</property>
<!-- <property name="dialect">org.hibernatespatial.postgis.PostgisDialect</property>-->
<property name="dialect">org.hibernate.dialect.PostgreSQLDialect</property>
<property name="transaction.factory_class">org.hibernate.transaction.JDBCTransactionFactory</property>
<property name="c3p0.timeout">0</property>
<property name="c3p0.max_size">1</property>
<property name="c3p0.max_statements">0</property>
<property name="c3p0.min_size">1</property>
<property name="current_session_context_class">thread</property>
</session-factory>
</hibernate-configuration>

3
cfg/QueryResult.txt Normal file
View File

@ -0,0 +1,3 @@
1,AFD-Pul-1838,pinhead spot,English,USA,52836,7,0
16,Alg-111,Piedra de mar,Spanish,Spain,52834,7,0
17,Alg-111,Pierre vermiculée,French,France,52834,7,0

View File

680
cfg/SampleOnTable.txt Normal file
View File

@ -0,0 +1,680 @@
9.7703E-7,0.00563891
1.4364E-4,0.0510053
4.19937E-5,0.0279353
2.17743E-5,0.0185856
1.944E-4,0.0632979
3.44589E-6,0.00799436
4.25587E-6,0.00841894
4.78409E-6,0.0109983
6.89079E-5,0.0364259
4.13036E-4,0.121509
2.31896E-5,0.0191968
1.1745E-5,0.0180833
2.35434E-5,0.0207482
1.59746E-4,0.0812034
7.69376E-5,0.0567212
1.33776E-5,0.0141317
1.55566E-5,0.0205317
3.68715E-4,0.116281
31.0365,22.4365
4.9397E-6,0.00880015
1.60402E-5,0.0176136
20.0,18
0.0152733,1.36372
2.79727E-4,0.17039
1.38851E-5,0.0161386
3.36371,19.7354
1.00472E-5,0.0129909
7.62046E-4,0.18213
6.13964E-5,0.0296894
8.63237E-6,0.0123222
9.45758E-6,0.0129748
7.65775E-6,0.0104123
0.333088,8.76726
5.47461E-6,0.0114068
2.10634E-4,0.0661339
0.00120054,0.61099
2.35178E-6,0.00580937
7.06851E-4,0.137094
3.63206E-6,0.00796127
12.6305,36.8429
9.10321E-6,0.0126034
1.5285E-5,0.0157055
4.69194E-6,0.0085918
9.4151E-4,0.189438
1.00963E-5,0.015446
13.407,107.25
2.32949E-5,0.0252971
4.81072E-6,0.0110336
1.38707E-5,0.0180138
46.125,29.5
7.68974E-5,0.0447466
7.74824E-6,0.0104459
6.3402E-6,0.0098905
1.04955E-5,0.0152612
1.17834E-4,0.0500829
3.40577E-6,0.00804342
4.81313E-6,0.0109715
4.8783E-6,0.00874993
5.67719E-6,0.0114547
0.0011657,0.183435
6.38817E-6,0.00985259
5.2251E-5,0.0386048
8.08833E-6,0.012186
2.0055E-6,0.00529332
1.67406E-5,0.0170766
7.32496E-6,0.0117775
4.98469E-4,0.102785
4.21296E-6,0.0108077
0.00144765,0.234982
7.00299E-6,0.0121675
1.59803E-5,0.0157567
1.43714E-5,0.0188496
1.65039E-4,0.05943
3.16313E-5,0.0224181
5.73683E-6,0.0111805
4.93268E-6,0.00879117
1.37418E-5,0.0182006
5.61506E-6,0.011055
3.57414E-5,0.0348078
23.0204,48.8665
5.18973E-5,0.0365254
1.52769E-5,0.0162869
1.87537E-5,0.0195511
1.05333E-5,0.0121601
5.4302E-6,0.0112553
8.63725E-6,0.0182325
1.09125E-5,0.0153535
1.63658E-5,0.0169236
2.78859E-5,0.0256243
6.21926E-6,0.0111308
1.0126E-4,0.042484
4.62966E-4,0.180625
6.32661E-6,0.00981543
1.52606E-4,0.0517762
2.12023E-5,0.0172204
20.0,18
3.74942E-5,0.0295507
2.05478E-5,0.0219105
7.66321E-6,0.0146046
1.30299E-4,0.0465436
2.71046E-5,0.0252701
5.25587E-6,0.0111208
1.51102E-5,0.0167551
2.39602E-5,0.0219328
2.12038E-4,0.063825
9.0321E-6,0.0127005
0.00137113,0.340533
0.00331001,0.865211
0.0249418,2.59526
3.4304E-6,0.00801633
6.51167E-6,0.0115893
25.8366,33.9156
9.11521E-6,0.0173653
1.0255E-5,0.0119973
7.2849E-6,0.0103475
6.98957E-6,0.0142729
1.9099E-4,0.0885071
2.58334E-5,0.0229562
7.2885E-6,0.0145269
6.1871E-6,0.00975862
1.46131E-5,0.0168372
1.35135E-5,0.0147707
2.84612E-5,0.0244796
5.57003E-6,0.0109714
1.1586E-4,0.0596005
1.43117E-5,0.016303
2.83717E-4,0.0676009
3.46651E-5,0.0325543
1.3657E-4,0.0479645
6.32937E-5,0.0364656
4.90611E-6,0.0110709
3.19951E-5,0.0230177
6.50331E-5,0.0408002
5.68722E-6,0.0110757
1.06169E-4,0.0539417
7.59937E-6,0.0103364
6.62968E-6,0.00974917
7.91608E-5,0.0461142
1.47453E-5,0.0188568
2.5939E-7,0.00330998
1.15728E-4,0.102883
3.49181E-5,0.0248281
1.37263E-4,0.0675856
6.22479E-6,0.00976885
3.99077E-5,0.0264196
2.65142E-5,0.020737
3.88691E-6,0.00890433
1.43979E-5,0.0151321
1.62729E-5,0.0167599
5.14887E-5,0.0368274
1.0498E-4,0.0706319
2.97793E-4,0.0834792
5.48516E-6,0.010986
5.393E-8,0.00204208
3.82439E-5,0.0264744
4.12254E-5,0.0303821
8.63469E-5,0.0507739
8.80536E-6,0.0115935
7.98975E-5,0.0650532
8.81541E-6,0.012471
2.74345E-5,0.0269974
1.65052E-5,0.0193822
4.29089E-6,0.0106643
1.55832E-5,0.0191981
20.0,18
0.00120422,0.163573
41.2624,61.4094
1.12162E-4,0.0514486
6.26934E-6,0.00976265
2.87747E-6,0.0107453
5.67042E-6,0.0111357
1.15193E-5,0.0152805
4.94424E-6,0.00917829
7.22401E-6,0.0117199
9.5606E-7,0.00466656
4.90297E-6,0.00878456
5.60354E-6,0.0112022
1.73893E-5,0.0247408
1.26203E-4,0.0684286
10.5093,17.6008
7.54061E-6,0.0103009
6.33218E-5,0.0396868
1.64273E-5,0.0169376
1.5592E-4,0.0567286
38.4655,106.582
7.15368E-6,0.0114791
6.90115E-5,0.042822
3.93766E-4,0.117894
1.17915E-5,0.0156303
15.2436,31.9661
1.63266E-5,0.0185158
1.52654E-4,0.0571452
2.55482E-5,0.0246063
2.02187E-5,0.0196909
4.91042E-6,0.00916803
6.08298E-6,0.00959011
4.86997E-6,0.00873849
9.70018E-6,0.0150352
4.11043E-5,0.0310217
6.79588E-6,0.0119992
4.04501E-6,0.0104922
1.53992E-5,0.0148827
6.72016E-5,0.0365898
27.2604,53.1083
9.51526E-5,0.0424098
1.96001E-4,0.072912
2.22597E-5,0.0204549
4.09066E-6,0.0109499
4.93808E-6,0.00882365
1.1038E-4,0.0522993
1.03582E-5,0.0120528
1.33266E-5,0.0185396
2.49224E-4,0.12057
6.28562E-6,0.0111544
5.48005E-6,0.013855
5.39748E-5,0.043773
7.35521E-6,0.0117967
1.39825E-4,0.0454189
5.58352E-5,0.0321382
5.59136E-6,0.0110979
8.84707E-5,0.036729
3.03348E-5,0.0216452
2.78348E-5,0.0214809
8.949E-5,0.0524961
1.42036E-5,0.0185446
6.88417E-6,0.0120961
8.78723E-6,0.0123838
32.0,24
9.72656E-6,0.0153021
5.14264E-4,0.118109
4.29736E-4,0.124482
4.80843E-6,0.0111309
0.00746446,0.79918
2.3214E-5,0.0209326
7.0459E-5,0.0355598
3.43171E-6,0.00807386
7.0591E-5,0.0487308
7.76933E-6,0.0120165
2.88501E-5,0.0243687
7.24E-5,0.040359
7.55489E-5,0.0469141
1.56898E-4,0.0632761
1.09235E-5,0.0124049
4.05651E-5,0.042648
1.51465E-5,0.0154193
1.78831E-5,0.0203524
1.51837E-5,0.0156147
7.24739E-6,0.0117771
1.64521E-4,0.0643537
1.00094E-5,0.0149554
4.64507E-6,0.00863177
6.99993E-6,0.0115393
3.76117E-5,0.030985
7.7686E-6,0.014735
8.01623E-5,0.0413427
1.1339E-5,0.0133611
4.87665E-6,0.00874776
1.51463E-4,0.0628629
6.31441E-6,0.0114204
1.77934E-5,0.0168571
0.990086,6.94953
3.46749E-6,0.00799544
3.54416E-5,0.0322305
1.45402E-5,0.0146951
3.40148E-5,0.0245084
1.97484E-5,0.0196968
3.87369E-5,0.0247396
5.70696E-6,0.0111178
99.2602,175.239
1.29631E-5,0.014126
10.2418,50.4454
1.09268E-5,0.0152518
12.2007,20.3949
6.87815E-5,0.0532574
8.57708E-4,0.125572
2.15802E-4,0.105642
4.796E-6,0.00870184
5.71075E-6,0.0110694
1.53091E-5,0.0187159
1.26111E-5,0.0140716
1.76523E-5,0.0171751
2.6124E-5,0.0256958
7.23155E-5,0.035976
5.77085E-5,0.030605
2.07301E-5,0.024814
7.63905E-5,0.0381513
0.00503634,0.3736
2.13352E-5,0.0201323
2.14379E-4,0.0765103
1.29366E-4,0.0653318
4.81075E-6,0.0087364
1.2456E-5,0.0157668
4.72969E-6,0.00877609
9.5267E-6,0.0130738
2.70645E-5,0.0233318
4.12543E-4,0.121654
2.28287E-5,0.0245126
6.65116E-6,0.0117262
1.03708E-5,0.0120501
5.11625E-5,0.0310841
8.50127E-6,0.0112504
3.39338E-5,0.0252766
2.3338E-4,0.0896917
4.36054E-4,0.102802
1.0299E-5,0.013153
3.64824E-6,0.0072728
8.7311E-6,0.0124873
4.99389E-6,0.00883864
3.01524E-5,0.0221235
1.43308E-5,0.0185584
0.0135375,1.15703
4.23928E-5,0.0315665
7.87581E-6,0.014493
2.37326E-5,0.0250884
2.33263E-5,0.0210637
6.5444E-5,0.0333008
1.24917E-5,0.0148961
2.5057E-5,0.0258766
7.99923E-5,0.0392109
6.7407E-7,0.00370689
1.64934E-5,0.0156925
7.34689E-6,0.0117761
1.10116E-5,0.0155375
3.49003E-5,0.0240023
4.79389E-5,0.0336569
5.57741E-6,0.0111047
3.36945E-5,0.0233381
7.64675E-6,0.010386
0.00799724,0.659073
6.71526E-5,0.0344918
3.96247E-5,0.0253391
2.47065E-4,0.0686088
9.36907E-5,0.042208
1.94518E-4,0.11588
4.81752E-6,0.0109983
0.00100041,0.179554
4.82336E-6,0.00871537
3.26412E-4,0.101085
2.01583E-5,0.0202071
4.58397E-4,0.0947255
1.23653E-5,0.0160874
5.32341E-6,0.0117989
4.30236,13.4722
1.96971E-5,0.0183275
2.09065E-4,0.0598918
0.0028134,0.359766
4.87499E-4,0.105899
4.89188E-5,0.0320554
4.81601E-6,0.00869791
4.72743E-6,0.00900802
7.07032E-4,0.215604
3.35413E-4,0.078974
2.72193E-5,0.0288247
1.05278E-4,0.0590045
5.48046E-6,0.0113265
7.62165E-5,0.0417477
1.23731E-5,0.015662
1.49468E-5,0.024292
3.48717E-5,0.0348614
0.0164454,0.67151
1.7237E-4,0.0548759
4.66219E-6,0.00863206
7.299E-8,0.00174579
2.00278E-5,0.0187269
0.00899511,0.602984
6.2523E-6,0.00976894
8.42477E-4,0.161537
3.62395E-6,0.00984839
1.04946E-4,0.0435103
6.53428,37.7425
1.39836E-5,0.0147882
4.75411E-4,0.102706
6.92834E-6,0.0124173
36.908,29.2145
3.17329E-5,0.0321644
4.80431E-6,0.0087475
4.70384E-6,0.00854259
8.21236E-5,0.0480911
3.49175E-5,0.0260483
1.94338E-5,0.0173099
5.64119E-5,0.0403665
3.33142E-5,0.025431
3.13217E-5,0.0234571
7.02198E-5,0.0343343
7.3128E-7,0.00380925
4.78409E-6,0.0109895
5.22142E-6,0.00839373
1.32269E-5,0.0195893
4.29958E-6,0.0114484
9.91712E-6,0.0149054
5.07431,29.0843
2.51492E-4,0.0943709
6.32571E-6,0.00984089
6.90728E-5,0.046415
2.93913E-5,0.0214902
2.31863E-5,0.0183523
3.47137E-6,0.00809219
3.3656E-6,0.00790383
4.73489E-5,0.0314832
8.05622E-5,0.0354184
4.74366E-6,0.00861308
16.7146,47.4161
3.51564E-6,0.00806651
8.33728E-5,0.0630348
4.30414E-5,0.0257771
7.08169E-6,0.0121962
1.94905E-5,0.017493
6.39635E-5,0.0425276
1.14504E-5,0.0135943
62.4943,42.9971
1.9094E-5,0.0207893
2.76009E-5,0.0207111
0.0010282,0.204352
2.8121E-6,0.00956001
8.52094E-6,0.0124186
2.35137E-4,0.0780883
1.07303E-4,0.0715886
8.19546E-6,0.0120523
2.738E-5,0.0253871
2.82366E-4,0.102415
7.63725E-6,0.0103654
2.3305E-4,0.0980442
8.84575E-5,0.0414556
1.46621E-5,0.0183132
0.00185081,0.266569
1.61395E-6,0.00704063
7.20101E-4,0.130395
1.60633E-5,0.0169116
8.33848E-4,0.14828
6.05287E-4,0.119328
7.37463E-6,0.0102347
6.90496E-6,0.0114783
0.00762325,0.5351
1.06799E-5,0.0181149
2.26395E-5,0.0190986
2.84254E-6,0.0106435
2.60988E-5,0.0277546
9.38991E-6,0.014974
1.20928E-5,0.0152344
1.18972E-5,0.0162437
1.04221E-4,0.0434984
4.62954E-5,0.0304854
9.29013E-5,0.0450334
6.38068E-6,0.00986119
8.70063E-6,0.0123836
5.47485E-5,0.0343837
1.90552E-4,0.0596715
1.60205E-4,0.0605616
4.83028E-6,0.00870924
1.63153E-5,0.0168672
1.0141E-5,0.0130523
1.85877E-5,0.022111
3.73099E-5,0.0293874
1.20618E-5,0.0141322
4.80132E-4,0.239321
1.86734E-5,0.0183217
2.15333E-6,0.00704786
2.8177E-7,0.00764778
1.19829E-4,0.0600073
2.11027E-4,0.0624342
4.15023E-5,0.0404231
6.3142E-5,0.0475227
7.01177E-5,0.0324059
3.50275E-5,0.0277303
3.76439E-6,0.00804053
4.16921E-6,0.0106673
7.86189E-6,0.0148531
1.17702E-5,0.0140042
2.07788E-6,0.00710602
7.33329E-6,0.0117783
0.00185396,0.334392
1.45967E-5,0.0152665
1.89768E-5,0.0220377
6.33974E-6,0.00988845
3.24418E-4,0.101348
8.90444E-6,0.0126817
2.90857E-5,0.0345513
5.68757E-6,0.00918417
0.00709752,0.577801
1.44068E-4,0.0630899
4.7697E-6,0.00871275
1.24975E-5,0.013882
14.3761,15.9211
2.99573E-4,0.0790785
3.90447E-5,0.0277512
1.04486E-5,0.0120927
7.32497E-5,0.0474162
0.0328583,1.68561
1.25832E-5,0.0169329
6.05401E-6,0.00966575
0.8522,7.41662
2.9527E-5,0.0232313
2.05127E-6,0.00704115
6.69359E-6,0.0142841
3.62366E-5,0.0249559
2.28879E-5,0.0194495
6.56421E-5,0.0413058
2.35693E-5,0.0193053
1.90307E-5,0.0178288
1.00153E-5,0.018204
2.57921E-5,0.0256418
20.0,18
4.8505E-6,0.0109678
5.61072E-6,0.0114127
4.7754E-5,0.0301519
2.42677E-5,0.0232316
1.89188E-5,0.0208227
6.61769E-4,0.167716
2.97141E-5,0.0223764
1.14063E-4,0.0447234
9.09487E-5,0.0446335
4.67532E-5,0.0283271
5.84188E-4,0.12433
2.28362E-5,0.0251913
4.84907E-5,0.0308274
1.11794E-5,0.0137001
27.0718,51.0763
1.08003E-4,0.0454306
9.38431E-6,0.0129279
2.04809E-6,0.0070513
3.12931E-5,0.028814
0.00189977,0.285349
1.20323E-5,0.0145518
6.43857E-6,0.0115764
9.13355E-6,0.0175894
1.13331E-5,0.0144328
0.00112667,0.202879
1.13165E-5,0.0142628
2.8717E-5,0.0212844
4.96852E-4,0.131825
13.4408,74.2049
1.95355E-4,0.0738632
8.48093E-5,0.0387794
8.52129E-6,0.0121269
5.15058E-5,0.031754
3.37542E-5,0.0249784
1.31215E-5,0.0153912
2.96757E-4,0.134141
4.9007E-5,0.0298154
4.18864E-5,0.0305646
2.08578E-6,0.00538811
0.0146687,2.34031
2.59284E-4,0.0825437
1.03252E-4,0.0543814
90.6332,103.019
5.12688E-5,0.0414163
8.2902E-7,0.00603566
3.18194E-6,0.00689977
8.55502E-5,0.0364648
4.8763E-6,0.00877332
3.54058E-6,0.00809896
1.38108E-6,0.00813992
1.90862E-5,0.0175106
4.88955E-6,0.00911519
15.7244,17.2081
4.99868E-6,0.0108084
0.00539245,0.624264
1.69463E-4,0.0630869
0.00259102,0.888393
8.39712E-5,0.0435661
4.73652E-6,0.010929
7.24351E-6,0.0116926
13.419,30.2048
1.11495E-4,0.0497078
6.18346E-6,0.00972724
6.87393E-6,0.0114111
1.79846E-5,0.0175689
4.57238E-5,0.0356232
8.10548E-4,0.140009
2.99394E-4,0.0757078
2.45417E-5,0.019924
5.04439E-6,0.00884144
5.42362E-6,0.0110175
2.20127E-5,0.0267511
9.94813E-5,0.0508959
1.25062,17.9565
1.61344E-4,0.0625028
0.00112262,0.229034
1.04855E-5,0.0131256
0.00125686,0.210136
5.17521,33.5174
4.7696E-6,0.00858219
3.081E-8,0.00359469
1.08837E-5,0.0146307
1.16904E-4,0.0594755
1.20467E-4,0.0768319
1.45649E-5,0.0152501
9.76951E-4,0.208879
2.26888E-5,0.0188056
1.72038E-5,0.0213398
9.7678E-7,0.00611272
5.10934E-6,0.00847851
4.94181E-6,0.00884812
6.06736E-6,0.00963022
4.22465E-4,0.106611
5.61754E-6,0.011148
0.0189674,1.07685
6.30447E-6,0.00967734
0.63882,14.4948
1.05014E-5,0.0152593
3.78932E-5,0.0276698
1.12453E-5,0.0155788
5.50268E-6,0.0113421
6.01866E-6,0.00979631
9.59838E-6,0.0128491
4.78663E-4,0.120393
1.80902E-4,0.0679503
3.50241E-6,0.00803909
1.59405E-5,0.016117
3.68681E-5,0.0332498
1.06166E-5,0.0129311
8.03002E-6,0.0121394
6.56986E-6,0.0116726
6.12983E-6,0.00961705
11.0619,36.9611
1.19807E-4,0.052702
2.12194E-5,0.0225268
8.16354E-6,0.012526
12.5088,23.2871
6.63737E-6,0.011786
8.69258E-6,0.0123714
4.05835E-5,0.0308466
6.67081E-6,0.0117398
1.22291E-4,0.0549408
5.35365E-5,0.037192
9.66958E-4,0.234486
6.06076E-6,0.00968083
5.31188E-6,0.0110805
4.54449E-6,0.00877477
7.30305E-6,0.0120953
2.96538E-4,0.0964961
2.66118E-5,0.0226364
1.95749E-5,0.0201733
5.43957E-5,0.0283473
2.10492E-6,0.00694118
3.22508E-5,0.0245779
4.93528E-6,0.00873575
8.70525E-5,0.03822
1.95783E-5,0.0203246
4.83264E-6,0.0090926
7.3687E-4,0.200016
1.4543E-5,0.0148591
6.35245E-6,0.00988494
5.644E-7,0.00566877
3.38921E-6,0.00804852
1.22734E-4,0.0534114
1.16764E-5,0.0140771
5.72278E-5,0.029283
2.69315E-4,0.0838663
6.55163E-5,0.0582305
8.29973E-6,0.012344
6.14166E-5,0.0414622
2.56287E-5,0.0206759
4.83934E-6,0.011014
1.62917E-5,0.0188659
1.06344E-5,0.0127406
6.25899E-6,0.00976509
1.3609E-5,0.0191601
3.55264E-5,0.0284813
3.06702E-5,0.0236348
45.5391,106.753
1.04514E-5,0.01559
4.62526E-5,0.0378597
1.603E-7,0.00442308
1.80651E-4,0.0792493
2.81819E-5,0.0230531
4.54264E-6,0.0108184
1.25729E-5,0.0140746
5.36799E-5,0.0301957
1.15805E-5,0.0135475
5.02222E-6,0.00877333
8.41231E-5,0.0403267
2.04203E-4,0.113848
5.58337E-6,0.011363
7.66205E-6,0.0104025
3.27681E-6,0.00702314
1.34649E-5,0.0184943
1.03667E-5,0.0120576
8.71444E-6,0.0124203
3.57407E-5,0.0414939

View File

View File

@ -0,0 +1,7 @@
AQUAMAPS_SUITABLE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable
AQUAMAPS_NATIVE=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative
AQUAMAPS_NATIVE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNative2050
AQUAMAPS_SUITABLE_2050=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsSuitable2050
AQUAMAPS_NATIVE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNN
AQUAMAPS_SUITABLE_NEURALNETWORK=org.gcube.dataanalysis.ecoengine.spatialdistributions.AquamapsNNSuitable
FEED_FORWARD_A_N_N_DISTRIBUTION=org.gcube.dataanalysis.ecoengine.spatialdistributions.FeedForwardNeuralNetworkDistribution

View File

@ -0,0 +1,3 @@
DBSCAN=org.gcube.dataanalysis.ecoengine.clustering.DBScan
KMEANS=org.gcube.dataanalysis.ecoengine.clustering.KMeans
XMEANS=org.gcube.dataanalysis.ecoengine.clustering.XMeansWrapper

View File

View File

@ -0,0 +1,3 @@
DISCREPANCY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DiscrepancyAnalysis
QUALITY_ANALYSIS=org.gcube.dataanalysis.ecoengine.evaluation.DistributionQualityAnalysis
HRS=org.gcube.dataanalysis.ecoengine.evaluation.HabitatRepresentativeness

View File

@ -0,0 +1,2 @@
LOCAL_WITH_DATABASE=org.gcube.dataanalysis.ecoengine.processing.LocalSplitGenerator
SIMPLE_LOCAL=org.gcube.dataanalysis.ecoengine.processing.LocalSimpleSplitGenerator

0
cfg/loredana.txt Normal file
View File

1
cfg/modelers.properties Normal file
View File

@ -0,0 +1 @@
HSPEN_MODELER=org.gcube.dataanalysis.ecoengine.modeling.SimpleModeler

4
cfg/models.properties Normal file
View File

@ -0,0 +1,4 @@
HSPEN=org.gcube.dataanalysis.ecoengine.models.ModelHSPEN
AQUAMAPSNN=org.gcube.dataanalysis.ecoengine.models.ModelAquamapsNN
FEED_FORWARD_ANN=org.gcube.dataanalysis.ecoengine.models.FeedForwardNN
FEED_FORWARD_ANN_FILE=org.gcube.dataanalysis.ecoengine.models.testing.FeedForwardNNFile

View File

@ -0,0 +1 @@
AQUAMAPS_SUITABLE=org.gcube.dataanalysis.peeng.models.AquamapsSuitableNode

2620
cfg/operators.xml Normal file

File diff suppressed because it is too large Load Diff

1
cfg/results.csv Normal file
View File

@ -0,0 +1 @@
1,2.87747E-6,2,1.603E-7,3,2.84254E-6,4,1.06799E-5,5,8.2902E-7,6,9.5606E-7,7,12.5088,8,2.8121E-6,9,3.62395E-6,10,15.2436,11,27.0718,12,62.4943,13,27.2604,14,38.4655,15,0.00185396,16,8.42477E-4,17,0.00539245,18,5.84188E-4,19,1.00963E-5,20,1.65052E-5,21,4.22465E-4,22,4.83264E-6,23,2.82366E-4,24,3.37542E-5,25,2.26395E-5,26,1.51102E-5,27,9.59838E-6,28,3.44589E-6,29,1.96001E-4,30,4.7696E-6,
1 1 2.87747E-6 2 1.603E-7 3 2.84254E-6 4 1.06799E-5 5 8.2902E-7 6 9.5606E-7 7 12.5088 8 2.8121E-6 9 3.62395E-6 10 15.2436 11 27.0718 12 62.4943 13 27.2604 14 38.4655 15 0.00185396 16 8.42477E-4 17 0.00539245 18 5.84188E-4 19 1.00963E-5 20 1.65052E-5 21 4.22465E-4 22 4.83264E-6 23 2.82366E-4 24 3.37542E-5 25 2.26395E-5 26 1.51102E-5 27 9.59838E-6 28 3.44589E-6 29 1.96001E-4 30 4.7696E-6

0
cfg/results.txt Normal file
View File

View File

@ -0,0 +1,25 @@
ABSENCE_CELLS_FROM_AQUAMAPS=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.MarineAbsencePointsFromAquamapsDistribution
BIOCLIMATE_HSPEC=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPECTransducer
BIOCLIMATE_HCAF=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHCAFTransducer
BIOCLIMATE_HSPEN=org.gcube.dataanalysis.ecoengine.transducers.BioClimateHSPENTransducer
HCAF_INTERPOLATION=org.gcube.dataanalysis.ecoengine.transducers.InterpolationTransducer
HCAF_FILTER=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.HcafFilter
HSPEN_FILTER=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.HspenFilter
OCCURRENCES_MERGER=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsMerger
OCCURRENCES_INTERSECTOR=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsIntersector
OCCURRENCES_MARINE_TERRESTRIAL=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsInSeaOnEarth
OCCURRENCES_DUPLICATES_DELETER=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsDuplicatesDeleter
OCCURRENCES_SUBTRACTION=org.gcube.dataanalysis.ecoengine.transducers.OccurrencePointsSubtraction
PRESENCE_CELLS_GENERATION=org.gcube.dataanalysis.ecoengine.transducers.simplequeryexecutors.MarinePresencePoints
FIN_TAXA_MATCH=org.gcube.dataanalysis.fin.taxamatch.TaxaMatchTransducer
LISTNAMES_TABLES=org.gcube.dataacces.algorithms.DatabasesResourcesManagerAlgorithms.AbsoluteSpeciesBarChartsAlgorithm
TEST_ALG=org.gcube.dataacces.algorithms.DatabasesResourcesManagerAlgorithms.SimpleAlg
LISTDBINFO=org.gcube.dataacces.algorithms.drmalgorithms.ListDBInfo
LISTDBNAMES=org.gcube.dataacces.algorithms.drmalgorithms.ListNames
LISTDBSCHEMA=org.gcube.dataacces.algorithms.drmalgorithms.ListSchemas
LISTTABLES=org.gcube.dataacces.algorithms.drmalgorithms.ListTables
GETTABLEDETAILS=org.gcube.dataacces.algorithms.drmalgorithms.GetTableDetails
LISTSUBMITQUERY=org.gcube.dataacces.algorithms.drmalgorithms.SubmitQuery
SAMPLEONTABLE=org.gcube.dataacces.algorithms.drmalgorithms.SampleOnTable
SMARTSAMPLEONTABLE=org.gcube.dataacces.algorithms.drmalgorithms.SmartSampleOnTable
RANDOMSAMPLEONTABLE=org.gcube.dataacces.algorithms.drmalgorithms.RandomSampleOnTable

View File

@ -0,0 +1,12 @@
ANOMALIES_DETECTION=DBSCAN,KMEANS,XMEANS
CLASSIFICATION=FEED_FORWARD_A_N_N_DISTRIBUTION
CLIMATE=BIOCLIMATE_HSPEC,BIOCLIMATE_HCAF,BIOCLIMATE_HSPEN,HCAF_INTERPOLATION
CORRELATION_ANALYSIS=HRS
DATA_CLUSTERING=DBSCAN,KMEANS,XMEANS
FILTERING=HCAF_FILTER,HSPEN_FILTER
FUNCTION_SIMULATION=FEED_FORWARD_A_N_N_DISTRIBUTION
OCCURRENCES=ABSENCE_CELLS_FROM_AQUAMAPS,PRESENCE_CELLS_GENERATION,OCCURRENCES_MERGER,OCCURRENCES_INTERSECTOR,OCCURRENCES_MARINE_TERRESTRIAL,OCCURRENCES_DUPLICATES_DELETER,OCCURRENCES_SUBTRACTION
PERFORMANCES_EVALUATION=QUALITY_ANALYSIS,DISCREPANCY_ANALYSIS
SPECIES_SIMULATION=AQUAMAPS_SUITABLE,AQUAMAPS_NATIVE,AQUAMAPS_NATIVE_2050,AQUAMAPS_SUITABLE_2050,AQUAMAPS_NATIVE_NEURALNETWORK,AQUAMAPS_SUITABLE_NEURALNETWORK
TRAINING=HSPEN,AQUAMAPSNN,FEED_FORWARD_ANN
TIME_SERIES=HCAF_INTERPOLATION

0
log.txt Normal file
View File

10
nullresults.csv Normal file
View File

@ -0,0 +1,10 @@
Xiphias gladius,135638
Fulmarus glacialis,131885
Thunnus albacares,124076
Pachymetopon blochii,113597
Aptenodytes patagonicus,112605
Gadus morhua,101777
Caretta caretta,101769
Thyrsites atun,97986
Loligo vulgaris reynaudi,96672
Argyrozona argyrozona,96278
1 Xiphias gladius 135638
2 Fulmarus glacialis 131885
3 Thunnus albacares 124076
4 Pachymetopon blochii 113597
5 Aptenodytes patagonicus 112605
6 Gadus morhua 101777
7 Caretta caretta 101769
8 Thyrsites atun 97986
9 Loligo vulgaris reynaudi 96672
10 Argyrozona argyrozona 96278

51
pom.xml Normal file
View File

@ -0,0 +1,51 @@
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.gcube.dataacces.algorithms</groupId>
<artifactId>DatabasesResourcesManagerAlgorithms</artifactId>
<version>1.0.0-SNAPSHOT</version>
<parent>
<groupId>org.gcube.tools</groupId>
<artifactId>maven-parent</artifactId>
<version>1.0.0</version>
</parent>
<!-- <packaging>maven-plugin</packaging> -->
<!-- <name>DatabasesResourcesManagerAlgorithms Maven Mojo</name> -->
<!-- <url>http://maven.apache.org</url> -->
<dependencies>
<!-- <dependency> -->
<!-- <groupId>org.gcube.dataanalysis</groupId> -->
<!-- <artifactId>ecological-engine</artifactId> -->
<!-- <version>[1.0.0-SNAPSHOT, 3.0.0-SNAPSHOT)</version> -->
<!-- </dependency> -->
<dependency>
<groupId>org.gcube.dataanalysis</groupId>
<artifactId>databases-resources-manager</artifactId>
<version>1.0.0-SNAPSHOT</version>
</dependency>
<!-- <dependency> -->
<!-- <groupId>org.apache.maven</groupId> -->
<!-- <artifactId>maven-plugin-api</artifactId> -->
<!-- <version>2.0</version> -->
<!-- </dependency> -->
<!-- <dependency> -->
<!-- <groupId>junit</groupId> -->
<!-- <artifactId>junit</artifactId> -->
<!-- <version>3.8.1</version> -->
<!-- <scope>test</scope> -->
<!-- </dependency> -->
</dependencies>
</project>

10
results.csv Normal file
View File

@ -0,0 +1,10 @@
Xiphias gladius,135638
Fulmarus glacialis,131885
Thunnus albacares,124076
Pachymetopon blochii,113597
Aptenodytes patagonicus,112605
Gadus morhua,101777
Caretta caretta,101769
Thyrsites atun,97986
Loligo vulgaris reynaudi,96672
Argyrozona argyrozona,96278
1 Xiphias gladius 135638
2 Fulmarus glacialis 131885
3 Thunnus albacares 124076
4 Pachymetopon blochii 113597
5 Aptenodytes patagonicus 112605
6 Gadus morhua 101777
7 Caretta caretta 101769
8 Thyrsites atun 97986
9 Loligo vulgaris reynaudi 96672
10 Argyrozona argyrozona 96278

View File

@ -0,0 +1,503 @@
package org.gcube.dataacces.algorithms.drmalgorithms;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
import org.gcube.dataaccess.databases.resources.DBResource;
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.resources.discovery.client.api.DiscoveryException;
import org.gcube.resources.discovery.client.api.InvalidResultException;
import org.hibernate.HibernateException;
import org.hibernate.SessionFactory;
/**
 * Algorithm that retrieves details about a table of a database resource
 * registered in the infrastructure: the "create table" statement, the list
 * of column names and the number of rows.
 */
public class GetTableDetails extends StandardLocalExternalAlgorithm {

	// output collection returned by getOutput()
	private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();

	// object that allows to manage some operations on a database
	private DatabaseManagement mgt;

	// driver of the selected database; used to discriminate postgres/mysql
	// behaviour in getDetails()
	private String driverInfo;

	// database's parameters specified by the user
	private String resourceName = null;
	private String databaseName = null;
	private String schemaName = null;
	private String tableName = null;

	// Hibernate session factory backing the database connection; null until
	// process() successfully connects
	private SessionFactory sf;

	@Override
	public void init() throws Exception {
		mgt = new DatabaseManagement(config.getConfigPath());
		AnalysisLogger.getLogger().debug("In GetTableDetails->Initialization");

		String scope = config.getGcubeScope();
		AnalysisLogger.getLogger().debug("In GetTableDetails->scope set by config: " + scope);
		if (scope == null || scope.length() == 0) {
			// no scope in the configuration: fall back to the ScopeProvider
			scope = ScopeProvider.instance.get();
			AnalysisLogger.getLogger().debug("In GetTableDetails->scope set by ScopeProvider: " + scope);
		} else {
			ScopeProvider.instance.set(scope);
		}
	}

	@Override
	public String getDescription() {
		// a simple description for the algorithm
		return "Algorithm that allows to view table details of a chosen database";
	}

	@Override
	protected void process() throws Exception, IOException,
			IllegalStateException, DiscoveryException, InvalidResultException,
			HibernateException {
		AnalysisLogger.getLogger().debug("In GetTableDetails->Processing");
		try {
			// retrieve the information needed to connect to the database
			List<String> info = retrieveInfo();
			// create the connection
			sf = getConnection(info);
			// recover the "CreateTableStatement", the column names and the
			// number of rows of the table chosen by the user
			map = getDetails();
			// close the connection
			sf.close();
		} catch (Exception e) {
			// every failure is logged once and rethrown unchanged so callers
			// still see the original exception type
			AnalysisLogger.getLogger().debug(
					"In GetTableDetails-> ERROR " + e.getMessage());
			throw e;
		} finally {
			// FIX: sf is still null when retrieveInfo()/getConnection()
			// failed; guard against an NPE that would mask the real error
			if ((sf != null) && (sf.isClosed() == false)) {
				mgt.closeConnection();
			}
		}
	}

	@Override
	protected void setInputParameters() {
		AnalysisLogger.getLogger().debug("In GetTableDetails->setting inputs");
		// parameters specified by the user
		addStringInput("ResourceName", "The name of the resource", "");
		addStringInput("DatabaseName", "The name of the database", "");
		addStringInput("SchemaName", "The name of the schema", "");
		addStringInput("TableName", "The name of the table", "");
	}

	@Override
	public void shutdown() {
		AnalysisLogger.getLogger().debug("In GetTableDetails->Shutdown");
	}

	@Override
	public StatisticalType getOutput() {
		AnalysisLogger.getLogger().debug("In GetTableDetails->retrieving outputs");
		// wrap the collected details in a primitive MAP type
		PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
				map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
		return output;
	}

	/**
	 * Reads a mandatory input parameter, failing with the given message when
	 * it is missing or blank.
	 *
	 * FIX: the original called trim() before the null check, so a missing
	 * parameter raised an NPE instead of the intended friendly exception.
	 */
	private String getRequiredInput(String name, String errorMessage)
			throws Exception {
		String value = getInputParameter(name);
		if ((value == null) || (value.trim().equals(""))) {
			throw new Exception(errorMessage);
		}
		return value.trim();
	}

	/**
	 * Creates the database connection from the ordered info list produced by
	 * retrieveInfo(): username, password, driver, dialect, url, database name.
	 */
	private SessionFactory getConnection(List<String> info) throws IOException {
		Iterator<String> iterator = info.iterator();
		String databaseUserName = iterator.next();
		String databasePassword = iterator.next();
		String databaseDriver = iterator.next();
		String databaseDialect = iterator.next();
		String databaseURL = iterator.next();
		String dbName = iterator.next();

		SessionFactory factory = mgt.createConnection(databaseUserName,
				databasePassword, databaseDriver, databaseDialect, databaseURL,
				dbName);
		AnalysisLogger.getLogger().debug(
				"In GetTableDetails->database " + dbName + ": connected");
		return factory;
	}

	/**
	 * Discovers the chosen resource in the infrastructure and collects the
	 * connection parameters (username, password, driver, dialect, url,
	 * database name) of the access point matching the requested database.
	 * Resource and database names are matched case-insensitively.
	 */
	private List<String> retrieveInfo() throws Exception,
			IllegalStateException, DiscoveryException, InvalidResultException {
		resourceName = getRequiredInput("ResourceName",
				"Warning: insert the resource name");
		databaseName = getRequiredInput("DatabaseName",
				"Warning: insert the database name");

		// retrieve the chosen resource
		DatabasesDiscoverer discovery = new DatabasesDiscoverer();
		List<DBResource> resources = discovery.discover();
		AnalysisLogger.getLogger().debug(
				"In GetTableDetails->number of database resources: "
						+ resources.size());
		for (int i = 0; i < resources.size(); i++) {
			AnalysisLogger.getLogger().debug(
					"In GetTableDetails->Resource's name: "
							+ resources.get(i).getResourceName());
		}

		// list that contains information useful for the connection
		List<String> info = new ArrayList<String>();

		check: for (int i = 0; i < resources.size(); i++) {
			if (resources.get(i).getResourceName().equalsIgnoreCase(resourceName)) {
				normalizeDBInfo(resources.get(i));
				for (int j = 0; j < resources.get(i).getAccessPoints().size(); j++) {
					if (resources.get(i).getAccessPoints().get(j)
							.getDatabaseName().equalsIgnoreCase(databaseName)) {

						info.add(resources.get(i).getAccessPoints().get(j)
								.getUsername());
						AnalysisLogger.getLogger().debug(
								"In GetTableDetails->username: "
										+ resources.get(i).getAccessPoints()
												.get(j).getUsername());

						info.add(resources.get(i).getAccessPoints().get(j)
								.getPassword());
						AnalysisLogger.getLogger().debug(
								"In GetTableDetails->password: "
										+ resources.get(i).getAccessPoints()
												.get(j).getPassword());

						info.add(resources.get(i).getAccessPoints().get(j)
								.getDriver());
						// remember the driver: getDetails() branches on it
						driverInfo = resources.get(i).getAccessPoints().get(j)
								.getDriver();
						AnalysisLogger.getLogger().debug(
								"In GetTableDetails->driver: "
										+ resources.get(i).getAccessPoints()
												.get(j).getDriver());

						info.add(resources.get(i).getAccessPoints().get(j)
								.getDialect());
						AnalysisLogger.getLogger().debug(
								"In GetTableDetails->dialect: "
										+ resources.get(i).getAccessPoints()
												.get(j).getDialect());

						info.add(resources.get(i).getAccessPoints().get(j)
								.address());
						AnalysisLogger.getLogger().debug(
								"In GetTableDetails->url: "
										+ resources.get(i).getAccessPoints()
												.get(j).address());

						info.add(databaseName);
						AnalysisLogger.getLogger().debug(
								"In GetTableDetails->databasename: "
										+ resources.get(i).getAccessPoints()
												.get(j).getDatabaseName());

						break check;
					}
				}
			}
		}
		AnalysisLogger
				.getLogger()
				.debug("In GetTableDetails->information useful for connection: retrieved");
		return info;
	}

	/**
	 * Retrieves the "CreateTableStatement", the column names and the number
	 * of rows of the table chosen by the user, keyed as "CreateTable",
	 * "Column Names" and "NumberRows" in the returned map.
	 */
	private LinkedHashMap<String, StatisticalType> getDetails()
			throws Exception {
		tableName = getRequiredInput("TableName",
				"Warning: insert the table name");
		// the schema is only meaningful for postgres databases
		if (driverInfo.toLowerCase().contains("postgres")) {
			schemaName = getRequiredInput("SchemaName",
					"Warning: insert the schema name");
		}

		AnalysisLogger.getLogger().debug(
				"In GetTableDetails->getting details on the table: " + tableName);

		// recover the "show create" statement; postgres scopes the lookup by
		// schema, mysql by database
		String createTable = null;
		if ((driverInfo.toLowerCase().contains("postgres"))) {
			createTable = mgt.getCreateTable(tableName, schemaName);
		}
		if ((driverInfo.toLowerCase().contains("mysql"))) {
			createTable = mgt.getCreateTable(tableName, databaseName);
		}
		PrimitiveType valCreateTable = new PrimitiveType(
				String.class.getName(), createTable, PrimitiveTypes.STRING,
				"Create Table Statement", "Create Table Statement");
		map.put("CreateTable", valCreateTable);
		AnalysisLogger.getLogger().debug(
				"In GetTableDetails->getting the \"CreateTableStatement\": "
						+ createTable);

		// retrieve the column names of the table as a comma-separated string
		List<String> listColumnNamesTable = mgt.getListColumnNamesTable();
		StringBuilder columnNames = new StringBuilder();
		for (int i = 0; i < listColumnNamesTable.size(); i++) {
			if (i > 0) {
				columnNames.append(",");
			}
			columnNames.append(listColumnNamesTable.get(i));
		}
		PrimitiveType valListColumnNamesTable = new PrimitiveType(
				String.class.getName(), columnNames.toString(),
				PrimitiveTypes.STRING, "Column Name", "Column Name");
		map.put("Column Names", valListColumnNamesTable);
		AnalysisLogger.getLogger().debug(
				"In GetTableDetails->getting the column names list: "
						+ createTable);

		// recover the number of rows
		long rows = mgt.getNumberOfRows(tableName);
		PrimitiveType valRows = new PrimitiveType(String.class.getName(),
				Long.toString(rows), PrimitiveTypes.STRING, "Number Rows",
				"Rows' Number");
		map.put("NumberRows", valRows);
		AnalysisLogger.getLogger().debug(
				"In GetTableDetails->getting the number of rows: "
						+ Long.toString(rows));
		return map;
	}

	/**
	 * Normalizes every access point of the given resource; a failure on any
	 * access point is logged and rethrown.
	 */
	private void normalizeDBInfo(DBResource resource) throws IOException {
		int ap = resource.getAccessPoints().size();
		for (int i = 0; i < ap; i++) {
			try {
				resource.normalize(i);
			} catch (IOException e) {
				AnalysisLogger.getLogger().debug(
						"In GetTableDetails->: Error in normalization process"
								+ e.getMessage());
				throw e;
			}
		}
	}
}

View File

@ -0,0 +1,270 @@
package org.gcube.dataacces.algorithms.drmalgorithms;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
import org.gcube.dataaccess.databases.resources.DBResource;
import org.gcube.dataaccess.databases.resources.DBResource.AccessPoint;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.resources.discovery.client.api.DiscoveryException;
import org.gcube.resources.discovery.client.api.InvalidResultException;
/** Class that allows to retrieve information about the chosen resource */
public class ListDBInfo extends StandardLocalExternalAlgorithm {
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
// database's parameters specified by the user
private String resourceName = null;
// list that contains information about the resource
private List<AccessPoint> ap = new ArrayList<AccessPoint>();
// variable that keeps track of database platform version
private String platformVersion = "";
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug("In ListDBInfo->Initialization");
String scope = config.getGcubeScope();
AnalysisLogger.getLogger().debug("In ListDBInfo->scope set by config object: " + scope);
if (scope == null || scope.length() == 0) {
scope = ScopeProvider.instance.get();
AnalysisLogger.getLogger().debug("In ListDBInfo->scope set by ScopeProvider: " + scope);
}else{
ScopeProvider.instance.set(scope);
}
}
@Override
public String getDescription() {
// add a simple description for the algorithm
return "Algorithm that allows to view information about one chosen resource of Database Type in the Infrastructure";
}
@Override
protected void process() throws Exception, IOException, IllegalStateException, DiscoveryException, InvalidResultException {
AnalysisLogger.getLogger().debug("In ListDBInfo->Processing");
AnalysisLogger.getLogger().debug("Scope: " + ScopeProvider.instance.get());
try{
// retrieve information
List<AccessPoint> apInfo = retrieveInfo();
AnalysisLogger.getLogger().debug("access point dimension: " + apInfo.size());
for (int i = 0; i < apInfo.size(); i++) {
PrimitiveType DBName = new PrimitiveType(String.class.getName(),
apInfo.get(i).getDatabaseName(), PrimitiveTypes.STRING,
"Database Name "
, "Database Name");
int index = i+1;
map.put("Database Name "+index, DBName);
AnalysisLogger.getLogger().debug(
"In ListDBInfo->Database Name: "
+ apInfo.get(i).getDatabaseName());
PrimitiveType url = new PrimitiveType(String.class.getName(),
apInfo.get(i).address(), PrimitiveTypes.STRING, "URL",
"URL");
map.put("URL " +index, url);
AnalysisLogger.getLogger().debug(
"In ListDBInfo->URL: " + apInfo.get(i).address());
PrimitiveType driver = new PrimitiveType(String.class.getName(),
apInfo.get(i).getDriver(), PrimitiveTypes.STRING,
"Driver Name", "Driver Name");
map.put("Driver Name "+index, driver);
AnalysisLogger.getLogger().debug(
"In ListDBInfo->Driver Name: "
+ apInfo.get(i).getDriver());
PrimitiveType dialect = new PrimitiveType(String.class.getName(),
apInfo.get(i).getDialect(), PrimitiveTypes.STRING,
"Dialect Name", "Dialect Name");
map.put("Dialect Name " +index, dialect);
AnalysisLogger.getLogger().debug(
"In ListDBInfo->Dialect Name: "
+ apInfo.get(i).getDialect());
PrimitiveType platformVersionValue = new PrimitiveType(String.class.getName(),
platformVersion, PrimitiveTypes.STRING,
"Platform Version", "Platform Version");
map.put("Platform Version " +index, platformVersionValue);
AnalysisLogger.getLogger().debug(
"In ListDBInfo->Platform Version: "
+ platformVersion);
}
}
catch (IllegalStateException e) {
// e.printStackTrace();
AnalysisLogger.getLogger().debug("In ListDBInfo-> ERROR " + e.getMessage());
throw e;
} catch (DiscoveryException e1) {
// e1.printStackTrace();
AnalysisLogger.getLogger().debug("In ListDBInfo-> ERROR " + e1.getMessage());
throw e1;
} catch (InvalidResultException e2) {
// e2.printStackTrace();
AnalysisLogger.getLogger().debug("In ListDBInfo-> ERROR " + e2.getMessage());
throw e2;
}
// catch(IOException e3){
//// e3.printStackTrace();
//
// AnalysisLogger.getLogger().debug("In ListDBInfo-> Exception " + e3.getMessage());
//
// throw e3;
// }
catch(Exception e4){
// e4.printStackTrace();
AnalysisLogger.getLogger().debug("In ListDBInfo-> Exception " + e4.getMessage());
throw e4;
}
}
@Override
protected void setInputParameters() {
	AnalysisLogger.getLogger().debug("In ListDBInfo->setting inputs");
	// Declare the single user-supplied input: the database resource name.
	addStringInput("ResourceName", "The name of the resource", "");
}
@Override
public void shutdown() {
	// Nothing to release: connections are managed per-invocation elsewhere.
	AnalysisLogger.getLogger().debug("In ListDBInfo->Shutdown");
}
@Override
public StatisticalType getOutput() {
	AnalysisLogger.getLogger().debug("In ListDBInfo->retrieving outputs");
	// Wrap the accumulated results map in a single MAP-typed primitive.
	return new PrimitiveType(LinkedHashMap.class.getName(), map,
			PrimitiveTypes.MAP, "ResultsMap", "Results Map");
}
/**
 * Looks up the resource whose name matches the user-supplied "ResourceName"
 * input (case-insensitive), normalizes it and returns its access points.
 * Also records the resource's platform version in {@code platformVersion}.
 *
 * @return the access points of the matching resource; NOTE(review): stays
 *         null when no resource matches — callers must handle that case
 * @throws Exception when the "ResourceName" input is missing or blank
 */
private List<AccessPoint> retrieveInfo() throws Exception, IllegalStateException, DiscoveryException, InvalidResultException {
	// FIX: read the raw input and null-check it BEFORE calling trim();
	// the old code dereferenced a possibly-null parameter first.
	String rawName = getInputParameter("ResourceName");
	if ((rawName == null) || (rawName.trim().equals(""))) {
		throw new Exception("Warning: insert the resource name");
	}
	resourceName = rawName.trim();
	// retrieve information about the chosen resource
	DatabasesDiscoverer discovery = new DatabasesDiscoverer();
	List<DBResource> resources = discovery.discover();
	AnalysisLogger.getLogger().debug(
			"In ListDBInfo->number of database resources: "
					+ resources.size());
	for (int i = 0; i < resources.size(); i++) {
		if (resources.get(i).getResourceName().equalsIgnoreCase(resourceName)) {
			platformVersion = resources.get(i).getPlatformVersion();
			normalizeDBInfo(resources.get(i));
			ap = resources.get(i).getAccessPoints();
			break;
		}
	}
	return ap;
}
/**
 * Normalizes every access point of the given resource, aborting and
 * rethrowing on the first normalization failure.
 */
private void normalizeDBInfo(DBResource resource) throws IOException {
	int total = resource.getAccessPoints().size();
	for (int idx = 0; idx < total; idx++) {
		try {
			resource.normalize(idx);
		} catch (IOException ex) {
			AnalysisLogger.getLogger().debug(
					"In ListDBInfo->: Error in normalization process"
							+ ex.getMessage());
			throw ex;
		}
	}
}
}

View File

@ -0,0 +1,173 @@
package org.gcube.dataacces.algorithms.drmalgorithms;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
import org.gcube.dataaccess.databases.resources.DBResource;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.resources.discovery.client.api.DiscoveryException;
import org.gcube.resources.discovery.client.api.InvalidResultException;
/** Class that allows to retrieve a list of database resources */
public class ListNames extends StandardLocalExternalAlgorithm {

	// ordered output map: position index -> resource name (STRING primitive)
	private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();

	/**
	 * Resolves the gCube scope: the one carried by the algorithm
	 * configuration when available, otherwise the one held by ScopeProvider.
	 */
	@Override
	public void init() throws Exception {
		AnalysisLogger.getLogger().debug("In ListNames->Initialization");
		String scope = config.getGcubeScope();
		AnalysisLogger.getLogger().debug(
				"In ListNames->scope set by config object: " + scope);
		if (scope == null || scope.length() == 0) {
			scope = ScopeProvider.instance.get();
			AnalysisLogger.getLogger().debug(
					"In ListNames->scope set by ScopeProvider: " + scope);
		} else {
			ScopeProvider.instance.set(scope);
		}
	}

	@Override
	public String getDescription() {
		// add a simple description for the algorithm
		return "Algorithm that allows to view the available database resources names in the Infrastructure";
	}

	/**
	 * Discovers the database resources available in the infrastructure and
	 * publishes one STRING primitive per resource name in the output map.
	 */
	@Override
	protected void process() throws Exception {
		AnalysisLogger.getLogger().debug("In ListNames->Processing");
		String scope = ScopeProvider.instance.get();
		if (scope != null) {
			AnalysisLogger.getLogger().debug("getting scope: " + scope);
			AnalysisLogger.getLogger().debug(
					"getting scope through config: " + config.getGcubeScope());
		}
		try {
			// retrieve resources
			List<DBResource> resources = this.retrieveResources();
			for (int i = 0; i < resources.size(); i++) {
				String name = resources.get(i).getResourceName();
				AnalysisLogger.getLogger().debug(
						"In ListNames->Resource's name: " + name);
				// FIX: build the primitive with its content directly instead
				// of creating it empty, buffering the name in an intermediate
				// list and mutating the primitive afterwards
				PrimitiveType val = new PrimitiveType(String.class.getName(),
						name, PrimitiveTypes.STRING, "Resource's name",
						"Resource's name");
				map.put(String.valueOf(i), val);
			}
			AnalysisLogger.getLogger().debug(
					"In ListNames->Output Map Size: " + map.size());
		} catch (IllegalStateException e) {
			AnalysisLogger.getLogger().debug(
					"In ListNames-> ERROR " + e.getMessage());
			throw e;
		} catch (DiscoveryException e1) {
			AnalysisLogger.getLogger().debug(
					"In ListNames-> ERROR " + e1.getMessage());
			throw e1;
		} catch (InvalidResultException e2) {
			AnalysisLogger.getLogger().debug(
					"In ListNames-> ERROR " + e2.getMessage());
			throw e2;
		}
	}

	@Override
	protected void setInputParameters() {
		AnalysisLogger.getLogger().debug("In ListNames->setting inputs");
		// NOTE(review): this input is declared but never read by process();
		// confirm whether a limit on the number of returned names was intended
		addStringInput("MaxNumber", "Max Number of Resources (-1 for all)",
				"-1");
	}

	@Override
	public void shutdown() {
		AnalysisLogger.getLogger().debug("In ListNames->Shutdown");
	}

	@Override
	public StatisticalType getOutput() {
		AnalysisLogger.getLogger().debug("In ListNames->retrieving outputs");
		// generate a primitive type for the collection
		return new PrimitiveType(LinkedHashMap.class.getName(), map,
				PrimitiveTypes.MAP, "ResultsMap", "Results Map");
	}

	// method that retrieves the database resources via the discovery service
	private List<DBResource> retrieveResources() throws IllegalStateException,
			DiscoveryException, InvalidResultException {
		AnalysisLogger.getLogger().debug("In ListNames->retrieving resources");
		DatabasesDiscoverer discovery = new DatabasesDiscoverer();
		List<DBResource> resources = discovery.discover();
		AnalysisLogger.getLogger().debug(
				"In ListNames->number of database resources: "
						+ resources.size());
		return resources;
	}
}

View File

@ -0,0 +1,464 @@
package org.gcube.dataacces.algorithms.drmalgorithms;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
import org.gcube.dataaccess.databases.resources.DBResource;
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.resources.discovery.client.api.DiscoveryException;
import org.gcube.resources.discovery.client.api.InvalidResultException;
import org.hibernate.HibernateException;
import org.hibernate.SessionFactory;
/**
 * class that allows to retrieve schema's names of a chosen database. In this
 * case the database's type is "postgresql"
 */
public class ListSchemas extends StandardLocalExternalAlgorithm {

	// ordered output map: position index -> schema name (STRING primitive)
	private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();

	// object that manages the database operations
	private DatabaseManagement mgt;

	// database's parameters specified by the user
	private String resourceName = null;
	private String databaseName = null;

	// session factory of the active connection; null until getConnection succeeds
	private SessionFactory sf;

	/**
	 * Initializes the database manager and resolves the gCube scope (from the
	 * configuration when available, otherwise from the ScopeProvider).
	 */
	@Override
	public void init() throws Exception {
		mgt = new DatabaseManagement(config.getConfigPath());
		AnalysisLogger.getLogger().debug("In ListSchemas->Initialization");
		String scope = config.getGcubeScope();
		AnalysisLogger.getLogger().debug(
				"In ListSchemas->scope set by config object: " + scope);
		if (scope == null || scope.length() == 0) {
			scope = ScopeProvider.instance.get();
			AnalysisLogger.getLogger().debug(
					"In ListSchemas->scope set by ScopeProvider: " + scope);
		} else {
			ScopeProvider.instance.set(scope);
		}
	}

	@Override
	public String getDescription() {
		// add a simple description for the algorithm
		return "Algorithm that allows to view the schema names of a chosen database for which the type is Postgres";
	}

	/**
	 * Connects to the chosen database, retrieves its schema names and fills
	 * the output map with one STRING primitive per schema.
	 */
	@Override
	protected void process() throws Exception, IOException,
			IllegalStateException, DiscoveryException, InvalidResultException,
			HibernateException {
		AnalysisLogger.getLogger().debug("In ListSchemas->Processing");
		try {
			// retrieve information useful for connection
			List<String> Info = retrieveInfo();
			// create the connection
			sf = getConnection(Info);
			// get the schema's list
			List<String> listSchemas = getSchemas();
			if (listSchemas.size() == 0) {
				AnalysisLogger.getLogger().debug(
						"In ListSchemas->Warning: no schema available");
			}
			for (int i = 0; i < listSchemas.size(); i++) {
				PrimitiveType val = new PrimitiveType(String.class.getName(),
						null, PrimitiveTypes.STRING, "schema's name",
						"schema's name");
				val.setContent(listSchemas.get(i));
				map.put(String.valueOf(i), val);
				AnalysisLogger.getLogger().debug(
						"In ListSchemas->getting schema's name: "
								+ val.getContent());
			}
			sf.close();
		} catch (HibernateException h) {
			AnalysisLogger.getLogger().debug(
					"In ListSchemas-> ERROR " + h.getMessage());
			throw h;
		} catch (IllegalStateException e) {
			AnalysisLogger.getLogger().debug(
					"In ListSchemas-> ERROR " + e.getMessage());
			throw e;
		} catch (DiscoveryException e1) {
			AnalysisLogger.getLogger().debug(
					"In ListSchemas-> ERROR " + e1.getMessage());
			throw e1;
		} catch (InvalidResultException e2) {
			AnalysisLogger.getLogger().debug(
					"In ListSchemas-> ERROR " + e2.getMessage());
			throw e2;
		} catch (IOException e3) {
			AnalysisLogger.getLogger().debug(
					"In ListSchemas-> Exception " + e3.getMessage());
			throw e3;
		} catch (Exception e4) {
			AnalysisLogger.getLogger().debug(
					"In ListSchemas-> Exception " + e4.getMessage());
			throw e4;
		} finally {
			// FIX: sf is null when getConnection failed; the old unguarded
			// sf.isClosed() call threw a NullPointerException here that
			// masked the original exception
			if ((sf != null) && (sf.isClosed() == false)) {
				mgt.closeConnection();
			}
		}
	}

	@Override
	protected void setInputParameters() {
		AnalysisLogger.getLogger().debug("In ListSchemas->setting inputs");
		// resource and database's name specified by the user
		addStringInput("ResourceName", "The name of the resource", "");
		addStringInput("DatabaseName", "The name of the database", "");
	}

	@Override
	public void shutdown() {
		AnalysisLogger.getLogger().debug("In ListSchemas->Shutdown");
	}

	@Override
	public StatisticalType getOutput() {
		AnalysisLogger.getLogger().debug("In ListSchemas->retrieving outputs");
		// generate a primitive type for the collection
		return new PrimitiveType(LinkedHashMap.class.getName(), map,
				PrimitiveTypes.MAP, "ResultsMap", "Results Map");
	}

	// method that retrieves the schema's list
	private List<String> getSchemas() throws Exception {
		List<String> listSchemas = new ArrayList<String>();
		try {
			listSchemas = mgt.getSchemas();
		} finally {
			// FIX: guard against a null session factory before probing it
			if ((sf != null) && (sf.isClosed() == false)) {
				mgt.closeConnection();
			}
		}
		return listSchemas;
	}

	/**
	 * Retrieves the connection parameters of the chosen resource/database in
	 * the order consumed by getConnection(): username, password, driver,
	 * dialect, url, database name.
	 */
	private List<String> retrieveInfo() throws Exception,
			IllegalStateException, DiscoveryException, InvalidResultException {
		// FIX: null-check the raw inputs BEFORE calling trim(); the old code
		// threw a NullPointerException when a parameter was missing
		String rawResource = getInputParameter("ResourceName");
		if ((rawResource == null) || (rawResource.trim().equals(""))) {
			throw new Exception("Warning: insert the resource name");
		}
		resourceName = rawResource.trim();
		String rawDatabase = getInputParameter("DatabaseName");
		if ((rawDatabase == null) || (rawDatabase.trim().equals(""))) {
			throw new Exception("Warning: insert the database name");
		}
		databaseName = rawDatabase.trim();
		// retrieve the chosen resource
		DatabasesDiscoverer discovery = new DatabasesDiscoverer();
		// list that contains information useful for the connection
		List<String> info = new ArrayList<String>();
		List<DBResource> resources = discovery.discover();
		AnalysisLogger.getLogger().debug(
				"In ListSchemas->number of database resources: "
						+ resources.size());
		for (int i = 0; i < resources.size(); i++) {
			AnalysisLogger.getLogger().debug(
					"In ListSchemas->Resource's name: "
							+ resources.get(i).getResourceName());
		}
		check: for (int i = 0; i < resources.size(); i++) {
			DBResource res = resources.get(i);
			if (res.getResourceName().equalsIgnoreCase(resourceName)) {
				normalizeDBInfo(res);
				for (int j = 0; j < res.getAccessPoints().size(); j++) {
					if (res.getAccessPoints().get(j).getDatabaseName()
							.equalsIgnoreCase(databaseName)) {
						info.add(res.getAccessPoints().get(j).getUsername());
						AnalysisLogger.getLogger().debug(
								"In ListSchemas->username: "
										+ res.getAccessPoints().get(j)
												.getUsername());
						info.add(res.getAccessPoints().get(j).getPassword());
						// SECURITY FIX: never write the clear-text password
						// to the log
						AnalysisLogger.getLogger().debug(
								"In ListSchemas->password: ***");
						info.add(res.getAccessPoints().get(j).getDriver());
						AnalysisLogger.getLogger().debug(
								"In ListSchemas->driver: "
										+ res.getAccessPoints().get(j)
												.getDriver());
						info.add(res.getAccessPoints().get(j).getDialect());
						AnalysisLogger.getLogger().debug(
								"In ListSchemas->dialect: "
										+ res.getAccessPoints().get(j)
												.getDialect());
						info.add(res.getAccessPoints().get(j).address());
						AnalysisLogger.getLogger().debug(
								"In ListSchemas->url: "
										+ res.getAccessPoints().get(j)
												.address());
						info.add(databaseName);
						AnalysisLogger.getLogger().debug(
								"In ListSchemas->databasename: "
										+ res.getAccessPoints().get(j)
												.getDatabaseName());
						break check;
					}
				}
			}
		}
		// FIX: fail with a clear message instead of a NoSuchElementException
		// later in getConnection when no matching resource/database is found
		if (info.size() == 0) {
			throw new Exception("Warning: no database " + databaseName
					+ " found for resource " + resourceName);
		}
		AnalysisLogger.getLogger().debug(
				"In ListSchemas->information useful for connection: retrieved");
		return info;
	}

	// method that allows to create the connection
	private SessionFactory getConnection(List<String> Info) throws IOException {
		// unpack the parameters in the order produced by retrieveInfo()
		Iterator<String> iterator = Info.iterator();
		String DatabaseUserName = iterator.next();
		String DatabasePassword = iterator.next();
		String DatabaseDriver = iterator.next();
		String DatabaseDialect = iterator.next();
		String DatabaseURL = iterator.next();
		String DatabaseName = iterator.next();
		return mgt.createConnection(DatabaseUserName, DatabasePassword,
				DatabaseDriver, DatabaseDialect, DatabaseURL, DatabaseName);
	}

	// normalizes every access point of the resource, rethrowing on failure
	private void normalizeDBInfo(DBResource resource) throws IOException {
		int ap = resource.getAccessPoints().size();
		for (int i = 0; i < ap; i++) {
			try {
				resource.normalize(i);
			} catch (IOException e) {
				// FIX: log with the correct class name (was "In ListTables->")
				AnalysisLogger.getLogger().debug(
						"In ListSchemas->: Error in normalization process"
								+ e.getMessage());
				throw e;
			}
		}
	}
}

View File

@ -0,0 +1,450 @@
package org.gcube.dataacces.algorithms.drmalgorithms;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
import org.gcube.dataaccess.databases.resources.DBResource;
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.resources.discovery.client.api.DiscoveryException;
import org.gcube.resources.discovery.client.api.InvalidResultException;
import org.hibernate.HibernateException;
import org.hibernate.SessionFactory;
/** Class that allows to retrieve the table names of a chosen database */
public class ListTables extends StandardLocalExternalAlgorithm {

	// ordered output map: position index -> table name (STRING primitive)
	private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();

	// object that manages the database operations
	private DatabaseManagement mgt;

	// variable that keeps track of the database's type (driver class name)
	private String driverInfo;

	// database's parameters specified by the user
	private String resourceName = null;
	private String databaseName = null;
	private String schemaName = null;

	// session factory of the active connection; null until getConnection succeeds
	private SessionFactory sf;

	/**
	 * Initializes the database manager and resolves the gCube scope (from the
	 * configuration when available, otherwise from the ScopeProvider).
	 */
	@Override
	public void init() throws Exception {
		mgt = new DatabaseManagement(config.getConfigPath());
		AnalysisLogger.getLogger().debug("In ListTables->Initialization");
		String scope = config.getGcubeScope();
		AnalysisLogger.getLogger().debug(
				"In ListTables->scope set by config object: " + scope);
		if (scope == null || scope.length() == 0) {
			scope = ScopeProvider.instance.get();
			AnalysisLogger.getLogger().debug(
					"In ListTables->scope set by ScopeProvider: " + scope);
		} else {
			ScopeProvider.instance.set(scope);
		}
	}

	@Override
	public String getDescription() {
		// add a simple description for the algorithm
		return "Algorithm that allows to view the table names of a chosen database";
	}

	/**
	 * Connects to the chosen database and fills the output map with one
	 * STRING primitive per table name. Postgres requires a schema name;
	 * MySQL lists the tables of the database directly.
	 */
	@Override
	protected void process() throws Exception, IOException,
			IllegalStateException, DiscoveryException, InvalidResultException,
			HibernateException {
		AnalysisLogger.getLogger().debug("In ListTables->Processing");
		try {
			// retrieve information useful for the connection
			List<String> Info = retrieveInfo();
			// create the connection
			sf = getConnection(Info);
			// get the table' list
			List<String> listTables = new ArrayList<String>();
			if (driverInfo.toLowerCase().contains("postgres")) {
				// FIX: null-check the raw input BEFORE calling trim(); the
				// old code threw a NullPointerException when it was missing
				String rawSchema = getInputParameter("SchemaName");
				if ((rawSchema == null) || (rawSchema.trim().equals(""))) {
					throw new Exception("Warning: insert the schema name");
				}
				schemaName = rawSchema.trim();
				listTables = mgt.getTables(databaseName, schemaName);
				AnalysisLogger
						.getLogger()
						.debug("In ListTables->getting table's name for database postgres");
			}
			if (driverInfo.toLowerCase().contains("mysql")) {
				listTables = mgt.getTables(databaseName, null);
				AnalysisLogger
						.getLogger()
						.debug("In ListTables->getting table's name for database mysql");
			}
			// TODO: manage also the oracle type
			if (listTables == null) {
				AnalysisLogger.getLogger().debug(
						"In ListTables->Warning: no tables available");
			} else {
				for (int i = 0; i < listTables.size(); i++) {
					PrimitiveType val = new PrimitiveType(
							String.class.getName(), null,
							PrimitiveTypes.STRING, "Table's name",
							"Table's name");
					val.setContent(listTables.get(i));
					map.put(String.valueOf(i), val);
				}
			}
			// close the connection
			sf.close();
		} catch (HibernateException h) {
			AnalysisLogger.getLogger().debug(
					"In ListTables-> ERROR " + h.getMessage());
			throw h;
		} catch (IllegalStateException e) {
			AnalysisLogger.getLogger().debug(
					"In ListTables-> ERROR " + e.getMessage());
			throw e;
		} catch (DiscoveryException e1) {
			AnalysisLogger.getLogger().debug(
					"In ListTables-> ERROR " + e1.getMessage());
			throw e1;
		} catch (InvalidResultException e2) {
			AnalysisLogger.getLogger().debug(
					"In ListTables-> ERROR " + e2.getMessage());
			throw e2;
		} catch (IOException e3) {
			AnalysisLogger.getLogger().debug(
					"In ListTables-> Exception " + e3.getMessage());
			throw e3;
		} catch (Exception e4) {
			AnalysisLogger.getLogger().debug(
					"In ListTables-> Exception " + e4.getMessage());
			throw e4;
		} finally {
			// FIX: sf is null when getConnection failed; the old unguarded
			// sf.isClosed() call threw a NullPointerException here that
			// masked the original exception
			if ((sf != null) && (sf.isClosed() == false)) {
				mgt.closeConnection();
			}
		}
	}

	@Override
	protected void setInputParameters() {
		AnalysisLogger.getLogger().debug("In ListTables->setting inputs");
		// parameters specified by the user
		addStringInput("ResourceName", "The name of the resource", "");
		addStringInput("DatabaseName", "The name of the database", "");
		addStringInput("SchemaName", "The name of the schema", "");
	}

	@Override
	public void shutdown() {
		AnalysisLogger.getLogger().debug("In ListTables->Shutdown");
	}

	@Override
	public StatisticalType getOutput() {
		AnalysisLogger.getLogger().debug("In ListTables->retrieving outputs");
		// generate a primitive type for the collection
		return new PrimitiveType(LinkedHashMap.class.getName(), map,
				PrimitiveTypes.MAP, "ResultsMap", "Results Map");
	}

	/**
	 * Retrieves the connection parameters of the chosen resource/database in
	 * the order consumed by getConnection(): username, password, driver,
	 * dialect, url, database name. Also records the driver in driverInfo.
	 */
	private List<String> retrieveInfo() throws Exception,
			IllegalStateException, DiscoveryException, InvalidResultException {
		// FIX: null-check the raw inputs BEFORE calling trim(); the old code
		// threw a NullPointerException when a parameter was missing
		String rawResource = getInputParameter("ResourceName");
		if ((rawResource == null) || (rawResource.trim().equals(""))) {
			throw new Exception("Warning: insert the resource name");
		}
		resourceName = rawResource.trim();
		String rawDatabase = getInputParameter("DatabaseName");
		if ((rawDatabase == null) || (rawDatabase.trim().equals(""))) {
			throw new Exception("Warning: insert the database name");
		}
		databaseName = rawDatabase.trim();
		// retrieve the chosen resource
		DatabasesDiscoverer discovery = new DatabasesDiscoverer();
		List<DBResource> resources = discovery.discover();
		AnalysisLogger.getLogger().debug(
				"In ListTables->number of database resources: "
						+ resources.size());
		for (int i = 0; i < resources.size(); i++) {
			AnalysisLogger.getLogger().debug(
					"In ListTables->Resource's name: "
							+ resources.get(i).getResourceName());
		}
		// list that contains information useful for the connection
		List<String> info = new ArrayList<String>();
		check: for (int i = 0; i < resources.size(); i++) {
			DBResource res = resources.get(i);
			if (res.getResourceName().equalsIgnoreCase(resourceName)) {
				normalizeDBInfo(res);
				for (int j = 0; j < res.getAccessPoints().size(); j++) {
					if (res.getAccessPoints().get(j).getDatabaseName()
							.equalsIgnoreCase(databaseName)) {
						info.add(res.getAccessPoints().get(j).getUsername());
						AnalysisLogger.getLogger().debug(
								"In ListTables->username: "
										+ res.getAccessPoints().get(j)
												.getUsername());
						info.add(res.getAccessPoints().get(j).getPassword());
						// SECURITY FIX: never write the clear-text password
						// to the log
						AnalysisLogger.getLogger().debug(
								"In ListTables->password: ***");
						info.add(res.getAccessPoints().get(j).getDriver());
						driverInfo = res.getAccessPoints().get(j).getDriver();
						AnalysisLogger.getLogger().debug(
								"In ListTables->driver: "
										+ res.getAccessPoints().get(j)
												.getDriver());
						info.add(res.getAccessPoints().get(j).getDialect());
						AnalysisLogger.getLogger().debug(
								"In ListTables->dialect: "
										+ res.getAccessPoints().get(j)
												.getDialect());
						info.add(res.getAccessPoints().get(j).address());
						AnalysisLogger.getLogger().debug(
								"In ListTables->url: "
										+ res.getAccessPoints().get(j)
												.address());
						info.add(databaseName);
						AnalysisLogger.getLogger().debug(
								"In ListTables->databasename: "
										+ res.getAccessPoints().get(j)
												.getDatabaseName());
						break check;
					}
				}
			}
		}
		// FIX: fail with a clear message instead of a NoSuchElementException
		// later in getConnection when no matching resource/database is found
		if (info.size() == 0) {
			throw new Exception("Warning: no database " + databaseName
					+ " found for resource " + resourceName);
		}
		AnalysisLogger.getLogger().debug(
				"In ListTables->information useful for connection: retrieved");
		return info;
	}

	// method that allows to create the connection
	private SessionFactory getConnection(List<String> Info) throws IOException {
		// unpack the parameters in the order produced by retrieveInfo()
		Iterator<String> iterator = Info.iterator();
		String DatabaseUserName = iterator.next();
		String DatabasePassword = iterator.next();
		String DatabaseDriver = iterator.next();
		String DatabaseDialect = iterator.next();
		String DatabaseURL = iterator.next();
		String DatabaseName = iterator.next();
		SessionFactory sf = mgt.createConnection(DatabaseUserName,
				DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
				DatabaseName);
		AnalysisLogger.getLogger().debug(
				"In ListTables->database " + DatabaseName + ": connected");
		return sf;
	}

	// normalizes every access point of the resource, rethrowing on failure
	private void normalizeDBInfo(DBResource resource) throws IOException {
		int ap = resource.getAccessPoints().size();
		for (int i = 0; i < ap; i++) {
			try {
				resource.normalize(i);
			} catch (IOException e) {
				AnalysisLogger.getLogger().debug(
						"In ListTables->: Error in normalization process"
								+ e.getMessage());
				throw e;
			}
		}
	}
}

View File

@ -0,0 +1,591 @@
package org.gcube.dataacces.algorithms.drmalgorithms;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
import org.gcube.dataaccess.databases.resources.DBResource;
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.resources.discovery.client.api.DiscoveryException;
import org.gcube.resources.discovery.client.api.InvalidResultException;
import org.hibernate.HibernateException;
import org.hibernate.SessionFactory;
/**
* Class that allows to perform a random sample operation on a table of a chosen
* database. It retrieves 100 rows of a table randomly.
*/
public class RandomSampleOnTable extends StandardLocalExternalAlgorithm {
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
// object that allows to manage some operations on a database
private DatabaseManagement mgt;
// file in which information is written
// private BufferedWriter out;
// private String fileName;
// database's parameters specified by the user
private String resourceName = null;
private String databaseName = null;
private String schemaName = null;
private String tableName = null;
private SessionFactory sf;
// variable that keeps track of the driver information
private String driverInfo;
/**
 * Builds the database-management helper and resolves the gCube scope
 * (configuration value first, ScopeProvider as fallback).
 */
@Override
public void init() throws Exception {
	mgt = new DatabaseManagement(config.getConfigPath());
	AnalysisLogger.getLogger().debug(
			"In RandomSmartSampleOnTable->Initialization");
	String scope = config.getGcubeScope();
	AnalysisLogger.getLogger().debug(
			"In RandomSmartSampleOnTable->scope set by config object: "
					+ scope);
	boolean scopeMissing = (scope == null) || scope.isEmpty();
	if (scopeMissing) {
		scope = ScopeProvider.instance.get();
		AnalysisLogger.getLogger().debug(
				"In RandomSmartSampleOnTable->scope set by ScopeProvider: "
						+ scope);
	} else {
		ScopeProvider.instance.set(scope);
	}
}
@Override
public String getDescription() {
	// short human-readable description of the algorithm
	return "Algorithm that allows to perform a sample operation on a table randomly";
}
/**
 * Validates the user inputs, connects to the chosen database and performs
 * a random sample of the requested table, storing the rows in the output
 * map. Postgres additionally requires a schema name.
 */
@Override
protected void process() throws Exception {
	AnalysisLogger.getLogger().debug(
			"In RandomSmartSampleOnTable->Processing");
	try {
		// retrieve information
		List<String> Info = retrieveInfo();
		// FIX: null-check the raw input BEFORE calling trim(); the old
		// code threw a NullPointerException when the parameter was missing
		String rawTable = getInputParameter("TableName");
		if ((rawTable == null) || (rawTable.trim().equals(""))) {
			throw new Exception("Warning: insert the table name");
		}
		tableName = rawTable.trim();
		// check on schema name field (required for postgres only)
		if (driverInfo.toLowerCase().contains("postgres")) {
			String rawSchema = getInputParameter("SchemaName");
			if ((rawSchema == null) || (rawSchema.trim().equals(""))) {
				throw new Exception("Warning: insert the schema name");
			}
			schemaName = rawSchema.trim();
		}
		// create the connection
		sf = getConnection(Info);
		// smart sample operation on table
		map = randomSampleOnTable();
		// close the connection
		sf.close();
	} catch (HibernateException h) {
		AnalysisLogger.getLogger().debug(
				"In RandomSmartSampleOnTable-> ERROR " + h.getMessage());
		throw h;
	} catch (IllegalStateException e) {
		AnalysisLogger.getLogger().debug(
				"In RandomSmartSampleOnTable-> ERROR " + e.getMessage());
		throw e;
	} catch (DiscoveryException e1) {
		AnalysisLogger.getLogger().debug(
				"In RandomSmartSampleOnTable-> ERROR " + e1.getMessage());
		throw e1;
	} catch (InvalidResultException e2) {
		AnalysisLogger.getLogger().debug(
				"In RandomSmartSampleOnTable-> ERROR " + e2.getMessage());
		throw e2;
	} catch (IOException e3) {
		AnalysisLogger.getLogger().debug(
				"In RandomSmartSampleOnTable-> ERROR " + e3.getMessage());
		throw e3;
	} catch (Exception e4) {
		AnalysisLogger.getLogger().debug(
				"In RandomSmartSampleOnTable-> Exception "
						+ e4.getMessage());
		throw e4;
	} finally {
		// FIX: sf is null when getConnection failed or was never reached;
		// the old unguarded sf.isClosed() call threw a NullPointerException
		// here that masked the original exception
		if ((sf != null) && (sf.isClosed() == false)) {
			mgt.closeConnection();
		}
	}
}
@Override
protected void setInputParameters() {
	// Declare the four user-supplied inputs consumed by process().
	addStringInput("ResourceName", "The name of the resource", "");
	addStringInput("DatabaseName", "The name of the database", "");
	addStringInput("SchemaName", "The name of the schema", "");
	addStringInput("TableName", "The name of the table", "");
}
/** Returns the sampled rows wrapped as a single MAP-typed primitive. */
// FIX: added the missing @Override annotation for consistency with the
// other overridden methods of this class
@Override
public StatisticalType getOutput() {
	AnalysisLogger.getLogger().debug(
			"In RandomSampleOnTable->retrieving outputs");
	// generate a primitive type for the collection
	return new PrimitiveType(LinkedHashMap.class.getName(), map,
			PrimitiveTypes.MAP, "ResultsMap", "Results Map");
}
/** Lifecycle hook invoked at algorithm termination; only logs the event. */
@Override
public void shutdown() {
AnalysisLogger.getLogger().debug("In RandomSampleOnTable->Shutdown");
}
/**
 * Recovers from the infrastructure the information needed to connect to the
 * chosen database: username, password, driver, dialect, URL and database
 * name, in this exact positional order (consumed by getConnection()). The
 * match on resource and database names is case-insensitive. Also records
 * the driver string in {@code driverInfo} for later use by process().
 *
 * NOTE(review): the password is written to the debug log below — consider
 * removing that entry.
 *
 * @return the connection parameters; empty when nothing matches
 * @throws Exception when a mandatory input parameter is missing
 * @throws DiscoveryException if the resource discovery fails
 * @throws InvalidResultException if the discovery returns invalid data
 */
private List<String> retrieveInfo() throws Exception,
        IllegalStateException, DiscoveryException, InvalidResultException {
    // Bug fix: validate before trimming — calling trim() on a null input
    // parameter threw a NullPointerException before the emptiness check
    // could ever run, making the original null check dead code.
    resourceName = getInputParameter("ResourceName");
    if ((resourceName == null) || (resourceName.trim().equals(""))) {
        throw new Exception("Warning: insert the resource name");
    }
    resourceName = resourceName.trim();
    databaseName = getInputParameter("DatabaseName");
    if ((databaseName == null) || (databaseName.trim().equals(""))) {
        throw new Exception("Warning: insert the database name");
    }
    databaseName = databaseName.trim();
    // retrieve the chosen resource
    DatabasesDiscoverer discovery = new DatabasesDiscoverer();
    List<DBResource> resources = discovery.discover();
    AnalysisLogger.getLogger().debug(
            "In RandomSampleOnTable->number of elements: "
                    + resources.size());
    // list that contains information useful for the connection
    List<String> info = new ArrayList<String>();
    check: for (int i = 0; i < resources.size(); i++) {
        DBResource resource = resources.get(i);
        if (!resource.getResourceName().toLowerCase()
                .equals(resourceName.toLowerCase())) {
            continue;
        }
        normalizeDBInfo(resource);
        for (int j = 0; j < resource.getAccessPoints().size(); j++) {
            if (!resource.getAccessPoints().get(j).getDatabaseName()
                    .toLowerCase().equals(databaseName.toLowerCase())) {
                continue;
            }
            info.add(resource.getAccessPoints().get(j).getUsername());
            AnalysisLogger.getLogger().debug(
                    "In RandomSampleOnTable->username: "
                            + resource.getAccessPoints().get(j)
                                    .getUsername());
            info.add(resource.getAccessPoints().get(j).getPassword());
            AnalysisLogger.getLogger().debug(
                    "In RandomSampleOnTable->password: "
                            + resource.getAccessPoints().get(j)
                                    .getPassword());
            info.add(resource.getAccessPoints().get(j).getDriver());
            // remember the driver: process() needs it to decide whether a
            // schema name is required (postgres) or not (mysql)
            driverInfo = resource.getAccessPoints().get(j).getDriver();
            AnalysisLogger.getLogger().debug(
                    "In RandomSampleOnTable->driver: "
                            + resource.getAccessPoints().get(j)
                                    .getDriver());
            info.add(resource.getAccessPoints().get(j).getDialect());
            AnalysisLogger.getLogger().debug(
                    "In RandomSampleOnTable->dialect: "
                            + resource.getAccessPoints().get(j)
                                    .getDialect());
            info.add(resource.getAccessPoints().get(j).address());
            AnalysisLogger.getLogger().debug(
                    "In RandomSampleOnTable->url: "
                            + resource.getAccessPoints().get(j)
                                    .address());
            info.add(databaseName);
            AnalysisLogger.getLogger().debug(
                    "In RandomSampleOnTable->databasename: "
                            + resource.getAccessPoints().get(j)
                                    .getDatabaseName());
            // first matching access point wins
            break check;
        }
    }
    AnalysisLogger
            .getLogger()
            .debug("In RandomSampleOnTable->information useful for connection: retrieved");
    return info;
}
/**
 * Normalizes every access point of the given resource; logs and rethrows
 * the first normalization failure.
 *
 * @param resource the discovered database resource to normalize
 * @throws IOException if normalizing any access point fails
 */
private void normalizeDBInfo(DBResource resource) throws IOException {
    int accessPointCount = resource.getAccessPoints().size();
    for (int index = 0; index < accessPointCount; index++) {
        try {
            resource.normalize(index);
        } catch (IOException ioException) {
            AnalysisLogger.getLogger().debug(
                    "In RandomSampleOnTable->: Error in normalization process"
                            + ioException.getMessage());
            throw ioException;
        }
    }
}
/**
 * Opens the Hibernate connection using the positional parameter list
 * produced by retrieveInfo(): username, password, driver, dialect, URL,
 * database name.
 *
 * @param Info connection parameters in the fixed positional order above
 * @return the created session factory
 * @throws IOException if the connection cannot be created
 */
private SessionFactory getConnection(List<String> Info) throws IOException {
    // parameters arrive positionally from retrieveInfo()
    String username = Info.get(0);
    String password = Info.get(1);
    String driver = Info.get(2);
    String dialect = Info.get(3);
    String url = Info.get(4);
    String dbName = Info.get(5);
    SessionFactory factory = mgt.createConnection(username, password,
            driver, dialect, url, dbName);
    AnalysisLogger.getLogger().debug(
            "In RandomSampleOnTable->database " + dbName + ": connected");
    return factory;
}
/**
 * Performs the random sample operation on the table and packages the
 * result: the header line under key "HEADERS", the sampled rows under keys
 * "0".."n-1", and the full result file under key "File".
 *
 * @return the ordered map of statistical outputs
 * @throws Exception if the sampling or result retrieval fails
 */
private LinkedHashMap<String, StatisticalType> randomSampleOnTable()
        throws Exception {
    LinkedHashMap<String, StatisticalType> mapResults = new LinkedHashMap<String, StatisticalType>();
    AnalysisLogger
            .getLogger()
            .debug("In RandomSampleOnTable->starting the sample operation on table randomly");
    // sample on table operation
    if (driverInfo.toLowerCase().contains("postgres")) {
        // for a postgres database the second parameter is the schema name
        mgt.randomSampleOnTable(tableName, schemaName,
                config.getPersistencePath());
    }
    if (driverInfo.toLowerCase().contains("mysql")) {
        // for a mysql database the second parameter is the database name
        mgt.randomSampleOnTable(tableName, databaseName,
                config.getPersistencePath());
    }
    AnalysisLogger.getLogger().debug(
            "In RandomSampleOnTable->result retrieved");
    // Encoding fix: the original paired platform-default getBytes() with a
    // UTF-8 decode, which corrupts non-ASCII text on platforms whose
    // default charset is not UTF-8; both sides now use UTF-8 explicitly.
    HashMap<String, String> mapResult = mgt.getMapSampleTableResult();
    String encoded = new String(mapResult.get("HEADERS").getBytes("UTF-8"),
            "UTF-8");
    PrimitiveType headerVal = new PrimitiveType(String.class.getName(),
            encoded, PrimitiveTypes.STRING, "Row", "Row");
    mapResults.put("HEADERS", headerVal);
    // all remaining entries are the sampled rows, keyed "0".."n-1"
    for (int i = 0; i < mapResult.size() - 1; i++) {
        encoded = new String(mapResult.get(String.valueOf(i))
                .getBytes("UTF-8"), "UTF-8");
        PrimitiveType rowVal = new PrimitiveType(String.class.getName(),
                encoded, PrimitiveTypes.STRING, "Row", "Row");
        mapResults.put(String.valueOf(i), rowVal);
    }
    // attach the file containing the full sample result
    PrimitiveType fileResult = new PrimitiveType(File.class.getName(),
            mgt.getFileSampleTableResult(), PrimitiveTypes.FILE, "File",
            "File");
    mapResults.put("File", fileResult);
    return mapResults;
}
}

View File

@ -0,0 +1,602 @@
package org.gcube.dataacces.algorithms.drmalgorithms;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
import org.gcube.dataaccess.databases.resources.DBResource;
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.resources.discovery.client.api.DiscoveryException;
import org.gcube.resources.discovery.client.api.InvalidResultException;
import org.hibernate.HibernateException;
import org.hibernate.SessionFactory;
/**
 * Class that allows to perform a sample operation on a table of a chosen
 * database. It retrieves the first 100 rows of a table.
 */
public class SampleOnTable extends StandardLocalExternalAlgorithm {

    // output collection: "HEADERS", the row keys "0".."n-1" and "File"
    private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
    // object that allows to manage some operations on a database
    private DatabaseManagement mgt;
    // database's parameters specified by the user
    private String resourceName = null;
    private String databaseName = null;
    private String schemaName = null;
    private String tableName = null;
    // Hibernate session factory backing the current connection; stays null
    // when process() fails before a connection is created
    private SessionFactory sf;
    // variable that keeps track of the driver information
    private String driverInfo;

    /** Creates the database manager and resolves/propagates the gCube scope. */
    @Override
    public void init() throws Exception {
        mgt = new DatabaseManagement(config.getConfigPath());
        AnalysisLogger.getLogger().debug("In SampleOnTable->Initialization");
        String scope = config.getGcubeScope();
        AnalysisLogger.getLogger().debug(
                "In SampleOnTable->scope set by config object: " + scope);
        if (scope == null || scope.length() == 0) {
            // no scope from the configuration: fall back to ScopeProvider
            scope = ScopeProvider.instance.get();
            AnalysisLogger.getLogger().debug(
                    "In SampleOnTable->scope set by ScopeProvider: " + scope);
        } else {
            ScopeProvider.instance.set(scope);
        }
    }

    /** Returns the human-readable description shown for this algorithm. */
    @Override
    public String getDescription() {
        return "Algorithm that allows to perform a sample operation on a table";
    }

    /**
     * Validates the user inputs, opens the connection and performs the
     * sample operation; the result is stored in the output map. The
     * connection is always released in the finally block.
     *
     * @throws Exception when a mandatory parameter is missing or any
     *             discovery/connection/sampling step fails
     */
    @Override
    protected void process() throws Exception {
        AnalysisLogger.getLogger().debug("In SampleOnTable->Processing");
        try {
            // retrieve information useful for the connection
            List<String> Info = retrieveInfo();
            // Bug fix: validate before trimming — trim() on a null input
            // parameter threw a NullPointerException before the check ran.
            tableName = getInputParameter("TableName");
            if ((tableName == null) || (tableName.trim().equals(""))) {
                throw new Exception("Warning: insert the table name");
            }
            tableName = tableName.trim();
            // check on schema name field (postgres databases only)
            if (driverInfo.toLowerCase().contains("postgres")) {
                schemaName = getInputParameter("SchemaName");
                if ((schemaName == null) || (schemaName.trim().equals(""))) {
                    throw new Exception("Warning: insert the schema name");
                }
                schemaName = schemaName.trim();
            }
            // create the connection
            sf = getConnection(Info);
            // sample operation on table
            map = sampleOnTable();
            // close the connection
            sf.close();
        } catch (HibernateException h) {
            AnalysisLogger.getLogger().debug(
                    "In SampleOnTable-> ERROR " + h.getMessage());
            throw h;
        } catch (IllegalStateException e) {
            AnalysisLogger.getLogger().debug(
                    "In SampleOnTable-> ERROR " + e.getMessage());
            throw e;
        } catch (DiscoveryException e1) {
            AnalysisLogger.getLogger().debug(
                    "In SampleOnTable-> ERROR " + e1.getMessage());
            throw e1;
        } catch (InvalidResultException e2) {
            AnalysisLogger.getLogger().debug(
                    "In SampleOnTable-> ERROR " + e2.getMessage());
            throw e2;
        } catch (IOException e3) {
            AnalysisLogger.getLogger().debug(
                    "In SampleOnTable-> ERROR " + e3.getMessage());
            throw e3;
        } catch (Exception e4) {
            AnalysisLogger.getLogger().debug(
                    "In SampleOnTable-> Exception " + e4.getMessage());
            throw e4;
        } finally {
            // Bug fix: sf is null when validation or discovery fails before
            // a connection is opened; the original unguarded sf.isClosed()
            // call raised a NullPointerException here that masked the real
            // failure.
            if ((sf != null) && (!sf.isClosed())) {
                mgt.closeConnection();
            }
        }
    }

    /**
     * Declares the four user-supplied input parameters of the algorithm:
     * resource, database, schema and table name (all free-text, empty
     * default). Registration order determines the order shown to the user.
     */
    @Override
    protected void setInputParameters() {
        // parameters specified by the user
        addStringInput("ResourceName", "The name of the resource", "");
        addStringInput("DatabaseName", "The name of the database", "");
        addStringInput("SchemaName", "The name of the schema", "");
        addStringInput("TableName", "The name of the table", "");
    }

    /**
     * Returns the algorithm output: the map filled by process(), wrapped in
     * a single MAP-typed PrimitiveType named "ResultsMap".
     */
    public StatisticalType getOutput() {
        AnalysisLogger.getLogger()
                .debug("In SampleOnTable->retrieving outputs");
        return new PrimitiveType(LinkedHashMap.class.getName(), map,
                PrimitiveTypes.MAP, "ResultsMap", "Results Map");
    }

    /** Lifecycle hook invoked at algorithm termination; only logs the event. */
    @Override
    public void shutdown() {
        AnalysisLogger.getLogger().debug("In SampleOnTable->Shutdown");
    }

    /**
     * Recovers from the infrastructure the information needed to connect to
     * the chosen database: username, password, driver, dialect, URL and
     * database name, in this exact positional order (consumed by
     * getConnection()). Matching on resource and database names is
     * case-insensitive. Also records the driver string in driverInfo.
     *
     * NOTE(review): the password is written to the debug log below —
     * consider removing that entry.
     *
     * @return the connection parameters; empty when nothing matches
     * @throws Exception when a mandatory input parameter is missing
     * @throws DiscoveryException if the resource discovery fails
     * @throws InvalidResultException if the discovery returns invalid data
     */
    private List<String> retrieveInfo() throws Exception,
            IllegalStateException, DiscoveryException, InvalidResultException {
        // Bug fix: validate before trimming — trim() on a null parameter
        // threw a NullPointerException before the emptiness check ran.
        resourceName = getInputParameter("ResourceName");
        if ((resourceName == null) || (resourceName.trim().equals(""))) {
            throw new Exception("Warning: insert the resource name");
        }
        resourceName = resourceName.trim();
        databaseName = getInputParameter("DatabaseName");
        if ((databaseName == null) || (databaseName.trim().equals(""))) {
            throw new Exception("Warning: insert the database name");
        }
        databaseName = databaseName.trim();
        // retrieve the chosen resource
        DatabasesDiscoverer discovery = new DatabasesDiscoverer();
        List<DBResource> resources = discovery.discover();
        AnalysisLogger.getLogger().debug(
                "In SampleOnTable->number of elements: " + resources.size());
        // list that contains information useful for the connection
        List<String> info = new ArrayList<String>();
        check: for (int i = 0; i < resources.size(); i++) {
            DBResource resource = resources.get(i);
            if (!resource.getResourceName().toLowerCase()
                    .equals(resourceName.toLowerCase())) {
                continue;
            }
            normalizeDBInfo(resource);
            for (int j = 0; j < resource.getAccessPoints().size(); j++) {
                if (!resource.getAccessPoints().get(j).getDatabaseName()
                        .toLowerCase().equals(databaseName.toLowerCase())) {
                    continue;
                }
                info.add(resource.getAccessPoints().get(j).getUsername());
                AnalysisLogger.getLogger().debug(
                        "In SampleOnTable->username: "
                                + resource.getAccessPoints().get(j)
                                        .getUsername());
                info.add(resource.getAccessPoints().get(j).getPassword());
                AnalysisLogger.getLogger().debug(
                        "In SampleOnTable->password: "
                                + resource.getAccessPoints().get(j)
                                        .getPassword());
                info.add(resource.getAccessPoints().get(j).getDriver());
                // remember the driver: process() uses it to decide whether
                // a schema name is required
                driverInfo = resource.getAccessPoints().get(j).getDriver();
                AnalysisLogger.getLogger().debug(
                        "In SampleOnTable->driver: "
                                + resource.getAccessPoints().get(j)
                                        .getDriver());
                info.add(resource.getAccessPoints().get(j).getDialect());
                AnalysisLogger.getLogger().debug(
                        "In SampleOnTable->dialect: "
                                + resource.getAccessPoints().get(j)
                                        .getDialect());
                info.add(resource.getAccessPoints().get(j).address());
                AnalysisLogger.getLogger().debug(
                        "In SampleOnTable->url: "
                                + resource.getAccessPoints().get(j)
                                        .address());
                info.add(databaseName);
                AnalysisLogger.getLogger().debug(
                        "In SampleOnTable->databasename: "
                                + resource.getAccessPoints().get(j)
                                        .getDatabaseName());
                // first matching access point wins
                break check;
            }
        }
        AnalysisLogger
                .getLogger()
                .debug("In SampleOnTable->information useful for connection: retrieved");
        return info;
    }

    /**
     * Normalizes every access point of the given resource; logs and
     * rethrows the first normalization failure.
     *
     * @throws IOException if normalizing any access point fails
     */
    private void normalizeDBInfo(DBResource resource) throws IOException {
        int accessPointCount = resource.getAccessPoints().size();
        for (int index = 0; index < accessPointCount; index++) {
            try {
                resource.normalize(index);
            } catch (IOException e) {
                AnalysisLogger.getLogger().debug(
                        "In SampleOnTable->: Error in normalization process"
                                + e.getMessage());
                throw e;
            }
        }
    }

    /**
     * Opens the Hibernate connection using the positional parameter list
     * produced by retrieveInfo(): username, password, driver, dialect, URL,
     * database name.
     *
     * @throws IOException if the connection cannot be created
     */
    private SessionFactory getConnection(List<String> Info) throws IOException {
        // parameters arrive positionally from retrieveInfo()
        String username = Info.get(0);
        String password = Info.get(1);
        String driver = Info.get(2);
        String dialect = Info.get(3);
        String url = Info.get(4);
        String dbName = Info.get(5);
        SessionFactory factory = mgt.createConnection(username, password,
                driver, dialect, url, dbName);
        AnalysisLogger.getLogger().debug(
                "In SampleOnTable->database " + dbName + ": connected");
        return factory;
    }

    /**
     * Performs the sample operation (first 100 rows) and packages the
     * result: the header line under key "HEADERS", the sampled rows under
     * keys "0".."n-1", and the full result file under key "File".
     *
     * @return the ordered map of statistical outputs
     * @throws Exception if the sampling or result retrieval fails
     */
    private LinkedHashMap<String, StatisticalType> sampleOnTable()
            throws Exception {
        LinkedHashMap<String, StatisticalType> mapResults = new LinkedHashMap<String, StatisticalType>();
        AnalysisLogger.getLogger().debug(
                "In SampleOnTable->starting the sample operation on table");
        // sample on table operation
        if (driverInfo.toLowerCase().contains("postgres")) {
            // for a postgres database the second parameter is the schema name
            mgt.sampleOnTable(tableName, schemaName,
                    config.getPersistencePath());
        }
        if (driverInfo.toLowerCase().contains("mysql")) {
            // for a mysql database the second parameter is the database name
            mgt.sampleOnTable(tableName, databaseName,
                    config.getPersistencePath());
        }
        AnalysisLogger.getLogger().debug("In SampleOnTable->result retrieved");
        // Encoding fix: the original paired platform-default getBytes()
        // with a UTF-8 decode, corrupting non-ASCII text when the platform
        // charset is not UTF-8; both sides now use UTF-8 explicitly.
        HashMap<String, String> mapResult = mgt.getMapSampleTableResult();
        String encoded = new String(mapResult.get("HEADERS").getBytes("UTF-8"),
                "UTF-8");
        PrimitiveType headerVal = new PrimitiveType(String.class.getName(),
                encoded, PrimitiveTypes.STRING, "Row", "Row");
        mapResults.put("HEADERS", headerVal);
        // all remaining entries are the sampled rows, keyed "0".."n-1"
        for (int i = 0; i < mapResult.size() - 1; i++) {
            encoded = new String(mapResult.get(String.valueOf(i))
                    .getBytes("UTF-8"), "UTF-8");
            PrimitiveType rowVal = new PrimitiveType(String.class.getName(),
                    encoded, PrimitiveTypes.STRING, "Row", "Row");
            mapResults.put(String.valueOf(i), rowVal);
        }
        // attach the file containing the full sample result
        PrimitiveType fileResult = new PrimitiveType(File.class.getName(),
                mgt.getFileSampleTableResult(), PrimitiveTypes.FILE, "File",
                "File");
        mapResults.put("File", fileResult);
        return mapResults;
    }
}

View File

@ -0,0 +1,7 @@
package org.gcube.dataacces.algorithms.drmalgorithms;
/**
 * Database platforms selectable for the "smart correction" feature;
 * presumably used to pick a per-dialect name/query correction strategy —
 * TODO confirm against the callers, the semantics are not visible here.
 * NONE appears to disable the correction.
 */
public enum SmartCorrectionEnum {
NONE,
POSTGRES,
MYSQL,
}

View File

@ -0,0 +1,591 @@
package org.gcube.dataacces.algorithms.drmalgorithms;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
import org.gcube.dataaccess.databases.resources.DBResource;
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.resources.discovery.client.api.DiscoveryException;
import org.gcube.resources.discovery.client.api.InvalidResultException;
import org.hibernate.HibernateException;
import org.hibernate.SessionFactory;
/**
* Class that allows to perform a smart sample operation on a table of a chosen
* database. It retrieves 100 rows of a table randomly that have the maximum
* number of columns not null.
*/
public class SmartSampleOnTable extends StandardLocalExternalAlgorithm {
private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
// object that allows to manage some operations on a database
private DatabaseManagement mgt;
// file in which information is written
// private BufferedWriter out;
// private String fileName;
// database's parameters specified by the user
private String resourceName = null;
private String databaseName = null;
private String schemaName = null;
private String tableName = null;
private SessionFactory sf;
// variable that keeps track of the driver information
private String driverInfo;
/**
 * Creates the database manager and resolves the gCube scope: the scope from
 * the configuration wins; when absent, the ScopeProvider value is used.
 */
@Override
public void init() throws Exception {
    mgt = new DatabaseManagement(config.getConfigPath());
    AnalysisLogger.getLogger().debug(
            "In SmartSampleOnTable->Initialization");
    String scope = config.getGcubeScope();
    AnalysisLogger.getLogger().debug(
            "In SmartSampleOnTable->scope set by config object: " + scope);
    boolean scopeMissing = (scope == null) || scope.isEmpty();
    if (scopeMissing) {
        // no scope from the configuration: fall back to ScopeProvider
        scope = ScopeProvider.instance.get();
        AnalysisLogger.getLogger().debug(
                "In SmartSampleOnTable->scope set by ScopeProvider: "
                        + scope);
    } else {
        ScopeProvider.instance.set(scope);
    }
}
/** Returns the human-readable description shown for this algorithm. */
@Override
public String getDescription() {
return "Algorithm that allows to perform a smart sample operation on a table";
}
/**
 * Validates the user inputs, opens the connection and runs the smart sample
 * operation; the result is stored in the output map. The connection is
 * always released in the finally block.
 *
 * @throws Exception when a mandatory parameter is missing or any
 *             discovery/connection/sampling step fails
 */
@Override
protected void process() throws Exception {
    AnalysisLogger.getLogger().debug("In SmartSampleOnTable->Processing");
    try {
        // retrieve information useful for the connection
        List<String> Info = retrieveInfo();
        // Bug fix: validate before trimming — trim() on a null input
        // parameter threw a NullPointerException before the check ran.
        tableName = getInputParameter("TableName");
        if ((tableName == null) || (tableName.trim().equals(""))) {
            throw new Exception("Warning: insert the table name");
        }
        tableName = tableName.trim();
        // check on schema name field (postgres databases only)
        if (driverInfo.toLowerCase().contains("postgres")) {
            schemaName = getInputParameter("SchemaName");
            if ((schemaName == null) || (schemaName.trim().equals(""))) {
                throw new Exception("Warning: insert the schema name");
            }
            schemaName = schemaName.trim();
        }
        // create the connection
        sf = getConnection(Info);
        // smart sample operation on table
        map = smartSampleOnTable();
        // close the connection
        sf.close();
    } catch (HibernateException h) {
        AnalysisLogger.getLogger().debug(
                "In SmartSampleOnTable-> ERROR " + h.getMessage());
        throw h;
    } catch (IllegalStateException e) {
        AnalysisLogger.getLogger().debug(
                "In SmartSampleOnTable-> ERROR " + e.getMessage());
        throw e;
    } catch (DiscoveryException e1) {
        AnalysisLogger.getLogger().debug(
                "In SmartSampleOnTable-> ERROR " + e1.getMessage());
        throw e1;
    } catch (InvalidResultException e2) {
        AnalysisLogger.getLogger().debug(
                "In SmartSampleOnTable-> ERROR " + e2.getMessage());
        throw e2;
    } catch (IOException e3) {
        AnalysisLogger.getLogger().debug(
                "In SmartSampleOnTable-> ERROR " + e3.getMessage());
        throw e3;
    } catch (Exception e4) {
        AnalysisLogger.getLogger().debug(
                "In SmartSampleOnTable-> Exception " + e4.getMessage());
        throw e4;
    } finally {
        // Bug fix: sf is null when validation or discovery fails before a
        // connection is opened; the original unguarded sf.isClosed() call
        // raised a NullPointerException here that masked the real failure.
        if ((sf != null) && (!sf.isClosed())) {
            mgt.closeConnection();
        }
    }
}
/**
 * Declares the four user-supplied input parameters of the algorithm:
 * resource, database, schema and table name (all free-text, empty default).
 * The registration order determines the order shown to the user.
 */
@Override
protected void setInputParameters() {
// parameters specified by the user
addStringInput("ResourceName", "The name of the resource", "");
addStringInput("DatabaseName", "The name of the database", "");
addStringInput("SchemaName", "The name of the schema", "");
addStringInput("TableName", "The name of the table", "");
}
/**
 * Returns the algorithm output: the map filled by process(), wrapped in a
 * single MAP-typed PrimitiveType named "ResultsMap".
 */
public StatisticalType getOutput() {
    AnalysisLogger.getLogger().debug(
            "In SmartSampleOnTable->retrieving outputs");
    // wrap the collected rows in one MAP-typed statistical object
    return new PrimitiveType(LinkedHashMap.class.getName(), map,
            PrimitiveTypes.MAP, "ResultsMap", "Results Map");
}
/** Lifecycle hook invoked at algorithm termination; only logs the event. */
@Override
public void shutdown() {
AnalysisLogger.getLogger().debug("In SmartSampleOnTable->Shutdown");
}
/**
 * Recovers from the infrastructure the information needed to connect to the
 * chosen database: username, password, driver, dialect, URL and database
 * name, in this exact positional order (consumed by getConnection()). The
 * match on resource and database names is case-insensitive. Also records
 * the driver string in {@code driverInfo} for later use by process().
 *
 * NOTE(review): the password is written to the debug log below — consider
 * removing that entry.
 *
 * @return the connection parameters; empty when nothing matches
 * @throws Exception when a mandatory input parameter is missing
 * @throws DiscoveryException if the resource discovery fails
 * @throws InvalidResultException if the discovery returns invalid data
 */
private List<String> retrieveInfo() throws Exception,
        IllegalStateException, DiscoveryException, InvalidResultException {
    // Bug fix: validate before trimming — calling trim() on a null input
    // parameter threw a NullPointerException before the emptiness check
    // could ever run, making the original null check dead code.
    resourceName = getInputParameter("ResourceName");
    if ((resourceName == null) || (resourceName.trim().equals(""))) {
        throw new Exception("Warning: insert the resource name");
    }
    resourceName = resourceName.trim();
    databaseName = getInputParameter("DatabaseName");
    if ((databaseName == null) || (databaseName.trim().equals(""))) {
        throw new Exception("Warning: insert the database name");
    }
    databaseName = databaseName.trim();
    // retrieve the chosen resource
    DatabasesDiscoverer discovery = new DatabasesDiscoverer();
    List<DBResource> resources = discovery.discover();
    AnalysisLogger.getLogger().debug(
            "In SmartSampleOnTable->number of elements: "
                    + resources.size());
    // list that contains information useful for the connection
    List<String> info = new ArrayList<String>();
    check: for (int i = 0; i < resources.size(); i++) {
        DBResource resource = resources.get(i);
        if (!resource.getResourceName().toLowerCase()
                .equals(resourceName.toLowerCase())) {
            continue;
        }
        normalizeDBInfo(resource);
        for (int j = 0; j < resource.getAccessPoints().size(); j++) {
            if (!resource.getAccessPoints().get(j).getDatabaseName()
                    .toLowerCase().equals(databaseName.toLowerCase())) {
                continue;
            }
            info.add(resource.getAccessPoints().get(j).getUsername());
            AnalysisLogger.getLogger().debug(
                    "In SmartSampleOnTable->username: "
                            + resource.getAccessPoints().get(j)
                                    .getUsername());
            info.add(resource.getAccessPoints().get(j).getPassword());
            AnalysisLogger.getLogger().debug(
                    "In SmartSampleOnTable->password: "
                            + resource.getAccessPoints().get(j)
                                    .getPassword());
            info.add(resource.getAccessPoints().get(j).getDriver());
            // remember the driver: process() needs it to decide whether a
            // schema name is required (postgres) or not (mysql)
            driverInfo = resource.getAccessPoints().get(j).getDriver();
            AnalysisLogger.getLogger().debug(
                    "In SmartSampleOnTable->driver: "
                            + resource.getAccessPoints().get(j)
                                    .getDriver());
            info.add(resource.getAccessPoints().get(j).getDialect());
            AnalysisLogger.getLogger().debug(
                    "In SmartSampleOnTable->dialect: "
                            + resource.getAccessPoints().get(j)
                                    .getDialect());
            info.add(resource.getAccessPoints().get(j).address());
            AnalysisLogger.getLogger().debug(
                    "In SmartSampleOnTable->url: "
                            + resource.getAccessPoints().get(j)
                                    .address());
            info.add(databaseName);
            AnalysisLogger.getLogger().debug(
                    "In SmartSampleOnTable->databasename: "
                            + resource.getAccessPoints().get(j)
                                    .getDatabaseName());
            // first matching access point wins
            break check;
        }
    }
    AnalysisLogger
            .getLogger()
            .debug("In SmartSampleOnTable->information useful for connection: retrieved");
    return info;
}
// Runs the normalization step on every access point exposed by the given
// database resource; the first failure is logged and rethrown unchanged.
private void normalizeDBInfo(DBResource resource) throws IOException {
	for (int index = 0; index < resource.getAccessPoints().size(); index++) {
		try {
			resource.normalize(index);
		} catch (IOException failure) {
			AnalysisLogger.getLogger().debug(
					"In SmartSampleOnTable->: Error in normalization process"
							+ failure.getMessage());
			throw failure;
		}
	}
}
// Opens the database connection from the ordered parameter list built by
// retrieveInfo(): username, password, driver, dialect, url, database name.
private SessionFactory getConnection(List<String> Info) throws IOException {
	Iterator<String> it = Info.iterator();
	String user = it.next();
	String password = it.next();
	String driver = it.next();
	String hibernateDialect = it.next();
	String url = it.next();
	String dbName = it.next();
	SessionFactory factory = mgt.createConnection(user, password, driver,
			hibernateDialect, url, dbName);
	AnalysisLogger.getLogger().debug(
			"In SmartSampleOnTable->database " + dbName
					+ ": connected");
	return factory;
}
/**
 * Performs the smart sample operation on the selected table and packs the
 * sampled rows (key "HEADERS" plus one entry per row index) and the result
 * file into the returned map.
 *
 * For postgres the second argument of the sample call is the schema name,
 * for mysql it is the database name.
 *
 * @return map with the header row, the sampled rows and the result file
 * @throws Exception if the sample operation or the result encoding fails
 */
private LinkedHashMap<String, StatisticalType> smartSampleOnTable()
		throws Exception {
	LinkedHashMap<String, StatisticalType> mapResults = new LinkedHashMap<String, StatisticalType>();
	AnalysisLogger
			.getLogger()
			.debug("In SmartSampleOnTable->starting the smart sample operation on table");
	// sample on table operation
	if (driverInfo.toLowerCase().contains("postgres")) {
		// for a postgres database the second parameter is the schema name
		mgt.smartSampleOnTable(tableName, schemaName,
				config.getPersistencePath());
	}
	if (driverInfo.toLowerCase().contains("mysql")) {
		// for a mysql database the second parameter is the database name
		mgt.smartSampleOnTable(tableName, databaseName,
				config.getPersistencePath());
	}
	AnalysisLogger.getLogger().debug(
			"In SmartSampleOnTable->result retrieved");
	// rows computed by the sample operation, keyed by "HEADERS" and by
	// the row index rendered as a string
	HashMap<String, String> mapResult = mgt.getMapSampleTableResult();
	// encode and decode with an explicit UTF-8 charset: the original
	// no-argument getBytes() used the platform default charset and could
	// corrupt non-ASCII content on non-UTF-8 machines
	String encoded = new String(mapResult.get("HEADERS").getBytes("UTF-8"),
			"UTF-8");
	PrimitiveType val = new PrimitiveType(String.class.getName(), encoded,
			PrimitiveTypes.STRING, "Row", "Row");
	mapResults.put("HEADERS", val);
	// one entry of mapResult is the header, hence size() - 1 data rows
	for (int i = 0; i < mapResult.size() - 1; i++) {
		encoded = new String(mapResult.get(String.valueOf(i)).getBytes(
				"UTF-8"), "UTF-8");
		PrimitiveType val1 = new PrimitiveType(String.class.getName(),
				encoded, PrimitiveTypes.STRING, "Row", "Row");
		mapResults.put(String.valueOf(i), val1);
	}
	// attach the file that contains the whole sample result
	PrimitiveType fileResult = new PrimitiveType(File.class.getName(),
			mgt.getFileSampleTableResult(), PrimitiveTypes.FILE, "File",
			"File");
	mapResults.put("File", fileResult);
	return mapResults;
}
}

View File

@ -0,0 +1,788 @@
package org.gcube.dataacces.algorithms.drmalgorithms;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.common.scope.api.ScopeProvider;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataaccess.databases.access.DatabasesDiscoverer;
import org.gcube.dataaccess.databases.lexer.MySQLLexicalAnalyzer;
import org.gcube.dataaccess.databases.lexer.PostgresLexicalAnalyzer;
import org.gcube.dataaccess.databases.resources.DBResource;
import org.gcube.dataaccess.databases.utils.DatabaseManagement;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.gcube.resources.discovery.client.api.DiscoveryException;
import org.gcube.resources.discovery.client.api.InvalidResultException;
import org.hibernate.HibernateException;
import org.hibernate.SessionFactory;
import com.adventnet.swissqlapi.sql.exception.ConvertException;
import com.adventnet.swissqlapi.sql.parser.ParseException;
/**
 * Class that allows to submit a query. It retrieves results in a file and in a
 * map.
 */
public class SubmitQuery extends StandardLocalExternalAlgorithm {

	// map that collects the algorithm outputs (converted query, header,
	// result rows and the result file)
	private LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();

	// object that allows to manage some operations on a database
	private DatabaseManagement mgt;

	// variable that keeps track of the database's type (driver class name)
	private String driverInfo;

	// hibernate session factory towards the selected database
	private SessionFactory sf;

	// database's parameters specified by the user
	private String resourceName = null;
	private String databaseName = null;
	private String schemaName = null;
	private String tableName = null;
	private String query = null;
	private String valueReadOnly = "Read-Only Query";
	private String smartCorrection = "Apply Smart Correction";
	private String dialect = "Language";
	String valueRO;
	String valueSC;
	String valueDialect = "";

	// variable used to filter the disallowed queries
	private boolean NotAllowedQuery = false;

	/**
	 * Initializes the database manager and resolves the scope, preferring
	 * the one carried by the configuration and falling back to the
	 * ScopeProvider otherwise.
	 */
	@Override
	public void init() throws Exception {
		mgt = new DatabaseManagement(config.getConfigPath());
		AnalysisLogger.getLogger().debug("In SubmitQuery->Initialization");
		String scope = config.getGcubeScope();
		AnalysisLogger.getLogger().debug(
				"In SubmitQuery->scope set by config object: " + scope);
		if (scope == null || scope.length() == 0) {
			scope = ScopeProvider.instance.get();
			AnalysisLogger.getLogger().debug(
					"In SubmitQuery->scope set by ScopeProvider: " + scope);
		} else {
			ScopeProvider.instance.set(scope);
		}
		valueRO = config.getParam(valueReadOnly);
		valueSC = config.getParam(smartCorrection);
	}

	@Override
	public String getDescription() {
		return "Algorithm that allows to submit a query";
	}

	/**
	 * Retrieves the connection information, opens the connection, submits
	 * the query and closes the connection. Every failure is logged and
	 * rethrown to the caller.
	 */
	@Override
	protected void process() throws Exception, IOException,
			IllegalStateException, DiscoveryException, InvalidResultException,
			HibernateException {
		AnalysisLogger.getLogger().debug("In SubmitQuery->Processing");
		try {
			// retrieve information useful for the connection
			List<String> Info = retrieveInfo();
			// create the connection
			sf = getConnection(Info);
			// submit a query
			map = submitQuery();
			// close the connection
			sf.close();
		} catch (HibernateException h) {
			AnalysisLogger.getLogger().debug(
					"In SubmitQuery-> ERROR " + h.getMessage());
			throw h;
		} catch (IllegalStateException e) {
			AnalysisLogger.getLogger().debug(
					"In SubmitQuery-> ERROR " + e.getMessage());
			throw e;
		} catch (DiscoveryException e1) {
			AnalysisLogger.getLogger().debug(
					"In SubmitQuery-> ERROR " + e1.getMessage());
			throw e1;
		} catch (InvalidResultException e2) {
			AnalysisLogger.getLogger().debug(
					"In SubmitQuery-> ERROR " + e2.getMessage());
			throw e2;
		} catch (IOException e3) {
			AnalysisLogger.getLogger().debug(
					"In SubmitQuery-> ERROR " + e3.getMessage());
			throw e3;
		} catch (Exception e4) {
			AnalysisLogger.getLogger().debug(
					"In SubmitQuery-> Exception " + e4.getMessage());
			throw e4;
		} finally {
			// sf is null when getConnection failed: guard against a
			// NullPointerException that would mask the original error
			if ((sf != null) && (sf.isClosed() == false)) {
				mgt.closeConnection();
			}
		}
	}

	/**
	 * Returns the query results as a map wrapped in a primitive type.
	 */
	public StatisticalType getOutput() {
		AnalysisLogger.getLogger().debug("In SubmitQuery->retrieving outputs");
		// generate a primitive type for the collection
		PrimitiveType output = new PrimitiveType(LinkedHashMap.class.getName(),
				map, PrimitiveTypes.MAP, "ResultsMap", "Results Map");
		return output;
	}

	/**
	 * Declares the inputs required by the algorithm: resource name,
	 * database name, read-only flag, smart-correction flag, target
	 * language and the query itself.
	 */
	public List<StatisticalType> getInputParameters() {
		List<StatisticalType> parameters = new ArrayList<StatisticalType>();
		// parameters specified by the user
		PrimitiveType p0 = new PrimitiveType(String.class.getName(), "",
				PrimitiveTypes.STRING, "ResourceName",
				"The name of the resource");
		PrimitiveType p1 = new PrimitiveType(String.class.getName(), "",
				PrimitiveTypes.STRING, "DatabaseName",
				"The name of the database");
		PrimitiveType p2 = new PrimitiveType(Boolean.class.getName(), null,
				PrimitiveTypes.BOOLEAN, valueReadOnly,
				"Check the box if the query must be read-only", "true");
		PrimitiveType p3 = new PrimitiveType(Boolean.class.getName(), null,
				PrimitiveTypes.BOOLEAN, smartCorrection,
				"Check the box for smart correction", "true");
		PrimitiveType p4 = new PrimitiveType(Enum.class.getName(),
				SmartCorrectionEnum.values(), PrimitiveTypes.ENUMERATED,
				dialect, "Language", SmartCorrectionEnum.NONE.name());
		PrimitiveType p5 = new PrimitiveType(String.class.getName(), "",
				PrimitiveTypes.STRING, "Query", "query");
		parameters.add(p0);
		parameters.add(p1);
		parameters.add(p2);
		parameters.add(p3);
		parameters.add(p4);
		parameters.add(p5);
		return parameters;
	}

	/**
	 * No inputs are registered here: they are all declared through
	 * getInputParameters().
	 */
	@Override
	protected void setInputParameters() {
		AnalysisLogger.getLogger().debug("In SubmitQuery->setting inputs");
	}

	@Override
	public void shutdown() {
		AnalysisLogger.getLogger().debug("In SubmitQuery->Shutdown");
	}

	/**
	 * Recovers from the information system the connection information
	 * (username, password, driver, dialect, url, database name) of the
	 * resource and database chosen by the user. Names are compared
	 * case-insensitively.
	 *
	 * @return the ordered list of connection parameters
	 * @throws Exception when an input is missing or no matching
	 *             resource/database pair is found
	 */
	private List<String> retrieveInfo() throws Exception,
			IllegalStateException, DiscoveryException, InvalidResultException {
		// check for null before trimming: getInputParameter may return
		// null when the parameter is missing
		resourceName = getInputParameter("ResourceName");
		if ((resourceName == null) || (resourceName.trim().equals(""))) {
			throw new Exception("Warning: insert the resource name");
		}
		resourceName = resourceName.trim();
		databaseName = getInputParameter("DatabaseName");
		if ((databaseName == null) || (databaseName.trim().equals(""))) {
			throw new Exception("Warning: insert the database name");
		}
		databaseName = databaseName.trim();
		// retrieve the chosen resource
		DatabasesDiscoverer discovery = new DatabasesDiscoverer();
		List<DBResource> resources = discovery.discover();
		AnalysisLogger.getLogger().debug(
				"In SubmitQuery->number of elements: " + resources.size());
		// list that contains information useful for the connection
		List<String> info = new ArrayList<String>();
		check: for (int i = 0; i < resources.size(); i++) {
			DBResource res = resources.get(i);
			if (res.getResourceName().toLowerCase()
					.equals(resourceName.toLowerCase())) {
				normalizeDBInfo(res);
				for (int j = 0; j < res.getAccessPoints().size(); j++) {
					if (res.getAccessPoints().get(j).getDatabaseName()
							.toLowerCase()
							.equals(databaseName.toLowerCase())) {
						info.add(res.getAccessPoints().get(j).getUsername());
						AnalysisLogger.getLogger().debug(
								"In SubmitQuery->username: "
										+ res.getAccessPoints().get(j)
												.getUsername());
						// NOTE(review): the password is written to the
						// debug log - consider suppressing it
						info.add(res.getAccessPoints().get(j).getPassword());
						AnalysisLogger.getLogger().debug(
								"In SubmitQuery->password: "
										+ res.getAccessPoints().get(j)
												.getPassword());
						info.add(res.getAccessPoints().get(j).getDriver());
						driverInfo = res.getAccessPoints().get(j).getDriver();
						AnalysisLogger.getLogger().debug(
								"In SubmitQuery->driver: "
										+ res.getAccessPoints().get(j)
												.getDriver());
						info.add(res.getAccessPoints().get(j).getDialect());
						AnalysisLogger.getLogger().debug(
								"In SubmitQuery->dialect: "
										+ res.getAccessPoints().get(j)
												.getDialect());
						info.add(res.getAccessPoints().get(j).address());
						AnalysisLogger.getLogger().debug(
								"In SubmitQuery->url: "
										+ res.getAccessPoints().get(j)
												.address());
						info.add(databaseName);
						AnalysisLogger.getLogger().debug(
								"In SubmitQuery->databasename: "
										+ res.getAccessPoints().get(j)
												.getDatabaseName());
						break check;
					}
				}
			}
		}
		// fail with a clear message instead of letting getConnection
		// raise a NoSuchElementException on an empty list
		if (info.isEmpty()) {
			throw new Exception("Warning: resource " + resourceName
					+ " with database " + databaseName + " not found");
		}
		AnalysisLogger.getLogger().debug(
				"In SubmitQuery->information useful for connection: retrieved");
		return info;
	}

	/**
	 * Creates the database connection from the ordered parameter list
	 * produced by retrieveInfo().
	 */
	private SessionFactory getConnection(List<String> Info) throws IOException {
		// create the connection
		Iterator<String> iterator = Info.iterator();
		String DatabaseUserName = iterator.next();
		String DatabasePassword = iterator.next();
		String DatabaseDriver = iterator.next();
		String DatabaseDialect = iterator.next();
		String DatabaseURL = iterator.next();
		String DatabaseName = iterator.next();
		SessionFactory sf = mgt.createConnection(DatabaseUserName,
				DatabasePassword, DatabaseDriver, DatabaseDialect, DatabaseURL,
				DatabaseName);
		AnalysisLogger.getLogger().debug(
				"In SubmitQuery->database " + DatabaseName + ": connected");
		return sf;
	}

	/**
	 * Submits the query: optionally filters it for read-only compliance,
	 * optionally applies the smart correction towards the chosen dialect,
	 * then runs it and packs the converted query, the header, the rows
	 * and the result file into the returned map.
	 */
	private LinkedHashMap<String, StatisticalType> submitQuery()
			throws Exception, ParseException, ConvertException {
		LinkedHashMap<String, StatisticalType> mapResults = new LinkedHashMap<String, StatisticalType>();
		query = getInputParameter("Query");
		if ((query == null) || (query.equals(""))) {
			throw new Exception("Warning: insert the query");
		}
		AnalysisLogger.getLogger().debug(
				"In SubmitQuery->Analyzing the query: " + query);
		AnalysisLogger.getLogger().debug("In SubmitQuery->valueRO: " +
				valueRO);
		// analyze the query to filter it if it is not read-only compliant;
		// "true".equals(...) is null-safe (the parameter may be absent)
		if ("true".equals(valueRO)) {
			NotAllowedQuery = analyzeQuery(query);
		}
		if (NotAllowedQuery == false) {
			List<Object> result = new ArrayList<Object>();
			AnalysisLogger.getLogger().debug(
					"In SubmitQuery->Submitting the query: " + query);
			AnalysisLogger.getLogger()
					.debug("In SubmitQuery->path file: "
							+ config.getPersistencePath());
			AnalysisLogger.getLogger().debug(
					"In SubmitQuery->valueSC: " + valueSC);
			// dialect to which a query is converted
			valueDialect = getInputParameter(dialect);
			AnalysisLogger.getLogger().debug(
					"In SubmitQuery->valueDialect: " + valueDialect);
			// if the user asked for smart correction, translate the query
			// towards the chosen dialect (null/empty dialect: skip)
			if (("true".equals(valueSC)) && (valueDialect != null)
					&& (!(valueDialect.equals("")))) {
				String smartCorrectedQuery = "";
				AnalysisLogger.getLogger().debug(
						"In SubmitQuery->applying smart correction on the query: "
								+ query);
				if (valueDialect.equals("POSTGRES")) {
					// call the SwisSQL library functionality
					smartCorrectedQuery = mgt.smartCorrectionOnQuery(query,
							DatabaseManagement.POSTGRESQLDialect);
				}
				if (valueDialect.equals("MYSQL")) {
					// call the SwisSQL library functionality
					smartCorrectedQuery = mgt.smartCorrectionOnQuery(query,
							DatabaseManagement.MYSQLDialect);
				}
				AnalysisLogger.getLogger().debug(
						"In SubmitQuery-> query converted: "
								+ smartCorrectedQuery);
				query = smartCorrectedQuery;
				if (!(smartCorrectedQuery.equals(""))) {
					PrimitiveType valQuery = new PrimitiveType(
							String.class.getName(), smartCorrectedQuery,
							PrimitiveTypes.STRING, "Query Converted",
							"Query Converted");
					mapResults.put("Query Converted", valQuery);
				}
			}
			if (driverInfo.toLowerCase().contains("postgres")) {
				result = mgt
						.submitQuery(query, sf, config.getPersistencePath());
			}
			if (driverInfo.toLowerCase().contains("mysql")) {
				result = mgt
						.submitQuery(query, sf, config.getPersistencePath());
			}
			if (result == null) {
				throw new Exception("Warning: the table has not rows");
			}
			AnalysisLogger.getLogger().debug(
					"In SubmitQuery->Query's Result retrieved");
			HashMap<String, String> mapResult = mgt.getMapQueryResult();
			// encode and decode with an explicit UTF-8 charset: the
			// original no-argument getBytes() used the platform default
			// charset and could corrupt non-ASCII content
			String encoded = new String(mapResult.get("HEADERS").getBytes(
					"UTF-8"), "UTF-8");
			PrimitiveType val = new PrimitiveType(String.class.getName(),
					encoded, PrimitiveTypes.STRING, "Row", "Row");
			mapResults.put("HEADERS", val);
			// to add the rows (result of the query)
			for (int i = 0; i < mapResult.size() - 1; i++) {
				encoded = new String(mapResult.get(String.valueOf(i))
						.getBytes("UTF-8"), "UTF-8");
				PrimitiveType val1 = new PrimitiveType(String.class.getName(),
						encoded, PrimitiveTypes.STRING, "Row", "Row");
				mapResults.put(String.valueOf(i), val1);
			}
			// to add the file
			PrimitiveType fileResult = new PrimitiveType(File.class.getName(),
					mgt.getFileQueryResult(), PrimitiveTypes.FILE, "File",
					"File");
			mapResults.put("File", fileResult);
		}
		return mapResults;
	}

	/**
	 * Analyzes the query with the lexer matching the database type and
	 * returns true when the query is NOT read-only compliant.
	 */
	private boolean analyzeQuery(String query) throws Exception {
		boolean NotAllowed = false;
		// check if the query is allowed
		// TODO: check also the oracle case
		if (driverInfo.toLowerCase().contains("postgres")) {
			PostgresLexicalAnalyzer obj = new PostgresLexicalAnalyzer();
			NotAllowed = obj.analyze(query);
		}
		if (driverInfo.toLowerCase().contains("mysql")) {
			MySQLLexicalAnalyzer obj = new MySQLLexicalAnalyzer();
			NotAllowed = obj.analyze(query);
		}
		AnalysisLogger.getLogger().debug(
				"In SubmitQuery->Warning: query filtered: " + NotAllowed);
		return NotAllowed;
	}

	/**
	 * Normalizes every access point of the resource; the first failure is
	 * logged and rethrown.
	 */
	private void normalizeDBInfo(DBResource resource) throws IOException {
		int ap = resource.getAccessPoints().size();
		for (int i = 0; i < ap; i++) {
			try {
				resource.normalize(i);
			} catch (IOException e) {
				AnalysisLogger.getLogger().debug(
						"In SubmitQuery->: Error in normalization process"
								+ e.getMessage());
				throw e;
			}
		}
	}
}

View File

@ -0,0 +1,6 @@
package org.gcube.dataacces.algorithms.drmalgorithms;
/**
 * Boolean-like enumeration; SOURCE's commented-out SubmitQuery code shows it
 * being offered as the value set for yes/no inputs (read-only and
 * smart-correction flags) - TODO confirm current usage.
 */
public enum SubmitQueryEnum {
TRUE,
FALSE
}

View File

@ -0,0 +1,38 @@
package org.gcube.dataacces.algorithms.test;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
/** Manual regression driver for the LISTDBINFO transducer. */
public class TestListDBInfo {

	public static void main(String[] args) throws Exception {
		System.out.println("TEST 1");
		List<ComputationalAgent> agents = TransducerersFactory
				.getTransducerers(testConfigLocal());
		ComputationalAgent agent = agents.get(0);
		agent.init();
		Regressor.process(agent);
		agent.getOutput();
	}

	// Builds the configuration that selects the LISTDBINFO agent in the
	// /gcube scope.
	private static AlgorithmConfiguration testConfigLocal() {
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("LISTDBINFO");
		config.setGcubeScope("/gcube");
		return config;
	}
}

View File

@ -0,0 +1,36 @@
package org.gcube.dataacces.algorithms.test;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
/** Manual regression driver for the LISTDBNAMES transducer. */
public class TestListNames {

	public static void main(String[] args) throws Exception {
		System.out.println("TEST 1");
		List<ComputationalAgent> agents = TransducerersFactory
				.getTransducerers(testConfigLocal());
		ComputationalAgent agent = agents.get(0);
		agent.init();
		Regressor.process(agent);
		StatisticalType output = agent.getOutput();
		System.out.println(output);
	}

	// Builds the configuration that selects the LISTDBNAMES agent in the
	// /gcube scope.
	private static AlgorithmConfiguration testConfigLocal() {
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("LISTDBNAMES");
		config.setGcubeScope("/gcube");
		return config;
	}
}

View File

@ -0,0 +1,71 @@
package org.gcube.dataacces.algorithms.test;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
/** Manual regression driver for the LISTDBSCHEMA transducer. */
public class TestListSchemas {

	public static void main(String[] args) throws Exception {
		System.out.println("TEST 1");
		List<ComputationalAgent> agents = TransducerersFactory
				.getTransducerers(testConfigLocal());
		ComputationalAgent agent = agents.get(0);
		agent.init();
		Regressor.process(agent);
		agent.getOutput();
	}

	// Configuration targeting a postgres database ("GP DB" /
	// "aquamapsdb") registered in the /gcube scope.
	private static AlgorithmConfiguration testConfigLocal() {
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("LISTDBSCHEMA");
		config.setParam("ResourceName", "GP DB");
		config.setParam("DatabaseName", "aquamapsdb");
		config.setGcubeScope("/gcube");
		return config;
	}
}

View File

@ -0,0 +1,56 @@
package org.gcube.dataacces.algorithms.test;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
/** Manual regression driver for the LISTTABLES transducer. */
public class TestListTables {

	public static void main(String[] args) throws Exception {
		System.out.println("TEST 1");
		List<ComputationalAgent> agents = TransducerersFactory
				.getTransducerers(testConfigLocal());
		ComputationalAgent agent = agents.get(0);
		agent.init();
		Regressor.process(agent);
		agent.getOutput();
	}

	// Configuration targeting the "public" schema of a postgres database
	// ("GP DB" / "aquamapsdb") registered in the /gcube scope.
	private static AlgorithmConfiguration testConfigLocal() {
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("LISTTABLES");
		config.setParam("ResourceName", "GP DB");
		config.setParam("DatabaseName", "aquamapsdb");
		config.setParam("SchemaName", "public");
		config.setGcubeScope("/gcube");
		return config;
	}
}

View File

@ -0,0 +1,92 @@
package org.gcube.dataacces.algorithms.test;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
/** Manual regression driver for the LISTSUBMITQUERY transducer. */
public class TestSubmitQuery {

	public static void main(String[] args) throws Exception {
		System.out.println("TEST 1");
		List<ComputationalAgent> agents = TransducerersFactory
				.getTransducerers(testConfigLocal());
		ComputationalAgent agent = agents.get(0);
		agent.init();
		Regressor.process(agent);
		agent.getOutput();
	}

	// Configuration that submits a query against the
	// StatisticalManagerDataBase resource in the /gcube/devsec scope.
	private static AlgorithmConfiguration testConfigLocal() {
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("LISTSUBMITQUERY");
		config.setParam("ResourceName", "StatisticalManagerDataBase");
		config.setParam("DatabaseName", "testdb");
		config.setParam("Query",
				"select count (*)from (select csquarecode from hcaf_d)");
		config.setGcubeScope("/gcube/devsec");
		return config;
	}
}

View File

@ -0,0 +1,106 @@
package org.gcube.dataacces.algorithms.test;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class TestTableDetails {

	/**
	 * Entry point: runs the LISTTABLEDETAILS transducer once against the
	 * configuration built by {@link #testConfigLocal()} and retrieves its
	 * output through the standard {@code Regressor} harness.
	 */
	public static void main(String[] args) throws Exception {
		System.out.println("TEST 1");
		List<ComputationalAgent> agents = TransducerersFactory.getTransducerers(testConfigLocal());
		ComputationalAgent agent = agents.get(0);
		agent.init();
		Regressor.process(agent);
		agent.getOutput();
		agents = null;
	}

	/**
	 * Configuration for a table-details test against the postgis-backed
	 * "Geoserver database " resource. NOTE(review): the trailing space in
	 * the resource name is intentional — it must match the registered
	 * resource name exactly; confirm against the IS registration.
	 *
	 * @return the populated algorithm configuration for LISTTABLEDETAILS
	 */
	private static AlgorithmConfiguration testConfigLocal() {
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTTABLEDETAILS");
		cfg.setParam("ResourceName", "Geoserver database ");
		cfg.setParam("DatabaseName", "aquamapsdb");
		cfg.setParam("SchemaName", "public");
		cfg.setParam("TableName", "divisions");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}
}

View File

@ -0,0 +1,85 @@
package org.gcube.dataacces.algorithms.test.regressiontest;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionListDBInfo {

	// Labels and configurations are parallel arrays: entry i of each
	// describes the same test case.
	static String[] algorithms = { "Postgres", "NullInputValue" };
	static AlgorithmConfiguration[] configs = { testPostgres1(), NullInputValue() };

	/** Runs every configured LISTDBINFO case and logs its output. */
	public static void main(String[] args) throws Exception {
		for (int idx = 0; idx < algorithms.length; idx++) {
			AnalysisLogger.getLogger().debug("Executing:" + algorithms[idx]);
			List<ComputationalAgent> agents = TransducerersFactory.getTransducerers(configs[idx]);
			ComputationalAgent agent = agents.get(0);
			agent.init();
			Regressor.process(agent);
			StatisticalType result = agent.getOutput();
			AnalysisLogger.getLogger().debug("ST:" + result);
			agents = null;
		}
	}

	/** LISTDBINFO against the "StatisticalManagerDataBase" resource. */
	private static AlgorithmConfiguration testPostgres1() {
		System.out.println("TEST 1: Postgres");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTDBINFO");
		cfg.setParam("ResourceName", "StatisticalManagerDataBase");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** LISTDBINFO with ResourceName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue() {
		System.out.println("TEST 2: NullInputValue");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTDBINFO");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}
}

View File

@ -0,0 +1,75 @@
package org.gcube.dataacces.algorithms.test.regressiontest;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionListNames {

	// Case labels for the full suite; currently only the single inline
	// test in main() is executed.
	static String[] algorithms = { "Postgres1", "Postgres2", "Postgis",
			"Mysql", "NullInputValue" };
	static AlgorithmConfiguration config;

	/**
	 * Entry point: builds a LISTDBNAMES configuration inline for scope
	 * {@code /gcube}, runs the matching transducer once and logs the
	 * resulting statistical type.
	 */
	public static void main(String[] args) throws Exception {
		System.out.println("TEST 1");
		AnalysisLogger.getLogger().debug("Executing:" + "test");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTDBNAMES");
		cfg.setGcubeScope("/gcube");
		List<ComputationalAgent> agents = TransducerersFactory.getTransducerers(cfg);
		ComputationalAgent agent = agents.get(0);
		agent.init();
		Regressor.process(agent);
		StatisticalType result = agent.getOutput();
		AnalysisLogger.getLogger().debug("ST:" + result);
		agents = null;
	}
}

View File

@ -0,0 +1,123 @@
package org.gcube.dataacces.algorithms.test.regressiontest;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionListSchemas {

	// Full suite, kept for reference when re-enabling the null-input cases:
	// static String[] algorithms = { "Postgres1", "NullInputValue1", "NullInputValue2" };

	// Labels and configurations are parallel arrays.
	static String[] algorithms = { "Postgres1" };
	static AlgorithmConfiguration[] configs = { testPostgres1() };

	/** Runs every configured LISTDBSCHEMA case and logs its output. */
	public static void main(String[] args) throws Exception {
		for (int idx = 0; idx < algorithms.length; idx++) {
			AnalysisLogger.getLogger().debug("Executing:" + algorithms[idx]);
			List<ComputationalAgent> agents = TransducerersFactory.getTransducerers(configs[idx]);
			ComputationalAgent agent = agents.get(0);
			agent.init();
			Regressor.process(agent);
			StatisticalType result = agent.getOutput();
			AnalysisLogger.getLogger().debug("ST:" + result);
			agents = null;
		}
	}

	/** LISTDBSCHEMA against the "StatisticalManagerDataBase"/testdb pair. */
	private static AlgorithmConfiguration testPostgres1() {
		System.out.println("TEST 1: Postgres table without rows");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTDBSCHEMA");
		cfg.setParam("ResourceName", "StatisticalManagerDataBase");
		cfg.setParam("DatabaseName", "testdb");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** ResourceName deliberately left unset (null-input case; not currently wired in). */
	private static AlgorithmConfiguration NullInputValue1() {
		System.out.println("TEST 2: NullInputValue1");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTDBSCHEMA");
		cfg.setParam("DatabaseName", "aquamapsdb");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** DatabaseName deliberately left unset (null-input case; not currently wired in). */
	private static AlgorithmConfiguration NullInputValue2() {
		System.out.println("TEST 3: NullInputValue2");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTDBSCHEMA");
		cfg.setParam("ResourceName", "GP DB");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}
}

View File

@ -0,0 +1,159 @@
package org.gcube.dataacces.algorithms.test.regressiontest;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionListTables {

	// Full suite, kept for reference when re-enabling all cases:
	// static String[] algorithms = { "Postgres1", "Mysql", "NullInputValue1", "NullInputValue2", "NullInputValue3" };

	// Labels and configurations are parallel arrays.
	static String[] algorithms = { "Postgres1" };
	static AlgorithmConfiguration[] configs = { testPostgres1() };

	/** Runs every configured LISTTABLES case and logs its output. */
	public static void main(String[] args) throws Exception {
		for (int idx = 0; idx < algorithms.length; idx++) {
			AnalysisLogger.getLogger().debug("Executing:" + algorithms[idx]);
			List<ComputationalAgent> agents = TransducerersFactory.getTransducerers(configs[idx]);
			ComputationalAgent agent = agents.get(0);
			agent.init();
			Regressor.process(agent);
			StatisticalType result = agent.getOutput();
			AnalysisLogger.getLogger().debug("ST:" + result);
			agents = null;
		}
	}

	/** LISTTABLES against the "StatisticalManagerDataBase"/testdb/public triple. */
	private static AlgorithmConfiguration testPostgres1() {
		System.out.println("TEST 1: Postgres table without rows");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTTABLES");
		cfg.setParam("ResourceName", "StatisticalManagerDataBase");
		cfg.setParam("DatabaseName", "testdb");
		cfg.setParam("SchemaName", "public");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** LISTTABLES against a MySQL resource (not currently wired into {@link #configs}). */
	private static AlgorithmConfiguration Mysql() {
		System.out.println("TEST 2: Mysql");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTTABLES");
		cfg.setParam("ResourceName", "CatalogOfLife2010");
		cfg.setParam("DatabaseName", "col2oct2010");
		cfg.setParam("SchemaName", "public");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** ResourceName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue1() {
		System.out.println("TEST 3: Postgis NullInputValue1");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTTABLES");
		cfg.setParam("DatabaseName", "aquamapsdb");
		cfg.setParam("SchemaName", "public");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** DatabaseName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue2() {
		System.out.println("TEST 4: Postgis NullInputValue2");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTTABLES");
		cfg.setParam("ResourceName", "Geoserver database ");
		cfg.setParam("SchemaName", "public");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** SchemaName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue3() {
		System.out.println("TEST 5: Postgis NullInputValue3");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("LISTTABLES");
		cfg.setParam("ResourceName", "Geoserver database ");
		cfg.setParam("DatabaseName", "aquamapsdb");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}
}

View File

@ -0,0 +1,247 @@
package org.gcube.dataacces.algorithms.test.regressiontest;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionRandomSampleOnTable {

	// Full regression suite, kept for reference when re-enabling all cases:
	// static AlgorithmConfiguration[] configs = { testPostgres1(), testPostgis(), testMysql1(), testMysql2(), NullInputValue1(), NullInputValue2(), NullInputValue3(), NullInputValue4() };
	// static String[] algorithms = { "Postgres1", "Postgis", "Mysql1", "Mysql2", "NullInputValue1", "NullInputValue2", "NullInputValue3", "NullInputValue4" };

	static AlgorithmConfiguration[] configs = { testMysql1() };
	// FIX: the label array must stay parallel to configs. It previously read
	// "Postgres1" while configs[0] was testMysql1(), so the log misreported
	// which case was actually executed.
	static String[] algorithms = { "Mysql1" };

	/** Runs every configured RANDOMSAMPLEONTABLE case and logs its output. */
	public static void main(String[] args) throws Exception {
		for (int i = 0; i < algorithms.length; i++) {
			AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);
			List<ComputationalAgent> trans = TransducerersFactory.getTransducerers(configs[i]);
			trans.get(0).init();
			Regressor.process(trans.get(0));
			StatisticalType st = trans.get(0).getOutput();
			// Print result
			AnalysisLogger.getLogger().debug("ST:" + st);
			trans = null;
		}
	}

	/** Postgres case: samples a generated hspec table on testdb. */
	private static AlgorithmConfiguration testPostgres1() {
		System.out.println("TEST 1: Postgres table without rows");
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("RANDOMSAMPLEONTABLE");
		config.setParam("ResourceName", "StatisticalManagerDataBase");
		config.setParam("DatabaseName", "testdb");
		config.setParam("SchemaName", "public");
		config.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
		config.setGcubeScope("/gcube/devsec");
		return config;
	}

	/** Postgis case: samples the case-sensitive "Divisions" table on GP DB. */
	private static AlgorithmConfiguration testPostgis() {
		System.out.println("TEST 2: Postgis");
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("RANDOMSAMPLEONTABLE");
		config.setParam("ResourceName", "GP DB");
		config.setParam("DatabaseName", "aquamapsdb");
		config.setParam("SchemaName", "public");
		config.setParam("TableName", "Divisions");
		config.setGcubeScope("/gcube/devsec");
		return config;
	}

	/** Mysql case: samples an existing table. */
	private static AlgorithmConfiguration testMysql1() {
		System.out.println("TEST 3: Mysql1");
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("RANDOMSAMPLEONTABLE");
		config.setParam("ResourceName", "CatalogOfLife2010");
		config.setParam("DatabaseName", "col2oct2010");
		config.setParam("TableName", "Common_names"); // mysql is not case
		// sensitive
		config.setGcubeScope("/gcube/devsec");
		return config;
	}

	/** Mysql failure case: the referenced table does not exist. */
	private static AlgorithmConfiguration testMysql2() {
		System.out.println("TEST 4: Mysql2");
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("RANDOMSAMPLEONTABLE");
		config.setParam("ResourceName", "CatalogOfLife2010");
		config.setParam("DatabaseName", "col2oct2010");
		config.setParam("TableName", "example"); // the table does not exist
		config.setGcubeScope("/gcube/devsec");
		return config;
	}

	/** ResourceName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue1() {
		System.out.println("TEST 5: Postgis NullInputValue1");
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("RANDOMSAMPLEONTABLE");
		config.setParam("DatabaseName", "aquamapsdb");
		config.setParam("SchemaName", "public");
		config.setParam("TableName", "Divisions");
		config.setGcubeScope("/gcube/devsec");
		return config;
	}

	/** DatabaseName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue2() {
		System.out.println("TEST 6: Postgis NullInputValue2");
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("RANDOMSAMPLEONTABLE");
		config.setParam("ResourceName", "Geoserver database ");
		config.setParam("SchemaName", "public");
		config.setParam("TableName", "Divisions");
		config.setGcubeScope("/gcube/devsec");
		return config;
	}

	/** SchemaName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue3() {
		System.out.println("TEST 7: Postgis NullInputValue3");
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("RANDOMSAMPLEONTABLE");
		config.setParam("ResourceName", "Geoserver database ");
		config.setParam("DatabaseName", "aquamapsdb");
		config.setParam("TableName", "Divisions");
		config.setGcubeScope("/gcube/devsec");
		return config;
	}

	/** TableName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue4() {
		System.out.println("TEST 8: Postgis NullInputValue4");
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("RANDOMSAMPLEONTABLE");
		config.setParam("ResourceName", "Geoserver database ");
		config.setParam("DatabaseName", "aquamapsdb");
		config.setParam("SchemaName", "public");
		config.setGcubeScope("/gcube/devsec");
		return config;
	}
}

View File

@ -0,0 +1,238 @@
package org.gcube.dataacces.algorithms.test.regressiontest;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionSampleOnTable {

	// Full suite, kept for reference when re-enabling all cases:
	// static AlgorithmConfiguration[] configs = { testPostgres1(), testPostgis(), testMysql1(), testMysql2(), NullInputValue1(), NullInputValue2(), NullInputValue3(), NullInputValue4() };
	// static String[] algorithms = { "Postgres1", "Postgis", "Mysql1", "Mysql2", "NullInputValue1", "NullInputValue2", "NullInputValue3", "NullInputValue4" };

	// Labels and configurations are parallel arrays.
	static AlgorithmConfiguration[] configs = { testPostgres1() };
	static String[] algorithms = { "Postgres1" };

	/** Runs every configured SAMPLEONTABLE case and logs its output. */
	public static void main(String[] args) throws Exception {
		for (int idx = 0; idx < algorithms.length; idx++) {
			AnalysisLogger.getLogger().debug("Executing:" + algorithms[idx]);
			List<ComputationalAgent> agents = TransducerersFactory.getTransducerers(configs[idx]);
			ComputationalAgent agent = agents.get(0);
			agent.init();
			Regressor.process(agent);
			StatisticalType result = agent.getOutput();
			AnalysisLogger.getLogger().debug("ST:" + result);
			agents = null;
		}
	}

	/** SAMPLEONTABLE on hcaf_d in "StatisticalManagerDataBase"/testdb. */
	private static AlgorithmConfiguration testPostgres1() {
		System.out.println("TEST 1: Postgres table without rows");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("SAMPLEONTABLE");
		cfg.setParam("ResourceName", "StatisticalManagerDataBase");
		cfg.setParam("DatabaseName", "testdb");
		cfg.setParam("SchemaName", "public");
		cfg.setParam("TableName", "hcaf_d");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** Postgis-labelled case; currently points at the same testdb/hcaf_d target. */
	private static AlgorithmConfiguration testPostgis() {
		System.out.println("TEST 2: Postgis");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("SAMPLEONTABLE");
		cfg.setParam("ResourceName", "StatisticalManagerDataBase");
		cfg.setParam("DatabaseName", "testdb");
		cfg.setParam("SchemaName", "public");
		cfg.setParam("TableName", "hcaf_d");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** Mysql case: samples an existing table. */
	private static AlgorithmConfiguration testMysql1() {
		System.out.println("TEST 3: Mysql1");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("SAMPLEONTABLE");
		cfg.setParam("ResourceName", "CatalogOfLife2010");
		cfg.setParam("DatabaseName", "col2oct2010");
		cfg.setParam("TableName", "Common_names"); // mysql is not case sensitive
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** Mysql failure case: the referenced table does not exist. */
	private static AlgorithmConfiguration testMysql2() {
		System.out.println("TEST 4: Mysql2");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("SAMPLEONTABLE");
		cfg.setParam("ResourceName", "CatalogOfLife2010");
		cfg.setParam("DatabaseName", "col2oct2010");
		cfg.setParam("TableName", "example"); // the table does not exist
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** ResourceName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue1() {
		System.out.println("TEST 5: Postgis NullInputValue1");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("SAMPLEONTABLE");
		cfg.setParam("DatabaseName", "aquamapsdb");
		cfg.setParam("SchemaName", "public");
		cfg.setParam("TableName", "Divisions");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** DatabaseName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue2() {
		System.out.println("TEST 6: Postgis NullInputValue2");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("SAMPLEONTABLE");
		cfg.setParam("ResourceName", "Geoserver database ");
		cfg.setParam("SchemaName", "public");
		cfg.setParam("TableName", "Divisions");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** SchemaName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue3() {
		System.out.println("TEST 7: Postgis NullInputValue3");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("SAMPLEONTABLE");
		cfg.setParam("ResourceName", "Geoserver database ");
		cfg.setParam("DatabaseName", "aquamapsdb");
		cfg.setParam("TableName", "Divisions");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}

	/** TableName deliberately left unset (null-input case). */
	private static AlgorithmConfiguration NullInputValue4() {
		System.out.println("TEST 8: Postgis NullInputValue4");
		AlgorithmConfiguration cfg = Regressor.getConfig();
		cfg.setAgent("SAMPLEONTABLE");
		cfg.setParam("ResourceName", "Geoserver database ");
		cfg.setParam("DatabaseName", "aquamapsdb");
		cfg.setParam("SchemaName", "public");
		cfg.setGcubeScope("/gcube/devsec");
		return cfg;
	}
}

View File

@ -0,0 +1,283 @@
package org.gcube.dataacces.algorithms.test.regressiontest;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionSmartSampleOnTable {
//static AlgorithmConfiguration[] configs = { testPostgres1(), testPostgis(), testMysql1(),testMysql2(), NullInputValue1(), NullInputValue2(), NullInputValue3(), NullInputValue4()};
// static String[] algorithms = { "Postgres1", "Postgis", "Mysql1", "Mysql2", "NullInputValue1", "NullInputValue2", "NullInputValue3", "NullInputValue4"};
static AlgorithmConfiguration[] configs = { testPostgres1()};
static String[] algorithms = { "Postgres1"};
public static void main(String[] args) throws Exception {
// System.out.println("TEST 1");
for (int i = 0; i < algorithms.length; i++) {
AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);
// ComputationalAgent trans = new WPSProcess(wps, algorithms[i]);
List<ComputationalAgent> trans = null;
trans = TransducerersFactory.getTransducerers(configs[i]);
trans.get(0).init();
// trans.setConfiguration(configs[i]);
// trans.init();
Regressor.process(trans.get(0));
StatisticalType st = trans.get(0).getOutput();
// Print Result
AnalysisLogger.getLogger().debug("ST:" + st);
trans = null;
}
}
private static AlgorithmConfiguration testPostgres1() {
    System.out.println("TEST 1: Postgres table without rows");

    // Smart sampling over a table hosted by the StatisticalManager
    // Postgres database in the /gcube/devsec scope.
    AlgorithmConfiguration cfg = Regressor.getConfig();
    cfg.setAgent("SMARTSAMPLEONTABLE");
    cfg.setParam("ResourceName", "StatisticalManagerDataBase");
    cfg.setParam("DatabaseName", "testdb");
    cfg.setParam("SchemaName", "public");
    cfg.setParam("TableName", "hspec_id_3f4c79fa_442e_42ba_9344_1b3e64dc3326");
    cfg.setGcubeScope("/gcube/devsec");
    return cfg;
}
private static AlgorithmConfiguration testPostgis() {
    System.out.println("TEST 2: Postgis");

    // Smart sampling against the PostGIS "Divisions" table on the GP DB resource.
    AlgorithmConfiguration cfg = Regressor.getConfig();
    cfg.setAgent("SMARTSAMPLEONTABLE");
    cfg.setParam("ResourceName", "GP DB");
    cfg.setParam("DatabaseName", "aquamapsdb");
    cfg.setParam("SchemaName", "public");
    cfg.setParam("TableName", "Divisions");
    cfg.setGcubeScope("/gcube/devsec");
    return cfg;
}
private static AlgorithmConfiguration testMysql1() {
    System.out.println("TEST 3: Mysql1");

    // Smart sampling against an existing MySQL table.
    AlgorithmConfiguration cfg = Regressor.getConfig();
    cfg.setAgent("SMARTSAMPLEONTABLE");
    cfg.setParam("ResourceName", "CatalogOfLife2010");
    cfg.setParam("DatabaseName", "col2oct2010");
    cfg.setParam("TableName", "Common_names"); // mysql is not case sensitive
    cfg.setGcubeScope("/gcube/devsec");
    return cfg;
}
private static AlgorithmConfiguration testMysql2() {
    System.out.println("TEST 4: Mysql2");

    // Negative test: the target MySQL table does not exist.
    AlgorithmConfiguration cfg = Regressor.getConfig();
    cfg.setAgent("SMARTSAMPLEONTABLE");
    cfg.setParam("ResourceName", "CatalogOfLife2010");
    cfg.setParam("DatabaseName", "col2oct2010");
    cfg.setParam("TableName", "example"); // the table does not exist
    cfg.setGcubeScope("/gcube/devsec");
    return cfg;
}
//private static AlgorithmConfiguration testMysql3() { // disabled: wrong data (resource/database mismatch)
//
// System.out.println("TEST 4.1: Mysql3");
//
// AlgorithmConfiguration config = Regressor.getConfig();
//
// config.setAgent("SMARTSAMPLEONTABLE");
//
// // A test with a database mysql
// config.setParam("ResourceName", "CatalogOfLife2010");
// config.setParam("DatabaseName", "aquamaps");
// config.setParam("TableName", "hcaf_d");
//
// config.setGcubeScope("/gcube/devsec");
//
// return config;
//
//}
private static AlgorithmConfiguration NullInputValue1() {
    System.out.println("TEST 5: Postgis NullInputValue1");

    // Negative test: "ResourceName" is deliberately left unset.
    AlgorithmConfiguration cfg = Regressor.getConfig();
    cfg.setAgent("SMARTSAMPLEONTABLE");
    cfg.setParam("DatabaseName", "aquamapsdb");
    cfg.setParam("SchemaName", "public");
    cfg.setParam("TableName", "Divisions");
    cfg.setGcubeScope("/gcube/devsec");
    return cfg;
}
private static AlgorithmConfiguration NullInputValue2() {
    System.out.println("TEST 6: Postgis NullInputValue2");

    // Negative test: "DatabaseName" is deliberately left unset.
    AlgorithmConfiguration cfg = Regressor.getConfig();
    cfg.setAgent("SMARTSAMPLEONTABLE");
    cfg.setParam("ResourceName", "Geoserver database ");
    cfg.setParam("SchemaName", "public");
    cfg.setParam("TableName", "Divisions");
    cfg.setGcubeScope("/gcube/devsec");
    return cfg;
}
private static AlgorithmConfiguration NullInputValue3() {
    System.out.println("TEST 7: Postgis NullInputValue3");

    // Negative test: "SchemaName" is deliberately left unset.
    AlgorithmConfiguration cfg = Regressor.getConfig();
    cfg.setAgent("SMARTSAMPLEONTABLE");
    cfg.setParam("ResourceName", "Geoserver database ");
    cfg.setParam("DatabaseName", "aquamapsdb");
    cfg.setParam("TableName", "Divisions");
    cfg.setGcubeScope("/gcube/devsec");
    return cfg;
}
private static AlgorithmConfiguration NullInputValue4() {
    System.out.println("TEST 8: Postgis NullInputValue4");

    // Negative test: "TableName" is deliberately left unset.
    AlgorithmConfiguration cfg = Regressor.getConfig();
    cfg.setAgent("SMARTSAMPLEONTABLE");
    cfg.setParam("ResourceName", "Geoserver database ");
    cfg.setParam("DatabaseName", "aquamapsdb");
    cfg.setParam("SchemaName", "public");
    cfg.setGcubeScope("/gcube/devsec");
    return cfg;
}
}

View File

@ -0,0 +1,319 @@
package org.gcube.dataacces.algorithms.test.regressiontest;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
public class RegressionSubmitQuery {

    // Regression driver for the LISTSUBMITQUERY transducer.
    // algorithms[i] is the log label for configs[i]; only the MySQL case is
    // currently enabled. Fix: the original label was "Postgres1" while the
    // enabled config was Mysql(), which made the execution log misleading.
    static String[] algorithms = { "Mysql" };
    static AlgorithmConfiguration[] configs = { Mysql() };

    public static void main(String[] args) throws Exception {
        for (int i = 0; i < algorithms.length; i++) {
            AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);
            List<ComputationalAgent> trans = TransducerersFactory.getTransducerers(configs[i]);
            trans.get(0).init();
            Regressor.process(trans.get(0));
            // Print the statistical result produced by the transducer.
            StatisticalType st = trans.get(0).getOutput();
            AnalysisLogger.getLogger().debug("ST:" + st);
            trans = null;
        }
    }

    /** Read-only submit-query test on the StatisticalManager Postgres database. */
    private static AlgorithmConfiguration testPostgres1() {
        // NOTE(review): banner text is stale copy-paste from another suite;
        // this test submits a read-only query on hcaf_d.
        System.out.println("TEST 1: Postgres table without rows");
        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("LISTSUBMITQUERY");
        config.setParam("ResourceName", "StatisticalManagerDataBase");
        config.setParam("DatabaseName", "testdb");
        config.setParam("Read-Only Query", "TRUE");
        config.setParam("Apply Smart Correction", "TRUE");
        config.setParam("Language", "POSTGRES");
        config.setParam("Query", "select * from hcaf_d limit 10");
        config.setGcubeScope("/gcube/devsec");
        return config;
    }

    /** Submit-query test against a MySQL resource. */
    private static AlgorithmConfiguration Mysql() {
        System.out.println("TEST 2: Mysql");
        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("LISTSUBMITQUERY");
        config.setParam("ResourceName", "CatalogOfLife2010");
        config.setParam("DatabaseName", "col2oct2010");
        // Fix: the original called setParam("", "TRUE") with an empty
        // parameter name, which no consumer can read back; the sibling
        // Postgres test shows the intended key is "Read-Only Query".
        config.setParam("Read-Only Query", "TRUE");
        config.setParam("TableName", "Common_names"); // mysql is not case sensitive
        config.setParam("Query", "select * from common_names limit 10");
        config.setGcubeScope("/gcube/devsec");
        return config;
    }

    /** PostGIS test selecting a geometry column. */
    private static AlgorithmConfiguration Postgres3() {
        System.out.println("TEST 3: Postgis");
        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("LISTSUBMITQUERY");
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        config.setParam("TableName", "Divisions"); // postgres is case sensitive
        config.setParam("Query", "select the_geom from Divisions limit 3");
        config.setGcubeScope("/gcube/devsec");
        return config;
    }

    /** PostGIS test submitting a non-plain-SELECT (EXPLAIN ANALYZE) statement. */
    private static AlgorithmConfiguration Postgres4() {
        System.out.println("TEST 4: Postgis filter query");
        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("LISTSUBMITQUERY");
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        config.setParam("TableName", "Divisions"); // postgres is case sensitive
        config.setParam("Query", "EXPLAIN ANALYZE select gid from Divisions limit 3");
        config.setGcubeScope("/gcube/devsec");
        return config;
    }

    /** Negative test: "ResourceName" is deliberately left unset. */
    private static AlgorithmConfiguration NullInputValue1() {
        System.out.println("TEST 5: Postgis NullInputValue1");
        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("LISTSUBMITQUERY");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        config.setParam("TableName", "Divisions");
        config.setParam("Query", "EXPLAIN ANALYZE select gid from Divisions limit 3");
        config.setGcubeScope("/gcube/devsec");
        return config;
    }

    /** Negative test: "DatabaseName" is deliberately left unset. */
    private static AlgorithmConfiguration NullInputValue2() {
        System.out.println("TEST 5: Postgis NullInputValue2");
        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("LISTSUBMITQUERY");
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("SchemaName", "public");
        config.setParam("TableName", "Divisions");
        config.setParam("Query", "EXPLAIN ANALYZE select gid from Divisions limit 3");
        config.setGcubeScope("/gcube/devsec");
        return config;
    }

    /** Negative test: "SchemaName" is deliberately left unset. */
    private static AlgorithmConfiguration NullInputValue3() {
        System.out.println("TEST 5: Postgis NullInputValue3");
        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("LISTSUBMITQUERY");
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("TableName", "Divisions");
        config.setParam("Query", "EXPLAIN ANALYZE select gid from Divisions limit 3");
        config.setGcubeScope("/gcube/devsec");
        return config;
    }

    /** Negative test: "TableName" is deliberately left unset. */
    private static AlgorithmConfiguration NullInputValue4() {
        System.out.println("TEST 5: Postgis NullInputValue4");
        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("LISTSUBMITQUERY");
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        config.setParam("Query", "EXPLAIN ANALYZE select gid from Divisions limit 3");
        config.setGcubeScope("/gcube/devsec");
        return config;
    }

    /** Negative test: "Query" is deliberately left unset. */
    private static AlgorithmConfiguration NullInputValue5() {
        System.out.println("TEST 6: Postgis NullInputValue5");
        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("LISTSUBMITQUERY");
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        config.setParam("TableName", "Divisions");
        config.setGcubeScope("/gcube/devsec");
        return config;
    }

    /** PostGIS test with a properly quoted case-sensitive table name. */
    private static AlgorithmConfiguration Postgis() {
        System.out.println("TEST 7: Postgis");
        AlgorithmConfiguration config = Regressor.getConfig();
        config.setAgent("LISTSUBMITQUERY");
        config.setParam("ResourceName", "Geoserver database ");
        config.setParam("DatabaseName", "aquamapsdb");
        config.setParam("SchemaName", "public");
        config.setParam("TableName", "Divisions"); // postgres is case sensitive
        config.setParam("Query", "select * from \"Divisions\" limit 1");
        config.setGcubeScope("/gcube/devsec");
        return config;
    }
}

View File

@ -0,0 +1,212 @@
package org.gcube.dataacces.algorithms.test.regressiontest;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
public class RegressionTableDetails {

    // Regression driver for the table-details transducers. Only the first
    // Postgres test is enabled; add the other factory methods back to the
    // arrays to run the full suite.
    static String[] algorithms = { "Postgres1" };
    static AlgorithmConfiguration[] configs = { testPostgres1() };

    public static void main(String[] args) throws Exception {
        for (int i = 0; i < algorithms.length; i++) {
            AnalysisLogger.getLogger().debug("Executing:" + algorithms[i]);
            List<ComputationalAgent> agents = TransducerersFactory.getTransducerers(configs[i]);
            ComputationalAgent agent = agents.get(0);
            agent.init();
            Regressor.process(agent);
            // Print the statistical result produced by the transducer.
            StatisticalType st = agent.getOutput();
            AnalysisLogger.getLogger().debug("ST:" + st);
            agents = null;
        }
    }

    /** Table details on the StatisticalManager Postgres database. */
    private static AlgorithmConfiguration testPostgres1() {
        System.out.println("TEST 1: Postgres");
        AlgorithmConfiguration cfg = Regressor.getConfig();
        cfg.setAgent("GETTABLEDETAILS");
        cfg.setParam("ResourceName", "StatisticalManagerDataBase");
        cfg.setParam("DatabaseName", "testdb");
        // NOTE(review): "publicd" looks like a typo for "public" — confirm
        // whether a non-existent schema is intended here.
        cfg.setParam("SchemaName", "publicd");
        cfg.setParam("TableName", "hcaf_d");
        cfg.setGcubeScope("/gcube/devsec");
        return cfg;
    }

    /** Table details on an empty Postgres table. */
    private static AlgorithmConfiguration testPostgres2() {
        System.out.println("TEST 2: Postgres");
        AlgorithmConfiguration cfg = Regressor.getConfig();
        // NOTE(review): other tests use "LISTTABLEDETAILS"/"GETTABLEDETAILS";
        // confirm "TableDetails" is a valid agent name.
        cfg.setAgent("TableDetails");
        cfg.setParam("ResourceName", "GP DB");
        cfg.setParam("DatabaseName", "aquamapsdb");
        cfg.setParam("SchemaName", "public");
        cfg.setParam("TableName", "area"); // it has not rows
        cfg.setGcubeScope("/gcube/devsec");
        return cfg;
    }

    /** Table details on a PostGIS table. */
    private static AlgorithmConfiguration testPostgis() {
        System.out.println("TEST 3: Postgis");
        AlgorithmConfiguration cfg = Regressor.getConfig();
        cfg.setAgent("LISTTABLEDETAILS");
        cfg.setParam("ResourceName", "Geoserver database ");
        cfg.setParam("DatabaseName", "aquamapsdb");
        cfg.setParam("SchemaName", "public");
        cfg.setParam("TableName", "Divisions");
        cfg.setGcubeScope("/gcube/devsec");
        return cfg;
    }

    /** Table details on a MySQL table. */
    private static AlgorithmConfiguration Mysql() {
        System.out.println("TEST 4: Mysql");
        AlgorithmConfiguration cfg = Regressor.getConfig();
        cfg.setAgent("LISTTABLEDETAILS");
        cfg.setParam("ResourceName", "CatalogOfLife2010");
        cfg.setParam("DatabaseName", "col2oct2010");
        cfg.setParam("TableName", "Common_names"); // mysql is not case sensitive
        cfg.setParam("Query", "select * from common_names limit 3");
        cfg.setGcubeScope("/gcube/devsec");
        return cfg;
    }

    /** Negative test: "SchemaName" is deliberately left unset. */
    private static AlgorithmConfiguration NullInputValue() {
        System.out.println("TEST 5: Postgis NullInputValue");
        AlgorithmConfiguration cfg = Regressor.getConfig();
        cfg.setAgent("LISTTABLEDETAILS");
        cfg.setParam("ResourceName", "Geoserver database ");
        cfg.setParam("DatabaseName", "aquamapsdb");
        cfg.setParam("TableName", "Divisions");
        cfg.setGcubeScope("/gcube/devsec");
        return cfg;
    }

    /** Table details with a lower-case name on a case-sensitive server. */
    private static AlgorithmConfiguration Postgres3() {
        System.out.println("TEST 6: Postgres");
        AlgorithmConfiguration cfg = Regressor.getConfig();
        cfg.setAgent("LISTTABLEDETAILS");
        cfg.setParam("ResourceName", "Geoserver database ");
        cfg.setParam("DatabaseName", "aquamapsdb");
        cfg.setParam("SchemaName", "public");
        cfg.setParam("TableName", "divisions"); // postgres is case sensitive
        cfg.setGcubeScope("/gcube/devsec");
        return cfg;
    }
}

View File

@ -0,0 +1,358 @@
package org.gcube.dataaccess.algorithms.examples;
import java.awt.Image;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.gcube.contentmanagement.graphtools.data.conversions.ImageTools;
import org.gcube.contentmanagement.graphtools.plotting.graphs.HistogramGraph;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
import org.jfree.chart.JFreeChart;
import org.jfree.data.category.DefaultCategoryDataset;
public class AbsoluteSpeciesBarChartsAlgorithm extends
StandardLocalExternalAlgorithm {
LinkedHashMap<String, StatisticalType> map = new LinkedHashMap<String, StatisticalType>();
static String databaseName = "DatabaseName";
static String userParameterName = "DatabaseUserName";
static String passwordParameterName = "DatabasePassword";
static String urlParameterName = "DatabaseURL";
// static String databaseName = "Obis2Repository";
// static String userParameterName = "postgres";
// static String passwordParameterName = "0b1s@d4sc13nc3";
// static String urlParameterName =
// "jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis";
protected String fileName;
BufferedWriter out;
private String firstSpeciesNumber = " SpeciesNumber :";
private String yearStart = "Start year :";
private String yearEnd = "End year :";
private int speciesNumber;
String databaseJdbc;
String year_start;
String year_end;
String databaseUser;
String databasePwd;
private Connection connection = null;
private DefaultCategoryDataset defaultcategorydataset;
@Override
public void init() throws Exception {
AnalysisLogger.getLogger().debug("Initialization");
}
@Override
public String getDescription() {
return ": A transducer algorithm that produces the list of top n most observed taxa, i.e. the species taxa having the largest number of occurrence records, in the OBIS database in a given time interval";
}
public void fulfilParameters() {
AnalysisLogger.getLogger().debug("fulfilParameters method");
// String tmp = getInputParameter(firstSpeciesNumber);
List<StatisticalType> list = getInputParameters();
System.out.println("size: " + list.size());
for (int i = 0; i < list.size(); i++) {
System.out.println(list.get(i).getName()+" "+list.get(i).getDefaultValue());
if (list.get(i).getName().equals(firstSpeciesNumber)) {
// System.out.println(list.get(i).getName());
String tmp = list.get(i).getDefaultValue();
speciesNumber = Integer.parseInt(tmp);
}
if (list.get(i).getName().equals(yearStart)) {
year_start = list.get(i).getDefaultValue();
}
if (list.get(i).getName().equals(yearEnd)) {
year_end = list.get(i).getDefaultValue();
}
// if (list.get(i).getName().equals(urlParameterName)) {
//
// databaseJdbc = list.get(i).getDefaultValue();
//
// }
// if (list.get(i).getName().equals(userParameterName)) {
//
// databaseUser = list.get(i).getDefaultValue();
//
// }
// if (list.get(i).getName().equals(passwordParameterName)) {
//
// databasePwd = list.get(i).getDefaultValue();
//
// }
databaseJdbc = getInputParameter("DatabaseURL");
databaseUser= getInputParameter("DatabaseUserName");
databasePwd= getInputParameter("DatabasePassword");
}
// System.out.println(tmp);
// databaseJdbc = getInputParameter(urlParameterName);
// year_start = getInputParameter(yearStart);
// year_end = getInputParameter(yearEnd);
// databaseUser = getInputParameter(userParameterName);
// databasePwd = getInputParameter(passwordParameterName);
// fileName = super.config.getPersistencePath() + "results.csv";
fileName = config.getConfigPath() + "results.csv";
// fileName = "./cfg/" + "results.csv";
AnalysisLogger.getLogger().debug("Percorso file: " + fileName);
AnalysisLogger.getLogger().debug("fulfilParameters method");
}
private ResultSet performeQuery() throws SQLException {
connection = DriverManager.getConnection(databaseJdbc, databaseUser,
databasePwd);
Statement stmt = connection.createStatement();
String query = "SELECT tname, sum(count)AS count FROM public.count_species_per_year WHERE year::integer >="
+ year_start
+ "AND year::integer <="
+ year_end
+ "GROUP BY tname ORDER BY count desc;";
return stmt.executeQuery(query);
}
@Override
protected void process() throws Exception {
System.out.println("In the process");
defaultcategorydataset = new DefaultCategoryDataset();
// String driverName = "org.postgresql.Driver";
// Class driverClass = Class.forName(driverName);
// Driver driver = (Driver) driverClass.newInstance();
System.out.println("pre fulfill");
fulfilParameters();
System.out.println("post fulfill");
// String tmp = getInputParameter(firstSpeciesNumber);
// System.out.println("process-> speciesnumber value: " + tmp);
// String tmp="10";
// speciesNumber = Integer.parseInt(tmp);
// year_start = getInputParameter(yearStart);
// year_start="1800";
// year_end = getInputParameter(yearEnd);
// year_end="2020";
// fileName = super.config.getPersistencePath() + "results.csv";
// fileName = "results.csv";
out = new BufferedWriter(new FileWriter(fileName));
System.out.println("pre query");
ResultSet rs = performeQuery();
System.out.println("post query");
// connection =
// DriverManager.getConnection("jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis",
// "postgres",
// "0b1s@d4sc13nc3");
// Statement stmt = connection.createStatement();
// String query =
// "SELECT tname, sum(count)AS count FROM public.count_species_per_year WHERE year::integer >="
// + year_start
// + "AND year::integer <="
// + year_end
// + "GROUP BY tname ORDER BY count desc;";
//
// System.out.println("pre query");
// ResultSet rs=stmt.executeQuery(query);
//
// System.out.println("post query");
//
int i = 0;
String s = "Species";
while (rs.next() && i < speciesNumber) {
System.out.println(rs.toString());
String tname = rs.getString("tname");
String count = rs.getString("count");
System.out.println("tname:" + tname);
System.out.println("count:" + count);
write(tname + "," + count);
int countOcc = Integer.parseInt(count);
PrimitiveType val = new PrimitiveType(String.class.getName(),
count, PrimitiveTypes.STRING, tname, tname);
if (i < 100)
map.put(tname, val);
if (i < 16)
defaultcategorydataset.addValue(countOcc, s, tname);
i++;
}
out.close();
connection.close();
}
@Override
protected void setInputParameters() {
System.out.println("Sono in SetParameters");
addStringInput(
firstSpeciesNumber,
"Number of species to report (max 17 will be visualized on the chart)",
"10");
// System.out.println(firstSpeciesNumber);
addStringInput(yearStart, "Starting year of the analysis", "1800");
// System.out.println(yearStart);
addStringInput(yearEnd, "Ending year of the analysis", "2020");
// System.out.println(yearEnd);
// addRemoteDatabaseInput("Obis2Repository", urlParameterName,
// userParameterName, passwordParameterName, "driver", "dialect");
// addRemoteDatabaseInput("Obis2Repository", urlParameterName,
// userParameterName, passwordParameterName, "org.postgresql.Driver",
// "org.hibernate.dialect.PostgreSQLDialect");
System.out.println("pre addRemoteDB");
// addRemoteDatabaseInput(
// "Obis2Repository",
// "jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis",
// "postgres", "0b1s@d4sc13nc3", "org.postgresql.Driver",
// "org.hibernate.dialect.PostgreSQLDialect");
// addRemoteDatabaseInput("Obis2Repository", urlParameterName,
// userParameterName, passwordParameterName, "driver", "dialect");
System.out.println("post addRemoteDB");
// super.config.setConfigPath("./cfg/");
// config.setConfigPath();
// super.config.setParam("DatabaseUserName","gcube");
// super.config.setParam("DatabasePassword","d4science2");
// super.config.setParam("DatabaseURL","jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis");
// super.config.setParam("DatabaseDriver","org.postgresql.Driver");
// System.out.println("URL: "+ super.config.getDatabaseURL());
}
@Override
public void shutdown() {
AnalysisLogger.getLogger().debug("Shutdown");
}
@Override
public StatisticalType getOutput() {
AnalysisLogger.getLogger().debug("In getOutput");
PrimitiveType p = new PrimitiveType(Map.class.getName(),
PrimitiveType.stringMap2StatisticalMap(outputParameters),
PrimitiveTypes.MAP, "Discrepancy Analysis", "");
AnalysisLogger
.getLogger()
.debug("MapsComparator: Producing Gaussian Distribution for the errors");
// build image:
HashMap<String, Image> producedImages = new HashMap<String, Image>();
JFreeChart chart = HistogramGraph
.createStaticChart(defaultcategorydataset);
Image image = ImageTools.toImage(chart.createBufferedImage(680, 420));
producedImages.put("Species Observations", image);
PrimitiveType images = new PrimitiveType(HashMap.class.getName(),
producedImages, PrimitiveTypes.IMAGES, "ErrorRepresentation",
"Graphical representation of the error spread");
// PrimitiveType images = new PrimitiveType("Species Observations",
// producedImages, PrimitiveTypes.IMAGES, "ErrorRepresentation",
// "Graphical representation of the error spread");
PrimitiveType f = new PrimitiveType(File.class.getName(), new File(
fileName), PrimitiveTypes.FILE, "OccFile", "OccFile");
// end build image
AnalysisLogger.getLogger().debug(
"Bar Charts Species Occurrences Produced");
// collect all the outputs
map.put("File", f);
map.put("Result", p);
map.put("Images", images);
// generate a primitive type for the collection
PrimitiveType output = new PrimitiveType(HashMap.class.getName(), map,
PrimitiveTypes.MAP, "ResultsMap", "Results Map");
// PrimitiveType output=null;
return output;
}
public void write(String writeSt) {
try {
out.write(writeSt);
out.newLine();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}

View File

@ -0,0 +1,107 @@
package org.gcube.dataaccess.algorithms.examples;
import java.util.LinkedHashMap;
import java.util.List;
import org.gcube.contentmanagement.lexicalmatcher.utils.AnalysisLogger;
import org.gcube.dataanalysis.ecoengine.datatypes.PrimitiveType;
import org.gcube.dataanalysis.ecoengine.datatypes.StatisticalType;
import org.gcube.dataanalysis.ecoengine.datatypes.enumtypes.PrimitiveTypes;
import org.gcube.dataanalysis.ecoengine.interfaces.StandardLocalExternalAlgorithm;
public class SimpleAlg extends StandardLocalExternalAlgorithm {

    @Override
    public void init() throws Exception {
        AnalysisLogger.getLogger().debug("Initialization");
    }

    @Override
    public String getDescription() {
        return "An algorithm for testing";
    }

    @Override
    protected void process() throws Exception {
        AnalysisLogger.getLogger().debug("in process");
    }

    @Override
    protected void setInputParameters() {
        // Two free-text inputs; "Surname" ships with a default value.
        addStringInput("Name", "name", "");
        addStringInput("Surname", "surname", "Liccardo");
    }

    @Override
    public void shutdown() {
        AnalysisLogger.getLogger().debug("Shutdown");
    }

    /**
     * Logs the configured inputs. Builds name/surname primitive values into
     * a map for inspection, but intentionally keeps the original contract of
     * returning null.
     */
    @Override
    public StatisticalType getOutput() {
        AnalysisLogger.getLogger().debug("retrieving results");
        String name = getInputParameter("Name");
        String surname = getInputParameter("Surname");

        List<StatisticalType> inputs = getInputParameters();
        System.out.println("size: " + inputs.size());
        for (StatisticalType input : inputs) {
            System.out.println(input.getName() + " " + input.getDefaultValue());
        }

        PrimitiveType nameType = new PrimitiveType(String.class.getName(),
                getInputParameter("Name"), PrimitiveTypes.STRING, "Name",
                "name");
        PrimitiveType surnameType = new PrimitiveType(String.class.getName(),
                getInputParameter("Surname"), PrimitiveTypes.STRING,
                "Surname", "surname");
        LinkedHashMap<String, StatisticalType> results = new LinkedHashMap<String, StatisticalType>();
        results.put("Name", nameType);
        results.put("Surname", surnameType);

        AnalysisLogger.getLogger().debug("name: " + name);
        AnalysisLogger.getLogger().debug("surname: " + surname);
        // NOTE(review): the assembled map is discarded and null is returned,
        // matching the original behavior — confirm whether the map was meant
        // to be wrapped in a PrimitiveType and returned instead.
        return null;
    }
}

View File

@ -0,0 +1,38 @@
package org.gcube.dataaccess.algorithms.examples;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
/**
 * Local smoke test that runs the "TEST_ALG" transducer through the
 * ecoengine regression harness.
 */
public class TestSimpleAlg {

	public static void main(String[] args) throws Exception {
		System.out.println("TEST 1");
		List<ComputationalAgent> trans =
				TransducerersFactory.getTransducerers(testConfigLocal());
		// guard: the factory may return null or an empty list when the agent
		// is not registered — previously this threw IndexOutOfBoundsException
		if (trans == null || trans.isEmpty()) {
			System.out.println("no transducer found for agent TEST_ALG");
			return;
		}
		ComputationalAgent agent = trans.get(0);
		agent.init();
		Regressor.process(agent);
		agent.getOutput();
	}

	/**
	 * Builds the regression configuration pointing at the TEST_ALG agent.
	 *
	 * @return the configuration fed to the transducer factory
	 */
	private static AlgorithmConfiguration testConfigLocal() {
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setAgent("TEST_ALG");
		return config;
	}
}

View File

@ -0,0 +1,67 @@
package org.gcube.dataaccess.algorithms.examples;
import java.util.List;
import org.gcube.dataanalysis.ecoengine.configuration.AlgorithmConfiguration;
import org.gcube.dataanalysis.ecoengine.interfaces.ComputationalAgent;
import org.gcube.dataanalysis.ecoengine.processing.factories.TransducerersFactory;
import org.gcube.dataanalysis.ecoengine.test.regression.Regressor;
/**
 * Local smoke test that runs the "LISTNAMES_TABLES" transducer against the
 * Obis2 PostgreSQL repository through the ecoengine regression harness.
 */
public class TestTransducers {

	public static void main(String[] args) throws Exception {
		System.out.println("TEST 1");
		List<ComputationalAgent> trans =
				TransducerersFactory.getTransducerers(testConfigLocal());
		// guard: the factory may return null or an empty list when the agent
		// is not registered — previously this threw IndexOutOfBoundsException
		if (trans == null || trans.isEmpty()) {
			System.out.println("no transducer found for agent LISTNAMES_TABLES");
			return;
		}
		ComputationalAgent agent = trans.get(0);
		agent.init();
		Regressor.process(agent);
		agent.getOutput();
	}

	/**
	 * Builds the test configuration: database connection parameters plus the
	 * column/table parameters expected by the LISTNAMES_TABLES agent.
	 *
	 * @return the configuration fed to the transducer factory
	 */
	private static AlgorithmConfiguration testConfigLocal() {
		AlgorithmConfiguration config = Regressor.getConfig();
		config.setConfigPath("./cfg");
		config.setParam("DatabaseName", "Obis2Repository");
		config.setParam("DatabaseUserName", "postgres");
		// NOTE(review): hard-coded database credentials committed to source
		// control — move them to an external, non-versioned config file.
		config.setParam("DatabasePassword", "0b1s@d4sc13nc3");
		config.setParam("databaseNamebaseDriver", "org.postgresql.Driver");
		config.setParam("DatabaseURL", "jdbc:postgresql://obis2.i-marine.research-infrastructures.eu:5432/obis");
		System.out.println("config: " + config.getParam("DatabaseUserName"));

		config.setAgent("LISTNAMES_TABLES");

		// occurrence-table parameters used by the agent
		config.setParam("longitudeColumn", "decimallongitude");
		config.setParam("latitudeColumn", "decimallatitude");
		config.setParam("recordedByColumn", "recordedby");
		config.setParam("scientificNameColumn", "scientificname");
		config.setParam("eventDateColumn", "eventdate");
		config.setParam("lastModificationColumn", "modified");
		config.setParam("OccurrencePointsTableName", "whitesharkoccurrences2");
		config.setParam("finalTableName", "whitesharkoccurrencesnoduplicates");
		config.setParam("spatialTolerance", "0.5");
		config.setParam("confidence", "80");
		return config;
	}
}