lang
stringclasses
2 values
license
stringclasses
13 values
stderr
stringlengths
0
343
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
6
87.7k
new_contents
stringlengths
0
6.23M
new_file
stringlengths
3
311
old_contents
stringlengths
0
6.23M
message
stringlengths
6
9.1k
old_file
stringlengths
3
311
subject
stringlengths
0
4k
git_diff
stringlengths
0
6.31M
Java
apache-2.0
a3b486a792dbb5b369b335b526dffa952cce9ee4
0
MeetMe/selenium,krosenvold/selenium,dkentw/selenium,amar-sharma/selenium,jerome-jacob/selenium,pulkitsinghal/selenium,gabrielsimas/selenium,knorrium/selenium,customcommander/selenium,vveliev/selenium,houchj/selenium,i17c/selenium,knorrium/selenium,carlosroh/selenium,lrowe/selenium,denis-vilyuzhanin/selenium-fastview,jknguyen/josephknguyen-selenium,xsyntrex/selenium,Ardesco/selenium,Ardesco/selenium,stupidnetizen/selenium,titusfortner/selenium,chrisblock/selenium,gotcha/selenium,davehunt/selenium,Appdynamics/selenium,thanhpete/selenium,gregerrag/selenium,knorrium/selenium,dbo/selenium,freynaud/selenium,arunsingh/selenium,joshmgrant/selenium,alexec/selenium,TheBlackTuxCorp/selenium,davehunt/selenium,lummyare/lummyare-test,dimacus/selenium,skurochkin/selenium,Sravyaksr/selenium,valfirst/selenium,asashour/selenium,skurochkin/selenium,chrisblock/selenium,bartolkaruza/selenium,RamaraoDonta/ramarao-clone,sri85/selenium,compstak/selenium,RamaraoDonta/ramarao-clone,vinay-qa/vinayit-android-server-apk,DrMarcII/selenium,lummyare/lummyare-lummy,yukaReal/selenium,asolntsev/selenium,mach6/selenium,asolntsev/selenium,carlosroh/selenium,joshmgrant/selenium,SevInf/IEDriver,yukaReal/selenium,isaksky/selenium,sankha93/selenium,petruc/selenium,mestihudson/selenium,jerome-jacob/selenium,SevInf/IEDriver,mojwang/selenium,Jarob22/selenium,asashour/selenium,clavery/selenium,jknguyen/josephknguyen-selenium,TheBlackTuxCorp/selenium,misttechnologies/selenium,aluedeke/chromedriver,lilredindy/selenium,telefonicaid/selenium,jknguyen/josephknguyen-selenium,gregerrag/selenium,gurayinan/selenium,gorlemik/selenium,s2oBCN/selenium,joshbruning/selenium,wambat/selenium,SeleniumHQ/selenium,SouWilliams/selenium,krmahadevan/selenium,alexec/selenium,alexec/selenium,dandv/selenium,bayandin/selenium,Sravyaksr/selenium,manuelpirez/selenium,chrsmithdemos/selenium,gemini-testing/selenium,customcommander/selenium,lrowe/selenium,gurayinan/selenium,soundcloud/selenium,misttechnologies/selenium,markodolancic/seleniu
m,gurayinan/selenium,Appdynamics/selenium,aluedeke/chromedriver,lukeis/selenium,SeleniumHQ/selenium,jabbrwcky/selenium,arunsingh/selenium,lummyare/lummyare-test,twalpole/selenium,eric-stanley/selenium,rrussell39/selenium,kalyanjvn1/selenium,slongwang/selenium,blackboarddd/selenium,juangj/selenium,compstak/selenium,jsakamoto/selenium,dimacus/selenium,Herst/selenium,i17c/selenium,minhthuanit/selenium,yukaReal/selenium,carlosroh/selenium,petruc/selenium,titusfortner/selenium,blueyed/selenium,dbo/selenium,uchida/selenium,houchj/selenium,twalpole/selenium,thanhpete/selenium,Ardesco/selenium,compstak/selenium,GorK-ChO/selenium,Jarob22/selenium,MCGallaspy/selenium,alb-i986/selenium,arunsingh/selenium,HtmlUnit/selenium,SevInf/IEDriver,gotcha/selenium,titusfortner/selenium,wambat/selenium,krmahadevan/selenium,MCGallaspy/selenium,gorlemik/selenium,dimacus/selenium,isaksky/selenium,lilredindy/selenium,sebady/selenium,eric-stanley/selenium,bmannix/selenium,rplevka/selenium,freynaud/selenium,mestihudson/selenium,doungni/selenium,freynaud/selenium,sri85/selenium,uchida/selenium,doungni/selenium,juangj/selenium,sebady/selenium,soundcloud/selenium,bmannix/selenium,misttechnologies/selenium,eric-stanley/selenium,krosenvold/selenium,lummyare/lummyare-test,HtmlUnit/selenium,RamaraoDonta/ramarao-clone,joshbruning/selenium,skurochkin/selenium,carsonmcdonald/selenium,bartolkaruza/selenium,anshumanchatterji/selenium,rovner/selenium,arunsingh/selenium,temyers/selenium,krmahadevan/selenium,carsonmcdonald/selenium,houchj/selenium,quoideneuf/selenium,lummyare/lummyare-test,wambat/selenium,tbeadle/selenium,customcommander/selenium,misttechnologies/selenium,sag-enorman/selenium,sag-enorman/selenium,dibagga/selenium,juangj/selenium,misttechnologies/selenium,joshbruning/selenium,kalyanjvn1/selenium,carsonmcdonald/selenium,blackboarddd/selenium,petruc/selenium,lmtierney/selenium,gorlemik/selenium,blackboarddd/selenium,minhthuanit/selenium,xsyntrex/selenium,arunsingh/selenium,jsarenik/jajomojo-sele
nium,krmahadevan/selenium,amikey/selenium,SeleniumHQ/selenium,valfirst/selenium,bartolkaruza/selenium,livioc/selenium,chrisblock/selenium,mojwang/selenium,telefonicaid/selenium,MCGallaspy/selenium,lrowe/selenium,SevInf/IEDriver,jknguyen/josephknguyen-selenium,Appdynamics/selenium,bayandin/selenium,dcjohnson1989/selenium,asashour/selenium,livioc/selenium,xsyntrex/selenium,amar-sharma/selenium,SouWilliams/selenium,sri85/selenium,Tom-Trumper/selenium,HtmlUnit/selenium,davehunt/selenium,rrussell39/selenium,carlosroh/selenium,tarlabs/selenium,jknguyen/josephknguyen-selenium,HtmlUnit/selenium,chrsmithdemos/selenium,temyers/selenium,petruc/selenium,dibagga/selenium,dcjohnson1989/selenium,TikhomirovSergey/selenium,sevaseva/selenium,Ardesco/selenium,Appdynamics/selenium,dkentw/selenium,manuelpirez/selenium,xsyntrex/selenium,JosephCastro/selenium,JosephCastro/selenium,kalyanjvn1/selenium,Herst/selenium,dibagga/selenium,anshumanchatterji/selenium,aluedeke/chromedriver,isaksky/selenium,temyers/selenium,xmhubj/selenium,5hawnknight/selenium,dimacus/selenium,mestihudson/selenium,manuelpirez/selenium,jsarenik/jajomojo-selenium,5hawnknight/selenium,skurochkin/selenium,TikhomirovSergey/selenium,carlosroh/selenium,compstak/selenium,HtmlUnit/selenium,jsakamoto/selenium,sevaseva/selenium,p0deje/selenium,titusfortner/selenium,twalpole/selenium,anshumanchatterji/selenium,slongwang/selenium,carlosroh/selenium,xmhubj/selenium,bayandin/selenium,MeetMe/selenium,5hawnknight/selenium,twalpole/selenium,valfirst/selenium,joshuaduffy/selenium,joshuaduffy/selenium,valfirst/selenium,JosephCastro/selenium,dcjohnson1989/selenium,zenefits/selenium,minhthuanit/selenium,vinay-qa/vinayit-android-server-apk,RamaraoDonta/ramarao-clone,stupidnetizen/selenium,krmahadevan/selenium,AutomatedTester/selenium,lrowe/selenium,tbeadle/selenium,rrussell39/selenium,meksh/selenium,jsarenik/jajomojo-selenium,joshmgrant/selenium,MCGallaspy/selenium,livioc/selenium,BlackSmith/selenium,SeleniumHQ/selenium,bmannix/selenium,S
eleniumHQ/selenium,dandv/selenium,alexec/selenium,joshmgrant/selenium,eric-stanley/selenium,AutomatedTester/selenium,pulkitsinghal/selenium,manuelpirez/selenium,anshumanchatterji/selenium,jabbrwcky/selenium,sri85/selenium,clavery/selenium,livioc/selenium,gorlemik/selenium,petruc/selenium,houchj/selenium,kalyanjvn1/selenium,bartolkaruza/selenium,valfirst/selenium,rplevka/selenium,mestihudson/selenium,MCGallaspy/selenium,amar-sharma/selenium,gorlemik/selenium,alexec/selenium,xmhubj/selenium,vveliev/selenium,TikhomirovSergey/selenium,gregerrag/selenium,dcjohnson1989/selenium,Ardesco/selenium,juangj/selenium,petruc/selenium,actmd/selenium,minhthuanit/selenium,pulkitsinghal/selenium,oddui/selenium,soundcloud/selenium,doungni/selenium,temyers/selenium,jsakamoto/selenium,Jarob22/selenium,meksh/selenium,sevaseva/selenium,sankha93/selenium,davehunt/selenium,gemini-testing/selenium,juangj/selenium,jabbrwcky/selenium,Dude-X/selenium,jsakamoto/selenium,Tom-Trumper/selenium,JosephCastro/selenium,oddui/selenium,rovner/selenium,titusfortner/selenium,asolntsev/selenium,twalpole/selenium,thanhpete/selenium,Appdynamics/selenium,dkentw/selenium,oddui/selenium,o-schneider/selenium,asolntsev/selenium,Herst/selenium,compstak/selenium,o-schneider/selenium,jknguyen/josephknguyen-selenium,tarlabs/selenium,valfirst/selenium,gabrielsimas/selenium,s2oBCN/selenium,tbeadle/selenium,sankha93/selenium,vinay-qa/vinayit-android-server-apk,5hawnknight/selenium,gurayinan/selenium,titusfortner/selenium,s2oBCN/selenium,joshmgrant/selenium,actmd/selenium,gurayinan/selenium,mojwang/selenium,SevInf/IEDriver,bmannix/selenium,orange-tv-blagnac/selenium,jsarenik/jajomojo-selenium,rplevka/selenium,zenefits/selenium,gregerrag/selenium,knorrium/selenium,minhthuanit/selenium,alb-i986/selenium,denis-vilyuzhanin/selenium-fastview,arunsingh/selenium,Appdynamics/selenium,sankha93/selenium,juangj/selenium,Sravyaksr/selenium,tkurnosova/selenium,gorlemik/selenium,oddui/selenium,houchj/selenium,Dude-X/selenium,o-schneide
r/selenium,SeleniumHQ/selenium,bayandin/selenium,chrsmithdemos/selenium,freynaud/selenium,SevInf/IEDriver,aluedeke/chromedriver,blackboarddd/selenium,markodolancic/selenium,soundcloud/selenium,tbeadle/selenium,mojwang/selenium,meksh/selenium,TheBlackTuxCorp/selenium,onedox/selenium,tkurnosova/selenium,jsarenik/jajomojo-selenium,HtmlUnit/selenium,AutomatedTester/selenium,petruc/selenium,rrussell39/selenium,markodolancic/selenium,pulkitsinghal/selenium,mojwang/selenium,eric-stanley/selenium,orange-tv-blagnac/selenium,DrMarcII/selenium,o-schneider/selenium,dandv/selenium,chrsmithdemos/selenium,lrowe/selenium,mach6/selenium,kalyanjvn1/selenium,BlackSmith/selenium,oddui/selenium,gregerrag/selenium,vveliev/selenium,GorK-ChO/selenium,mojwang/selenium,livioc/selenium,DrMarcII/selenium,blueyed/selenium,carsonmcdonald/selenium,HtmlUnit/selenium,o-schneider/selenium,i17c/selenium,denis-vilyuzhanin/selenium-fastview,lmtierney/selenium,customcommander/selenium,meksh/selenium,gabrielsimas/selenium,mach6/selenium,sevaseva/selenium,rplevka/selenium,anshumanchatterji/selenium,joshmgrant/selenium,Sravyaksr/selenium,joshuaduffy/selenium,vinay-qa/vinayit-android-server-apk,Jarob22/selenium,Herst/selenium,orange-tv-blagnac/selenium,Tom-Trumper/selenium,i17c/selenium,Sravyaksr/selenium,sag-enorman/selenium,RamaraoDonta/ramarao-clone,tarlabs/selenium,sri85/selenium,sankha93/selenium,actmd/selenium,rovner/selenium,jknguyen/josephknguyen-selenium,pulkitsinghal/selenium,clavery/selenium,knorrium/selenium,chrsmithdemos/selenium,xmhubj/selenium,RamaraoDonta/ramarao-clone,thanhpete/selenium,arunsingh/selenium,minhthuanit/selenium,s2oBCN/selenium,gotcha/selenium,tkurnosova/selenium,blueyed/selenium,lummyare/lummyare-lummy,denis-vilyuzhanin/selenium-fastview,tkurnosova/selenium,gorlemik/selenium,tbeadle/selenium,pulkitsinghal/selenium,sankha93/selenium,isaksky/selenium,markodolancic/selenium,lmtierney/selenium,markodolancic/selenium,soundcloud/selenium,slongwang/selenium,eric-stanley/selenium,o-s
chneider/selenium,RamaraoDonta/ramarao-clone,joshuaduffy/selenium,vveliev/selenium,slongwang/selenium,uchida/selenium,carsonmcdonald/selenium,SeleniumHQ/selenium,TikhomirovSergey/selenium,AutomatedTester/selenium,SouWilliams/selenium,dbo/selenium,denis-vilyuzhanin/selenium-fastview,slongwang/selenium,dbo/selenium,vinay-qa/vinayit-android-server-apk,lummyare/lummyare-test,gemini-testing/selenium,TheBlackTuxCorp/selenium,mestihudson/selenium,houchj/selenium,dcjohnson1989/selenium,valfirst/selenium,joshmgrant/selenium,jerome-jacob/selenium,GorK-ChO/selenium,quoideneuf/selenium,joshbruning/selenium,vveliev/selenium,skurochkin/selenium,asashour/selenium,SeleniumHQ/selenium,sevaseva/selenium,gotcha/selenium,p0deje/selenium,chrisblock/selenium,kalyanjvn1/selenium,orange-tv-blagnac/selenium,lukeis/selenium,dibagga/selenium,onedox/selenium,BlackSmith/selenium,livioc/selenium,minhthuanit/selenium,MCGallaspy/selenium,doungni/selenium,twalpole/selenium,blueyed/selenium,jsakamoto/selenium,alexec/selenium,gemini-testing/selenium,bayandin/selenium,joshuaduffy/selenium,jabbrwcky/selenium,dibagga/selenium,AutomatedTester/selenium,freynaud/selenium,dandv/selenium,MCGallaspy/selenium,isaksky/selenium,amar-sharma/selenium,Dude-X/selenium,jabbrwcky/selenium,skurochkin/selenium,eric-stanley/selenium,juangj/selenium,SouWilliams/selenium,bayandin/selenium,arunsingh/selenium,lmtierney/selenium,rplevka/selenium,MCGallaspy/selenium,bartolkaruza/selenium,xmhubj/selenium,onedox/selenium,compstak/selenium,MeetMe/selenium,Dude-X/selenium,carsonmcdonald/selenium,amikey/selenium,SevInf/IEDriver,yukaReal/selenium,sebady/selenium,JosephCastro/selenium,Dude-X/selenium,mach6/selenium,TikhomirovSergey/selenium,sag-enorman/selenium,blueyed/selenium,tbeadle/selenium,alb-i986/selenium,tkurnosova/selenium,lmtierney/selenium,carsonmcdonald/selenium,meksh/selenium,lummyare/lummyare-test,SouWilliams/selenium,sri85/selenium,meksh/selenium,GorK-ChO/selenium,jabbrwcky/selenium,sebady/selenium,sebady/selenium,gotc
ha/selenium,chrsmithdemos/selenium,lrowe/selenium,amar-sharma/selenium,SouWilliams/selenium,asolntsev/selenium,Dude-X/selenium,tkurnosova/selenium,SouWilliams/selenium,zenefits/selenium,sevaseva/selenium,SevInf/IEDriver,uchida/selenium,lummyare/lummyare-lummy,Sravyaksr/selenium,zenefits/selenium,tarlabs/selenium,BlackSmith/selenium,s2oBCN/selenium,clavery/selenium,amar-sharma/selenium,jsakamoto/selenium,Sravyaksr/selenium,lummyare/lummyare-test,Herst/selenium,soundcloud/selenium,s2oBCN/selenium,onedox/selenium,TikhomirovSergey/selenium,asashour/selenium,bayandin/selenium,Herst/selenium,lummyare/lummyare-test,lilredindy/selenium,davehunt/selenium,temyers/selenium,dcjohnson1989/selenium,davehunt/selenium,yukaReal/selenium,gabrielsimas/selenium,bayandin/selenium,p0deje/selenium,amikey/selenium,lmtierney/selenium,JosephCastro/selenium,lilredindy/selenium,stupidnetizen/selenium,gorlemik/selenium,xsyntrex/selenium,asashour/selenium,jsarenik/jajomojo-selenium,gregerrag/selenium,Jarob22/selenium,chrsmithdemos/selenium,manuelpirez/selenium,orange-tv-blagnac/selenium,sevaseva/selenium,rovner/selenium,mach6/selenium,chrisblock/selenium,sankha93/selenium,uchida/selenium,jsakamoto/selenium,livioc/selenium,markodolancic/selenium,denis-vilyuzhanin/selenium-fastview,stupidnetizen/selenium,carlosroh/selenium,knorrium/selenium,bmannix/selenium,mestihudson/selenium,gabrielsimas/selenium,aluedeke/chromedriver,dbo/selenium,clavery/selenium,lummyare/lummyare-lummy,onedox/selenium,titusfortner/selenium,dimacus/selenium,skurochkin/selenium,aluedeke/chromedriver,gregerrag/selenium,lukeis/selenium,isaksky/selenium,meksh/selenium,wambat/selenium,quoideneuf/selenium,telefonicaid/selenium,sag-enorman/selenium,jsarenik/jajomojo-selenium,Dude-X/selenium,gemini-testing/selenium,rrussell39/selenium,gemini-testing/selenium,gurayinan/selenium,MeetMe/selenium,Herst/selenium,s2oBCN/selenium,amar-sharma/selenium,carsonmcdonald/selenium,vveliev/selenium,Tom-Trumper/selenium,dkentw/selenium,sag-enorman/se
lenium,vinay-qa/vinayit-android-server-apk,wambat/selenium,dimacus/selenium,denis-vilyuzhanin/selenium-fastview,tarlabs/selenium,minhthuanit/selenium,orange-tv-blagnac/selenium,dcjohnson1989/selenium,bartolkaruza/selenium,amikey/selenium,dkentw/selenium,eric-stanley/selenium,asashour/selenium,JosephCastro/selenium,bmannix/selenium,uchida/selenium,juangj/selenium,quoideneuf/selenium,joshuaduffy/selenium,markodolancic/selenium,lilredindy/selenium,bartolkaruza/selenium,dandv/selenium,anshumanchatterji/selenium,stupidnetizen/selenium,xmhubj/selenium,mestihudson/selenium,asolntsev/selenium,sevaseva/selenium,blackboarddd/selenium,dandv/selenium,dibagga/selenium,dbo/selenium,knorrium/selenium,sevaseva/selenium,alb-i986/selenium,JosephCastro/selenium,freynaud/selenium,vveliev/selenium,gemini-testing/selenium,i17c/selenium,houchj/selenium,yukaReal/selenium,krosenvold/selenium,p0deje/selenium,lmtierney/selenium,DrMarcII/selenium,lrowe/selenium,RamaraoDonta/ramarao-clone,rplevka/selenium,titusfortner/selenium,Tom-Trumper/selenium,s2oBCN/selenium,freynaud/selenium,gabrielsimas/selenium,dcjohnson1989/selenium,doungni/selenium,GorK-ChO/selenium,joshbruning/selenium,clavery/selenium,xmhubj/selenium,asolntsev/selenium,xmhubj/selenium,krmahadevan/selenium,markodolancic/selenium,kalyanjvn1/selenium,freynaud/selenium,jerome-jacob/selenium,livioc/selenium,MeetMe/selenium,lilredindy/selenium,TheBlackTuxCorp/selenium,zenefits/selenium,MeetMe/selenium,sri85/selenium,Jarob22/selenium,chrsmithdemos/selenium,yukaReal/selenium,vinay-qa/vinayit-android-server-apk,soundcloud/selenium,manuelpirez/selenium,gabrielsimas/selenium,jerome-jacob/selenium,actmd/selenium,lummyare/lummyare-lummy,dkentw/selenium,mojwang/selenium,xsyntrex/selenium,asolntsev/selenium,davehunt/selenium,jsakamoto/selenium,dbo/selenium,livioc/selenium,oddui/selenium,pulkitsinghal/selenium,knorrium/selenium,titusfortner/selenium,lummyare/lummyare-lummy,alb-i986/selenium,customcommander/selenium,TheBlackTuxCorp/selenium,amar-sha
rma/selenium,alb-i986/selenium,jerome-jacob/selenium,gregerrag/selenium,actmd/selenium,lukeis/selenium,p0deje/selenium,joshuaduffy/selenium,jabbrwcky/selenium,doungni/selenium,onedox/selenium,lukeis/selenium,sebady/selenium,gabrielsimas/selenium,misttechnologies/selenium,joshbruning/selenium,uchida/selenium,slongwang/selenium,TheBlackTuxCorp/selenium,slongwang/selenium,dibagga/selenium,bayandin/selenium,AutomatedTester/selenium,gabrielsimas/selenium,Ardesco/selenium,GorK-ChO/selenium,anshumanchatterji/selenium,lummyare/lummyare-test,telefonicaid/selenium,SeleniumHQ/selenium,clavery/selenium,lilredindy/selenium,sag-enorman/selenium,Herst/selenium,lummyare/lummyare-lummy,yukaReal/selenium,GorK-ChO/selenium,krmahadevan/selenium,gregerrag/selenium,temyers/selenium,Appdynamics/selenium,aluedeke/chromedriver,houchj/selenium,Tom-Trumper/selenium,rovner/selenium,bmannix/selenium,sankha93/selenium,bartolkaruza/selenium,lrowe/selenium,actmd/selenium,Jarob22/selenium,uchida/selenium,pulkitsinghal/selenium,compstak/selenium,stupidnetizen/selenium,gotcha/selenium,MCGallaspy/selenium,joshbruning/selenium,sebady/selenium,quoideneuf/selenium,lukeis/selenium,krmahadevan/selenium,twalpole/selenium,HtmlUnit/selenium,doungni/selenium,oddui/selenium,telefonicaid/selenium,Dude-X/selenium,pulkitsinghal/selenium,AutomatedTester/selenium,arunsingh/selenium,slongwang/selenium,valfirst/selenium,joshuaduffy/selenium,Ardesco/selenium,blackboarddd/selenium,DrMarcII/selenium,isaksky/selenium,kalyanjvn1/selenium,chrisblock/selenium,gurayinan/selenium,rovner/selenium,Ardesco/selenium,uchida/selenium,gotcha/selenium,gurayinan/selenium,joshbruning/selenium,blueyed/selenium,thanhpete/selenium,rovner/selenium,p0deje/selenium,carsonmcdonald/selenium,lilredindy/selenium,petruc/selenium,gurayinan/selenium,MeetMe/selenium,bmannix/selenium,krosenvold/selenium,carlosroh/selenium,markodolancic/selenium,dbo/selenium,rrussell39/selenium,Appdynamics/selenium,5hawnknight/selenium,DrMarcII/selenium,sebady/selenium
,alb-i986/selenium,xmhubj/selenium,compstak/selenium,JosephCastro/selenium,gotcha/selenium,TikhomirovSergey/selenium,TheBlackTuxCorp/selenium,lukeis/selenium,sri85/selenium,amikey/selenium,juangj/selenium,5hawnknight/selenium,wambat/selenium,misttechnologies/selenium,i17c/selenium,orange-tv-blagnac/selenium,jabbrwcky/selenium,amar-sharma/selenium,jsarenik/jajomojo-selenium,onedox/selenium,zenefits/selenium,HtmlUnit/selenium,quoideneuf/selenium,thanhpete/selenium,rplevka/selenium,anshumanchatterji/selenium,telefonicaid/selenium,o-schneider/selenium,krosenvold/selenium,thanhpete/selenium,knorrium/selenium,titusfortner/selenium,aluedeke/chromedriver,tarlabs/selenium,tbeadle/selenium,asolntsev/selenium,p0deje/selenium,i17c/selenium,wambat/selenium,BlackSmith/selenium,telefonicaid/selenium,quoideneuf/selenium,mach6/selenium,dandv/selenium,MeetMe/selenium,joshmgrant/selenium,lilredindy/selenium,gemini-testing/selenium,chrsmithdemos/selenium,gotcha/selenium,isaksky/selenium,tbeadle/selenium,onedox/selenium,customcommander/selenium,zenefits/selenium,GorK-ChO/selenium,twalpole/selenium,slongwang/selenium,stupidnetizen/selenium,houchj/selenium,dkentw/selenium,manuelpirez/selenium,jerome-jacob/selenium,DrMarcII/selenium,dandv/selenium,SevInf/IEDriver,blackboarddd/selenium,dcjohnson1989/selenium,krmahadevan/selenium,telefonicaid/selenium,mach6/selenium,titusfortner/selenium,bmannix/selenium,jknguyen/josephknguyen-selenium,p0deje/selenium,soundcloud/selenium,s2oBCN/selenium,rrussell39/selenium,SouWilliams/selenium,doungni/selenium,blackboarddd/selenium,5hawnknight/selenium,thanhpete/selenium,blueyed/selenium,misttechnologies/selenium,Tom-Trumper/selenium,freynaud/selenium,dbo/selenium,amikey/selenium,lmtierney/selenium,SeleniumHQ/selenium,tkurnosova/selenium,lmtierney/selenium,zenefits/selenium,carlosroh/selenium,clavery/selenium,BlackSmith/selenium,rplevka/selenium,alexec/selenium,Jarob22/selenium,Herst/selenium,dandv/selenium,actmd/selenium,actmd/selenium,actmd/selenium,xsyntr
ex/selenium,amikey/selenium,dimacus/selenium,meksh/selenium,joshuaduffy/selenium,customcommander/selenium,orange-tv-blagnac/selenium,quoideneuf/selenium,tarlabs/selenium,aluedeke/chromedriver,Jarob22/selenium,skurochkin/selenium,doungni/selenium,valfirst/selenium,SouWilliams/selenium,BlackSmith/selenium,sankha93/selenium,dkentw/selenium,DrMarcII/selenium,zenefits/selenium,davehunt/selenium,valfirst/selenium,isaksky/selenium,i17c/selenium,Sravyaksr/selenium,manuelpirez/selenium,soundcloud/selenium,lrowe/selenium,Sravyaksr/selenium,twalpole/selenium,skurochkin/selenium,o-schneider/selenium,DrMarcII/selenium,dibagga/selenium,jerome-jacob/selenium,sri85/selenium,krosenvold/selenium,manuelpirez/selenium,BlackSmith/selenium,alexec/selenium,jabbrwcky/selenium,compstak/selenium,5hawnknight/selenium,rplevka/selenium,customcommander/selenium,jsarenik/jajomojo-selenium,stupidnetizen/selenium,joshbruning/selenium,blueyed/selenium,tkurnosova/selenium,dibagga/selenium,meksh/selenium,minhthuanit/selenium,wambat/selenium,joshmgrant/selenium,temyers/selenium,chrisblock/selenium,dimacus/selenium,mojwang/selenium,telefonicaid/selenium,yukaReal/selenium,Tom-Trumper/selenium,RamaraoDonta/ramarao-clone,lummyare/lummyare-lummy,jerome-jacob/selenium,vinay-qa/vinayit-android-server-apk,Dude-X/selenium,davehunt/selenium,Tom-Trumper/selenium,Appdynamics/selenium,dkentw/selenium,dimacus/selenium,o-schneider/selenium,amikey/selenium,asashour/selenium,alb-i986/selenium,blackboarddd/selenium,mojwang/selenium,vveliev/selenium,chrisblock/selenium,wambat/selenium,SeleniumHQ/selenium,tarlabs/selenium,joshmgrant/selenium,vinay-qa/vinayit-android-server-apk,rovner/selenium,krosenvold/selenium,gemini-testing/selenium,quoideneuf/selenium,eric-stanley/selenium,tkurnosova/selenium,mach6/selenium,oddui/selenium,AutomatedTester/selenium,vveliev/selenium,joshmgrant/selenium,clavery/selenium,gorlemik/selenium,Ardesco/selenium,rrussell39/selenium,i17c/selenium,p0deje/selenium,tbeadle/selenium,lukeis/selenium,se
bady/selenium,temyers/selenium,tarlabs/selenium,lummyare/lummyare-lummy,rovner/selenium,jknguyen/josephknguyen-selenium,xsyntrex/selenium,BlackSmith/selenium,onedox/selenium,thanhpete/selenium,MeetMe/selenium,TikhomirovSergey/selenium,temyers/selenium,xsyntrex/selenium,denis-vilyuzhanin/selenium-fastview,chrisblock/selenium,mestihudson/selenium,sag-enorman/selenium,HtmlUnit/selenium,mestihudson/selenium,TikhomirovSergey/selenium,alb-i986/selenium,asashour/selenium,rrussell39/selenium,amikey/selenium,sag-enorman/selenium,denis-vilyuzhanin/selenium-fastview,valfirst/selenium,customcommander/selenium,oddui/selenium,alexec/selenium,orange-tv-blagnac/selenium,krosenvold/selenium,petruc/selenium,jsakamoto/selenium,5hawnknight/selenium,lukeis/selenium,TheBlackTuxCorp/selenium,misttechnologies/selenium,kalyanjvn1/selenium,blueyed/selenium,AutomatedTester/selenium,anshumanchatterji/selenium,mach6/selenium,krosenvold/selenium,bartolkaruza/selenium,GorK-ChO/selenium,stupidnetizen/selenium
package org.openqa.selenium.internal; import org.json.JSONArray; import org.json.JSONException; import org.openqa.selenium.Ignore; import org.openqa.selenium.remote.BeanToJsonConverter; import java.lang.reflect.Method; import java.util.*; public class IgnoreCollector implements IgnoredTestCallback { private Set<Map> tests = new HashSet<Map>(); private BeanToJsonConverter converter = new BeanToJsonConverter(); public void callback(Class clazz, String testName, Ignore ignore) { for (String name : getTestMethodsFor(clazz, testName)) { tests.add(IgnoredTestCase.asMap(clazz.getName(), name, ignore)); } } private List<String> getTestMethodsFor(Class clazz, String testName) { if (!testName.isEmpty()) { return Arrays.asList(testName); } List<String> testMethods = new ArrayList<String>(); Method[] methods = clazz.getDeclaredMethods(); for (Method method : methods) { if (isTestMethod(method)) { testMethods.add(method.getName()); } } return testMethods; } private boolean isTestMethod(Method method) { return method.getAnnotation(org.junit.Test.class) != null || method.getName().startsWith("test"); } public String toJson() throws JSONException { return new JSONArray(converter.convert(tests)).toString(); } private static class IgnoredTestCase { public static Map<String, Object> asMap(String className, String testName, Ignore ignore) { final Map<String, Object> map = new HashMap<String, Object>(); map.put("className", className); map.put("testName", testName); map.put("reason", ignore.reason()); map.put("issues", ignore.issues()); final Set<String> drivers = new HashSet<String>(); for (Ignore.Driver driver : ignore.value()) { drivers.add(driver.name()); } map.put("drivers", drivers); return map; } } }
java/client/test/org/openqa/selenium/internal/IgnoreCollector.java
package org.openqa.selenium.internal; import org.json.JSONArray; import org.json.JSONException; import org.openqa.selenium.Ignore; import org.openqa.selenium.remote.BeanToJsonConverter; import java.lang.reflect.Method; import java.util.*; public class IgnoreCollector implements IgnoredTestCallback { private Set<Map> tests = new HashSet<Map>(); private BeanToJsonConverter converter = new BeanToJsonConverter(); public void callback(Class clazz, String testName, Ignore ignore) { for (String name : getTestMethodsFor(clazz, testName)) { tests.add(IgnoredTestCase.asMap(clazz.getName(), name, ignore)); } } private List<String> getTestMethodsFor(Class clazz, String testName) { if (!testName.isEmpty()) { return Arrays.asList(testName); } List<String> testMethods = new ArrayList<String>(); Method[] methods = clazz.getDeclaredMethods(); for (Method method : methods) { if (isTestMethod(method)) { testMethods.add(method.getName()); } } return testMethods; } private boolean isTestMethod(Method method) { return method.getAnnotation(org.junit.Test.class) != null || method.getName().startsWith("test"); } public String toJson() throws JSONException { return new JSONArray(converter.convert(tests)).toString(); } private static class IgnoredTestCase { public static Map<String, Object> asMap(String className, String testName, Ignore ignore) { final Map<String, Object> map = new HashMap<String, Object>(); map.put("className", className); map.put("testName", testName); map.put("reason", ignore.reason()); final Set<String> drivers = new HashSet<String>(); for (Ignore.Driver driver : ignore.value()) { drivers.add(driver.name()); } map.put("drivers", drivers); return map; } } }
JariBakken: Dump Ignore.issues() as well. r14375
java/client/test/org/openqa/selenium/internal/IgnoreCollector.java
JariBakken: Dump Ignore.issues() as well.
<ide><path>ava/client/test/org/openqa/selenium/internal/IgnoreCollector.java <ide> map.put("className", className); <ide> map.put("testName", testName); <ide> map.put("reason", ignore.reason()); <add> map.put("issues", ignore.issues()); <ide> <ide> final Set<String> drivers = new HashSet<String>(); <ide> for (Ignore.Driver driver : ignore.value()) {
Java
mpl-2.0
682c6ad7a17298f57ae148e5d585e2147b7d8553
0
JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core,JurassicWordExcel/core
/************************************************************************* * * OpenOffice.org - a multi-platform office productivity suite * * $RCSfile: UnoScrollBarControl.java,v $ * * $Revision: 1.6 $ * * last change: $Author: kz $ $Date: 2005-11-02 18:26:19 $ * * The Contents of this file are made available subject to * the terms of GNU Lesser General Public License Version 2.1. * * * GNU Lesser General Public License Version 2.1 * ============================================= * Copyright 2005 by Sun Microsystems, Inc. * 901 San Antonio Road, Palo Alto, CA 94303, USA * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License version 2.1, as published by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, * MA 02111-1307 USA * ************************************************************************/ package mod._toolkit; import com.sun.star.accessibility.XAccessible; import com.sun.star.accessibility.XAccessibleComponent; import com.sun.star.awt.Point; import com.sun.star.awt.Rectangle; import com.sun.star.awt.XControl; import com.sun.star.awt.XControlModel; import com.sun.star.awt.XDevice; import com.sun.star.awt.XGraphics; import com.sun.star.awt.XScrollBar; import com.sun.star.awt.XToolkit; import com.sun.star.awt.XWindow; import com.sun.star.awt.XWindowPeer; import com.sun.star.drawing.XControlShape; import com.sun.star.drawing.XShape; import com.sun.star.frame.XController; import com.sun.star.frame.XFrame; import com.sun.star.frame.XModel; import com.sun.star.lang.XMultiServiceFactory; import com.sun.star.text.XTextDocument; import com.sun.star.uno.UnoRuntime; import com.sun.star.uno.XInterface; import com.sun.star.util.XCloseable; import com.sun.star.view.XControlAccess; import java.awt.Dimension; import java.awt.Toolkit; import java.io.PrintWriter; import lib.StatusException; import lib.TestCase; import lib.TestEnvironment; import lib.TestParameters; import util.FormTools; import util.SOfficeFactory; import util.WriterTools; import util.utils; public class UnoScrollBarControl extends TestCase { private static XTextDocument xTextDoc; protected void initialize(TestParameters Param, PrintWriter log) { SOfficeFactory SOF = SOfficeFactory.getFactory( (XMultiServiceFactory) Param.getMSF()); try { log.println("creating a textdocument"); xTextDoc = SOF.createTextDoc(null); log.println("maximize the window size"); XModel xModel = (XModel) UnoRuntime.queryInterface(XModel.class, xTextDoc); XFrame xFrame = xModel.getCurrentController().getFrame(); XWindow xWin = 
xFrame.getContainerWindow(); Toolkit tk = Toolkit.getDefaultToolkit(); Dimension dim = tk.getScreenSize(); Rectangle newPosSize = xWin.getPosSize(); newPosSize.Width = new Double(dim.getWidth()).intValue(); newPosSize.Height = new Double(dim.getHeight()).intValue(); newPosSize.X = 0; newPosSize.Y = 0; xWin.setPosSize(newPosSize.X, newPosSize.Y, newPosSize.Width, newPosSize.Height, com.sun.star.awt.PosSize.POSSIZE); } catch (com.sun.star.uno.Exception e) { // Some exception occures.FAILED e.printStackTrace(log); throw new StatusException("Couldn't create document", e); } } protected void cleanup(TestParameters tParam, PrintWriter log) { log.println(" disposing xTextDoc "); util.DesktopTools.closeDoc(xTextDoc); } protected TestEnvironment createTestEnvironment(TestParameters Param, PrintWriter log) { XInterface oObj = null; XWindowPeer the_win = null; XToolkit the_kit = null; XDevice aDevice = null; XGraphics aGraphic = null; XControl aControl = null; //Insert a ControlShape and get the ControlModel XControlShape aShape = FormTools.createUnoControlShape(xTextDoc, 3000, 4500, 15000, 10000, "ScrollBar", "UnoControlScrollBar"); WriterTools.getDrawPage(xTextDoc).add((XShape) aShape); XControlModel the_Model = aShape.getControl(); XControlShape aShape2 = FormTools.createControlShape(xTextDoc, 3000, 4500, 5000, 10000, "TextField"); WriterTools.getDrawPage(xTextDoc).add((XShape) aShape2); XControlModel the_Model2 = aShape2.getControl(); //Try to query XControlAccess XControlAccess the_access = (XControlAccess) UnoRuntime.queryInterface( XControlAccess.class, xTextDoc.getCurrentController()); //get the ScrollBarControl for the needed Object relations try { oObj = the_access.getControl(the_Model); aControl = the_access.getControl(the_Model2); the_win = the_access.getControl(the_Model).getPeer(); the_kit = the_win.getToolkit(); aDevice = the_kit.createScreenCompatibleDevice(200, 200); aGraphic = aDevice.createGraphics(); } catch (Exception e) { log.println("Couldn't get 
ScrollBarControl"); e.printStackTrace(log); throw new StatusException("Couldn't get ScrollBarControl", e); } log.println( "creating a new environment for UnoControlScrollBar object"); TestEnvironment tEnv = new TestEnvironment(oObj); //adding Object-Relation for XScrollBar tEnv.addObjRelation("Document", xTextDoc); //Adding ObjRelation for XView tEnv.addObjRelation("GRAPHICS", aGraphic); //Adding ObjRelation for XControl tEnv.addObjRelation("CONTEXT", xTextDoc); tEnv.addObjRelation("WINPEER", the_win); tEnv.addObjRelation("TOOLKIT", the_kit); tEnv.addObjRelation("MODEL", the_Model); XWindow forObjRel = (XWindow) UnoRuntime.queryInterface(XWindow.class, aControl); tEnv.addObjRelation("XWindow.AnotherWindow", forObjRel); System.out.println("ImplementationName: " + utils.getImplName(oObj)); return tEnv; } // finish method getTestEnvironment }
qadevOOo/tests/java/mod/_toolkit/UnoScrollBarControl.java
/************************************************************************* * * OpenOffice.org - a multi-platform office productivity suite * * $RCSfile: UnoScrollBarControl.java,v $ * * $Revision: 1.5 $ * * last change: $Author: rt $ $Date: 2005-09-09 04:50:04 $ * * The Contents of this file are made available subject to * the terms of GNU Lesser General Public License Version 2.1. * * * GNU Lesser General Public License Version 2.1 * ============================================= * Copyright 2005 by Sun Microsystems, Inc. * 901 San Antonio Road, Palo Alto, CA 94303, USA * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License version 2.1, as published by the Free Software Foundation. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, * MA 02111-1307 USA * ************************************************************************/ package mod._toolkit; import com.sun.star.accessibility.XAccessible; import com.sun.star.accessibility.XAccessibleComponent; import com.sun.star.awt.Point; import com.sun.star.awt.Rectangle; import com.sun.star.awt.XControl; import com.sun.star.awt.XControlModel; import com.sun.star.awt.XDevice; import com.sun.star.awt.XGraphics; import com.sun.star.awt.XScrollBar; import com.sun.star.awt.XToolkit; import com.sun.star.awt.XWindow; import com.sun.star.awt.XWindowPeer; import com.sun.star.drawing.XControlShape; import com.sun.star.drawing.XShape; import com.sun.star.frame.XController; import com.sun.star.frame.XFrame; import com.sun.star.frame.XModel; import com.sun.star.lang.XMultiServiceFactory; import com.sun.star.text.XTextDocument; import com.sun.star.uno.UnoRuntime; import com.sun.star.uno.XInterface; import com.sun.star.util.XCloseable; import com.sun.star.view.XControlAccess; import java.awt.Dimension; import java.awt.Toolkit; import java.io.PrintWriter; import lib.StatusException; import lib.TestCase; import lib.TestEnvironment; import lib.TestParameters; import util.FormTools; import util.SOfficeFactory; import util.WriterTools; import util.utils; public class UnoScrollBarControl extends TestCase { XTextDocument xTextDoc; protected void initialize(TestParameters Param, PrintWriter log) { SOfficeFactory SOF = SOfficeFactory.getFactory( (XMultiServiceFactory) Param.getMSF()); try { log.println("creating a textdocument"); xTextDoc = SOF.createTextDoc(null); log.println("maximize the window size"); XModel xModel = (XModel) UnoRuntime.queryInterface(XModel.class, xTextDoc); XFrame xFrame = xModel.getCurrentController().getFrame(); XWindow xWin = xFrame.getContainerWindow(); 
Toolkit tk = Toolkit.getDefaultToolkit(); Dimension dim = tk.getScreenSize(); Rectangle newPosSize = xWin.getPosSize(); newPosSize.Width = new Double(dim.getWidth()).intValue(); newPosSize.Height = new Double(dim.getHeight()).intValue(); newPosSize.X = 0; newPosSize.Y = 0; xWin.setPosSize(newPosSize.X, newPosSize.Y, newPosSize.Width, newPosSize.Height, com.sun.star.awt.PosSize.POSSIZE); } catch (com.sun.star.uno.Exception e) { // Some exception occures.FAILED e.printStackTrace(log); throw new StatusException("Couldn't create document", e); } } protected void cleanup(TestParameters tParam, PrintWriter log) { log.println(" disposing xTextDoc "); util.DesktopTools.closeDoc(xTextDoc); } protected TestEnvironment createTestEnvironment(TestParameters Param, PrintWriter log) { XInterface oObj = null; XWindowPeer the_win = null; XToolkit the_kit = null; XDevice aDevice = null; XGraphics aGraphic = null; XControl aControl = null; //Insert a ControlShape and get the ControlModel XControlShape aShape = FormTools.createUnoControlShape(xTextDoc, 3000, 4500, 15000, 10000, "ScrollBar", "UnoControlScrollBar"); WriterTools.getDrawPage(xTextDoc).add((XShape) aShape); XControlModel the_Model = aShape.getControl(); XControlShape aShape2 = FormTools.createControlShape(xTextDoc, 3000, 4500, 5000, 10000, "TextField"); WriterTools.getDrawPage(xTextDoc).add((XShape) aShape2); XControlModel the_Model2 = aShape2.getControl(); //Try to query XControlAccess XControlAccess the_access = (XControlAccess) UnoRuntime.queryInterface( XControlAccess.class, xTextDoc.getCurrentController()); //get the ScrollBarControl for the needed Object relations try { oObj = the_access.getControl(the_Model); aControl = the_access.getControl(the_Model2); the_win = the_access.getControl(the_Model).getPeer(); the_kit = the_win.getToolkit(); aDevice = the_kit.createScreenCompatibleDevice(200, 200); aGraphic = aDevice.createGraphics(); } catch (Exception e) { log.println("Couldn't get ScrollBarControl"); 
e.printStackTrace(log); throw new StatusException("Couldn't get ScrollBarControl", e); } log.println( "creating a new environment for UnoControlScrollBar object"); TestEnvironment tEnv = new TestEnvironment(oObj); //adding Object-Relation for XScrollBar tEnv.addObjRelation("Document", xTextDoc); //Adding ObjRelation for XView tEnv.addObjRelation("GRAPHICS", aGraphic); //Adding ObjRelation for XControl tEnv.addObjRelation("CONTEXT", xTextDoc); tEnv.addObjRelation("WINPEER", the_win); tEnv.addObjRelation("TOOLKIT", the_kit); tEnv.addObjRelation("MODEL", the_Model); XWindow forObjRel = (XWindow) UnoRuntime.queryInterface(XWindow.class, aControl); tEnv.addObjRelation("XWindow.AnotherWindow", forObjRel); System.out.println("ImplementationName: " + utils.getImplName(oObj)); return tEnv; } // finish method getTestEnvironment }
INTEGRATION: CWS qadev24 (1.4.22); FILE MERGED 2005/09/19 17:23:52 cn 1.4.22.2: RESYNC: (1.4-1.5); FILE MERGED 2005/08/19 07:40:06 cn 1.4.22.1: #i52213# declare instance variables as 'privte static'
qadevOOo/tests/java/mod/_toolkit/UnoScrollBarControl.java
INTEGRATION: CWS qadev24 (1.4.22); FILE MERGED 2005/09/19 17:23:52 cn 1.4.22.2: RESYNC: (1.4-1.5); FILE MERGED 2005/08/19 07:40:06 cn 1.4.22.1: #i52213# declare instance variables as 'privte static'
<ide><path>adevOOo/tests/java/mod/_toolkit/UnoScrollBarControl.java <ide> * <ide> * $RCSfile: UnoScrollBarControl.java,v $ <ide> * <del> * $Revision: 1.5 $ <add> * $Revision: 1.6 $ <ide> * <del> * last change: $Author: rt $ $Date: 2005-09-09 04:50:04 $ <add> * last change: $Author: kz $ $Date: 2005-11-02 18:26:19 $ <ide> * <ide> * The Contents of this file are made available subject to <ide> * the terms of GNU Lesser General Public License Version 2.1. <ide> <ide> <ide> public class UnoScrollBarControl extends TestCase { <del> XTextDocument xTextDoc; <add> private static XTextDocument xTextDoc; <ide> <ide> protected void initialize(TestParameters Param, PrintWriter log) { <ide> SOfficeFactory SOF = SOfficeFactory.getFactory(
Java
mit
6707e4c2b6f77993cd7dce4045642bc181b5608c
0
jsettlers/settlers-remake,phirschbeck/settlers-remake,phirschbeck/settlers-remake,phirschbeck/settlers-remake,Peter-Maximilian/settlers-remake,andreasb242/settlers-remake,andreasb242/settlers-remake,andreasb242/settlers-remake,jsettlers/settlers-remake,jsettlers/settlers-remake
package jsettlers.main.android.core.controls; import java.util.LinkedList; import go.graphics.android.AndroidSoundPlayer; import jsettlers.common.menu.IStartedGame; import jsettlers.common.menu.action.EActionType; import jsettlers.common.menu.action.IAction; import jsettlers.common.player.IInGamePlayer; import jsettlers.common.selectable.ISelectionSet; import jsettlers.graphics.action.Action; import jsettlers.graphics.map.ETextDrawPosition; import jsettlers.graphics.map.MapContent; import jsettlers.graphics.map.controls.IControls; import android.content.Context; /** * Created by tompr on 14/01/2017. */ public class ControlsAdapter implements ActionControls, DrawControls, SelectionControls, TaskControls { private static final int SOUND_THREADS = 6; private final Context context; private final AndroidSoundPlayer soundPlayer; private final IInGamePlayer player; private final AndroidControls androidControls; private final MapContent mapContent; private final GameMenu gameMenu; private final LinkedList<SelectionListener> selectionListeners = new LinkedList<>(); private final LinkedList<ActionListener> actionListeners = new LinkedList<>(); private final LinkedList<DrawListener> drawListeners = new LinkedList<>(); private ISelectionSet selection; public ControlsAdapter(Context context, IStartedGame game) { this.context = context; this.player = game.getInGamePlayer(); soundPlayer = new AndroidSoundPlayer(SOUND_THREADS); androidControls = new AndroidControls(this); mapContent = new MapContent(game, soundPlayer, ETextDrawPosition.NONE, androidControls); gameMenu = new GameMenu(context, soundPlayer, this); } public IControls getControls() { return androidControls; } public MapContent getMapContent() { return mapContent; } public void onAction(IAction action) { synchronized (actionListeners) { for (ActionListener listener : actionListeners) { listener.actionFired(action); } } } public void onSelection(ISelectionSet selection) { if (selection != null && selection.getSize() > 0) 
{ this.selection = selection; } else { this.selection = null; } synchronized (selectionListeners) { for (SelectionListener listener : selectionListeners) { listener.selectionChanged(this.selection); } } } public void onDraw() { synchronized (drawListeners) { for (DrawListener listener : drawListeners) { listener.draw(); } } } /** * ActionControls implementation */ @Override public void fireAction(IAction action) { androidControls.fireAction(action); } @Override public void addActionListener(ActionListener actionListener) { synchronized (actionListeners) { actionListeners.add(actionListener); } } @Override public void removeActionListener(ActionListener actionListener) { synchronized (actionListeners) { actionListeners.remove(actionListener); } } /** * DrawControls implementation */ @Override public void addDrawListener(DrawListener drawListener) { synchronized (drawListeners) { drawListeners.add(drawListener); } } @Override public void removeDrawListener(DrawListener drawListener) { synchronized (drawListeners) { drawListeners.remove(drawListener); } } /** * SelectionControls implementation */ @Override public ISelectionSet getCurrentSelection() { return selection; } @Override public void deselect() { if (selection != null) { fireAction(new Action(EActionType.DESELECT)); } } @Override public void addSelectionListener(SelectionListener selectionListener) { synchronized (selectionListeners) { selectionListeners.add(selectionListener); } } @Override public void removeSelectionListener(SelectionListener selectionListener) { synchronized (selectionListeners) { selectionListeners.remove(selectionListener); } } /** * TaskControls implementation * * @return */ @Override public boolean isTaskActive() { return androidControls.isTaskActive(); } @Override public void endTask() { androidControls.endTask(); } public GameMenu getGameMenu() { return gameMenu; } public IInGamePlayer getInGamePlayer() { return player; } }
jsettlers.main.android/src/main/java/jsettlers/main/android/core/controls/ControlsAdapter.java
package jsettlers.main.android.core.controls; import java.util.LinkedList; import go.graphics.android.AndroidSoundPlayer; import jsettlers.common.menu.IStartedGame; import jsettlers.common.menu.action.EActionType; import jsettlers.common.menu.action.IAction; import jsettlers.common.player.IInGamePlayer; import jsettlers.common.selectable.ISelectionSet; import jsettlers.graphics.action.Action; import jsettlers.graphics.map.ETextDrawPosition; import jsettlers.graphics.map.MapContent; import jsettlers.graphics.map.controls.IControls; import android.content.Context; /** * Created by tompr on 14/01/2017. */ public class ControlsAdapter implements ActionControls, DrawControls, SelectionControls, TaskControls { private static final int SOUND_THREADS = 6; private final Context context; private final AndroidSoundPlayer soundPlayer; private final IInGamePlayer player; private final AndroidControls androidControls; private final MapContent mapContent; private final GameMenu gameMenu; private final LinkedList<SelectionListener> selectionListeners = new LinkedList<>(); private final LinkedList<ActionListener> actionListeners = new LinkedList<>(); private final LinkedList<DrawListener> drawListeners = new LinkedList<>(); private ISelectionSet selection; public ControlsAdapter(Context context, IStartedGame game) { this.context = context; this.player = game.getInGamePlayer(); soundPlayer = new AndroidSoundPlayer(SOUND_THREADS); androidControls = new AndroidControls(this); mapContent = new MapContent(game, soundPlayer, ETextDrawPosition.TOP_LEFT, androidControls); gameMenu = new GameMenu(context, soundPlayer, this); } public IControls getControls() { return androidControls; } public MapContent getMapContent() { return mapContent; } public void onAction(IAction action) { synchronized (actionListeners) { for (ActionListener listener : actionListeners) { listener.actionFired(action); } } } public void onSelection(ISelectionSet selection) { if (selection != null && selection.getSize() 
> 0) { this.selection = selection; } else { this.selection = null; } synchronized (selectionListeners) { for (SelectionListener listener : selectionListeners) { listener.selectionChanged(this.selection); } } } public void onDraw() { synchronized (drawListeners) { for (DrawListener listener : drawListeners) { listener.draw(); } } } /** * ActionControls implementation */ @Override public void fireAction(IAction action) { androidControls.fireAction(action); } @Override public void addActionListener(ActionListener actionListener) { synchronized (actionListeners) { actionListeners.add(actionListener); } } @Override public void removeActionListener(ActionListener actionListener) { synchronized (actionListeners) { actionListeners.remove(actionListener); } } /** * DrawControls implementation */ @Override public void addDrawListener(DrawListener drawListener) { synchronized (drawListeners) { drawListeners.add(drawListener); } } @Override public void removeDrawListener(DrawListener drawListener) { synchronized (drawListeners) { drawListeners.remove(drawListener); } } /** * SelectionControls implementation */ @Override public ISelectionSet getCurrentSelection() { return selection; } @Override public void deselect() { if (selection != null) { fireAction(new Action(EActionType.DESELECT)); } } @Override public void addSelectionListener(SelectionListener selectionListener) { synchronized (selectionListeners) { selectionListeners.add(selectionListener); } } @Override public void removeSelectionListener(SelectionListener selectionListener) { synchronized (selectionListeners) { selectionListeners.remove(selectionListener); } } /** * TaskControls implementation * * @return */ @Override public boolean isTaskActive() { return androidControls.isTaskActive(); } @Override public void endTask() { androidControls.endTask(); } public GameMenu getGameMenu() { return gameMenu; } public IInGamePlayer getInGamePlayer() { return player; } }
Disabled time and FPS until decision on how best to handle status bar
jsettlers.main.android/src/main/java/jsettlers/main/android/core/controls/ControlsAdapter.java
Disabled time and FPS until decision on how best to handle status bar
<ide><path>settlers.main.android/src/main/java/jsettlers/main/android/core/controls/ControlsAdapter.java <ide> <ide> soundPlayer = new AndroidSoundPlayer(SOUND_THREADS); <ide> androidControls = new AndroidControls(this); <del> mapContent = new MapContent(game, soundPlayer, ETextDrawPosition.TOP_LEFT, androidControls); <add> mapContent = new MapContent(game, soundPlayer, ETextDrawPosition.NONE, androidControls); <ide> gameMenu = new GameMenu(context, soundPlayer, this); <ide> } <ide>
JavaScript
mit
0d2761dc81384f8689058bd3ff7d6308f13ca4fc
0
markkr125/jsmaps
/** * The native maps is a fork of https://github.com/robotnic/khtmlib * * khtmlib credits: * verion 0.54 * LGPL Bernhard Zwischenbrugger * * @param mapDomDocument * @constructor */ if (typeof jsMaps.Native == 'undefined') { jsMaps.Native = function (mapDomDocument) {}; jsMaps.Native.prototype = new jsMaps.Abstract(); } jsMaps.Native.MapCount = 0; jsMaps.Native.Overlay = {}; /** * create the map * * @param map * @param options * @param {jsMaps.Native.Tiles} tileLayers * @returns {jsMaps.MapStructure} */ jsMaps.Native.prototype.initializeMap = function (map, options, tileLayers) { jsMaps.Native.Dom.addClass(map,'jsMaps-Native'); jsMaps.Native.MapCount++; this.MapNumber = jsMaps.Native.MapCount; // **** Overlays handling **** this.addOverlay = function (obj) { this.overlays.push(obj); if (typeof(obj.init) == "function") { obj.init(this); } this.renderOverlay(obj); }; this.renderOverlay = function (obj) { obj.render(); }; this.renderOverlays = function () { this.overlayDiv.style.display = ""; var that = this; var i = 0; for (var obj in this.overlays) { if (this.overlays.hasOwnProperty(obj) == false) continue; if (i == 0) { try { //this.overlays[obj].clear(that); } catch (e) { } i++; } this.overlays[obj].render(); } }; this.hideOverlays = function () { for (var obj in this.overlays) { if (this.overlays.hasOwnProperty(obj) == false) continue; try { if (typeof this.overlays[obj].hide !='undefined') { this.overlays[obj].hide(that); } } catch (e) { } } }; this.removeOverlays = function () { while (this.overlays.length > 0) { var overlay = this.overlays.pop(); overlay.clear(); } }; this.stopRenderOverlays = function () { for (var obj in this.overlays) { if (this.overlays.hasOwnProperty(obj) == false) continue; if (typeof(this.overlays[obj].cancel) == "function") { this.overlays[obj].cancel(); } } }; this.removeOverlay = function (ov) { for (var i = 0; i < this.overlays.length; i++) { var overlay = this.overlays[i]; if (ov == overlay) { ov.clear(); this.overlays.splice(i, 
1); break; } } }; // every change (lat,lng,zoom) will call a user defined function this.callbackFunctions = []; this.addCallbackFunction = function (func) { if (typeof(func) == "function") { this.callbackFunctions.push(func); } }; this.executeCallbackFunctions = function () { for (var i = 0; i < this.callbackFunctions.length; i++) { this.callbackFunctions[i].call(); } }; /*================================================== // // Touchscreen and Mouse EVENTS // ===================================================*/ // // Touchscreen // Here also the multitouch zoom is done this.moving=false; this.start = function(evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { evt.returnValue = false; // The IE way } jsMaps.Native.Dom.addClass(document.body,'jsMaps-Native-no-scroll'); this.moving = this.center(); this.moveAnimationBlocked = true; if (evt.touches.length == 1) { if (this.mousedownTime != null) { var now = (new Date()).getTime(); if (now - this.mousedownTime < this.doubleclickTime) { this.discretZoom(1,this.pageX(evt.touches[0]), this.pageY(evt.touches[0])); } } if (!this.discretZoomBlocked) { this.mousedownTime2 = (new Date()).getTime(); this.startMoveX = this.moveX - (this.pageX(evt.touches[0])) / this.factor / this.sc; this.startMoveY = this.moveY - (this.pageY(evt.touches[0])) / this.factor / this.sc; this.movestarted = true; jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.dragstart); this.mousedownTime = (new Date()).getTime(); } } if (evt.touches.length == 2 && !this.discretZoomBlocked) { this.mousedownTime = null; this.movestarted = false; var X1 = this.pageX(evt.touches[0]); var Y1 = this.pageY(evt.touches[0]); var X2 = this.pageX(evt.touches[1]); var Y2 = this.pageY(evt.touches[1]); this.startDistance = Math.sqrt(Math.pow((X2 - X1), 2) + Math.pow((Y2 - Y1), 2)); this.startZZ = this.position.zoom; var x = (X1 + X2) / 2 / this.factor / this.sc; var y = (Y1 + Y2) / 2 / this.factor / this.sc; this.startMoveX 
= this.moveX - x; this.startMoveY = this.moveY - y; this.prevxy = {x:0,y:0}; } }; this.moveok = true; this.prevxy = {x:0,y:0}; this.move = function(evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { evt.returnValue = false; // The IE way } var center; if (evt.touches.length == 1 && this.movestarted) { this.lastMouseX = this.pageX(evt.touches[0]); this.lastMouseY = this.pageY(evt.touches[0]); jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.drag); this.lastMoveX = this.moveX; this.lastMoveY = this.moveY; this.lastMoveTime = new Date(); this.moveX = (this.pageX(evt.touches[0])) / this.factor / this.sc + this.startMoveX; this.moveY = (this.pageY(evt.touches[0])) / this.factor / this.sc + this.startMoveY; center = new jsMaps.geo.Location(this.lat, this.lng); this.setCenter2(center, this.position.zoom); this.moveAnimationBlocked = false; jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.bounds_changed); jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.center_changed); } if (evt.touches.length == 2 && !this.discretZoomBlocked) { this.movestarted = false; this.mousedownTime = null; var X1 = this.pageX(evt.touches[0]); var Y1 = this.pageY(evt.touches[0]); var X2 = this.pageX(evt.touches[1]); var Y2 = this.pageY(evt.touches[1]); var Distance = Math.sqrt(Math.pow((X2 - X1), 2) + Math.pow((Y2 - Y1), 2)); var zoomDelta = (Distance / this.startDistance); var zz = this.startZZ + zoomDelta - 1; if (zz > this.tileSource.maxzoom) { zz = this.tileSource.maxzoom; zoomDelta = this.zoomDeltaOld; } else this.zoomDeltaOld = zoomDelta; var x = (X1 + X2) / 2; var y = (Y1 + Y2) / 2; var diff = (Distance - this.startDistance) / this.startDistance * 100; if (Math.round(x) != Math.round(this.prevxy.x) && Math.round(y) != Math.round(this.prevxy.y) && Math.round(this.startDistance) != Math.round(Distance) && Math.abs(diff) > 15) { this.discretZoom(((zoomDelta < 1)? 
-1: 1),x, y); } this.prevxy = {x:x,y:y}; } }; this.end = function(evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { evt.returnValue = false; } this.prevxy = {x:0,y:0}; jsMaps.Native.Dom.removeClass(document.body,'jsMaps-Native-no-scroll'); var steps = 20; for (var i = 1; i <= steps; i++) { if (typeof this.zoomTimeouts[i] != 'undefined') { clearTimeout(this.zoomTimeouts[i]); } } if (this.movestarted) { this.lastMouseX = this.pageX(evt.touches[0]); this.lastMouseY = this.pageY(evt.touches[0]); if (this.moveMarker) { this.moveMarker = null; } // using this normalize some things are working better, others not so good. // delete it will solve some problems but bring other problems var now = new Date(evt.timeStamp); var timeDelta = now - this.lastMoveTime; if (this.wheelSpeedConfig["moveAnimateDesktop"] && timeDelta != 0) { if (this.movestarted) { if (this.moveAnimationBlocked == false) { var speedX = (this.lastMoveX - this.moveX) / timeDelta; var speedY = (this.lastMoveY - this.moveY) / timeDelta; var maxSpeed = 200; if (speedX > maxSpeed)speedX = maxSpeed; if (speedY > maxSpeed)speedY = maxSpeed; if (speedX < -maxSpeed)speedX = -maxSpeed; if (speedY < -maxSpeed)speedY = -maxSpeed; if (Math.abs(speedX) > this.wheelSpeedConfig["animateMinSpeed"] || Math.abs(speedY) > this.wheelSpeedConfig["animateMinSpeed"]) { this.animateMove(speedX, speedY); } } } } var that = this; var tempFunction = function () { if (that.movestarted) { jsMaps.Native.Event.trigger(that.mapParent, jsMaps.api.supported_events.dragend); jsMaps.Native.Event.trigger(that.mapParent, jsMaps.api.supported_events.idle); } that.movestarted = false; }; setTimeout(tempFunction, 1); } if (evt.touches.length == 1) { this.startMoveX = this.moveX - evt.touches[0].pageX / this.factor / this.sc; this.startMoveY = this.moveY - evt.touches[0].pageY / this.factor / this.sc; //this.startDistance = 0; //this.startZZ = this.position.zoom; } }; /** * mouse events * (distance measure code not 
in use anymore) * * @param evt * @returns {*} */ this.pageX = function (evt) { try { var px = (evt.pageX === undefined) ? evt.clientX + document.body.scrollLeft: evt.pageX; return px - this.mapLeft; } catch (e) { return this.lastMouseX; } }; /** * mouse events * (distance measure code not in use anymore) * * @param evt * @returns {*} */ this.pageY = function (evt) { try { var py = (evt.pageY === undefined) ? evt.clientY + document.body.scrollTop: evt.pageY; return py - this.mapTop; } catch (e) { return this.lastMouseY; } }; this.doubleclickBlocked = false; this.doubleclick = function (evt) { this.discretZoom(1, this.pageX(evt), this.pageY(evt)); }; this.leftClick = null; this.mousedown = function (evt) { this.mapParent.focus(); if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { window.returnValue = false; // The IE way } if (jsMaps.Native.Browser.ie) this.leftClick = true; this.lastMouseX = this.pageX(evt); this.lastMouseY = this.pageY(evt); this.moveAnimationBlocked = true; if (this.mousedownTime2 != null) { var now = (new Date()).getTime(); if (now - this.mousedownTime2 < this.doubleclickTime2) { this.doubleclick(evt); return; } } this.mousedownTime2 = (new Date()).getTime(); if (evt.shiftKey) { this.selectRectLeft = this.pageX(evt); this.selectRectTop = this.pageY(evt); this.selectRect = document.createElement("div"); this.selectRect.style.left = this.selectRectLeft + "px"; this.selectRect.style.top = this.selectRectTop + "px"; this.selectRect.style.border = "1px solid gray"; if (!this.internetExplorer) { this.selectRect.style.opacity = 0.5; this.selectRect.style.backgroundColor = "white"; } this.selectRect.style.position = "absolute"; this.map.parentNode.appendChild(this.selectRect); } else { //this.hideOverlays(); this.startMoveX = this.moveX - (this.pageX(evt)) / this.factor / this.sc; this.startMoveY = this.moveY - (this.pageY(evt)) / this.factor / this.sc; this.movestarted = true; 
jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.dragstart); jsMaps.Native.setCursor(this.clone,"grabbing"); } return false; }; this.mousemove = function (evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { window.returnValue = false; // The IE way } var leftClick = jsMaps.Native.Event.leftClick(evt); if (leftClick == false || (this.leftClick !== null && this.leftClick == false)) { this.movestarted = false; jsMaps.Native.setCursor(this.clone,"grab"); return; } if (this.draggable == false) return; this.lastMouseX = this.pageX(evt); this.lastMouseY = this.pageY(evt); if (evt.shiftKey) { if (this.selectRect) { this.selectRect.style.width = Math.abs(this.pageX(evt) - this.selectRectLeft) + "px"; this.selectRect.style.height = Math.abs(this.pageY(evt) - this.selectRectTop) + "px"; if (this.pageX(evt) < this.selectRectLeft) { this.selectRect.style.left = this.pageX(evt); } if (this.pageY(evt) < this.selectRectTop) { this.selectRect.style.top = this.pageY(evt); } jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.bounds_changed); jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.center_changed); } } else { if (this.movestarted) { jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.drag); this.lastMoveX = this.moveX; this.lastMoveY = this.moveY; this.lastMoveTime = new Date(); this.moveX = (this.pageX(evt)) / this.factor / this.sc + this.startMoveX; this.moveY = (this.pageY(evt)) / this.factor / this.sc + this.startMoveY; var center = new jsMaps.geo.Location(this.lat, this.lng); this.setCenter2(center, this.position.zoom); this.moveAnimationBlocked = false; jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.bounds_changed); jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.center_changed); } } return false; }; this.mouseup = function (evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { 
evt.returnValue = false; // The IE way } jsMaps.Native.setCursor(this.clone,"grab"); this.lastMouseX = this.pageX(evt); this.lastMouseY = this.pageY(evt); if (this.moveMarker) { this.moveMarker = null; } if (this.selectRect) { var p1 = this.XYTolatlng(this.selectRect.offsetLeft, this.selectRect.offsetTop + this.selectRect.offsetHeight); var p2 = this.XYTolatlng(this.selectRect.offsetLeft + this.selectRect.offsetWidth, this.selectRect.offsetTop); var inner_bounds = new jsMaps.Native.InnerBounds(p1, p2); this.setBounds(inner_bounds); this.selectRect.parentNode.removeChild(this.selectRect); this.selectRect = null; } // using this normalize some things are working better, others not so good. // delete it will solve some problems but bring other problems var now = new Date(evt.timeStamp); var timeDelta = now - this.lastMoveTime; if (this.wheelSpeedConfig["moveAnimateDesktop"] && timeDelta != 0) { if (this.movestarted) { if (this.moveAnimationBlocked == false) { var speedX = (this.lastMoveX - this.moveX) / timeDelta; var speedY = (this.lastMoveY - this.moveY) / timeDelta; var maxSpeed = 200; if (speedX > maxSpeed)speedX = maxSpeed; if (speedY > maxSpeed)speedY = maxSpeed; if (speedX < -maxSpeed)speedX = -maxSpeed; if (speedY < -maxSpeed)speedY = -maxSpeed; if (Math.abs(speedX) > this.wheelSpeedConfig["animateMinSpeed"] || Math.abs(speedY) > this.wheelSpeedConfig["animateMinSpeed"]) { this.animateMove(speedX, speedY); } } } } var that = this; var tempFunction = function () { if (that.movestarted) { jsMaps.Native.Event.trigger(that.mapParent,jsMaps.api.supported_events.dragend); jsMaps.Native.Event.trigger(that.mapParent,jsMaps.api.supported_events.idle); } that.movestarted = false; }; setTimeout(tempFunction, 1); }; this.startZoomTime = null; this.wheeling = false; this.zoomActive = true; /** * Mouse wheel * * @param evt */ this.mousewheel = function (evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { evt.returnValue = false; // The IE way 
    }
    this.mapParent.focus();
    this.zoomActive = true;
    if (!evt) evt = window.event;
    // NOTE(review): `delta` is assigned here before its `var delta` declaration below;
    // function-scope hoisting makes this the same variable, later overwritten.
    if (evt.wheelDelta) { /* IE/Opera/Chrome: wheelDelta is a multiple of 120 per notch. */
        delta = evt.wheelDelta / 120;
    } else if (evt.detail) { /** Mozilla case: detail is a multiple of 3, opposite sign. */
        delta = -evt.detail / 3;
        if (this.lastWheelDelta * delta < 0) {
            // Wheel direction reversed: suppress this step unless digizoom handles it.
            if (!this.wheelSpeedConfig["digizoom"]) {
                delta = 0;
            }
        }
        this.lastWheelDelta = -evt.detail / 3;
    }
    var direction = (delta < 0) ? -1: 1;
    var that = this;
    this.wheeling = true;
    if (this.wheelSpeedConfig["digizoom"]) {
        // Discrete stepping mode: delegate and stop.
        this.discretZoom(direction, this.pageX(evt), this.pageY(evt));
        return;
    }
    // Continuous zoom: speed grows while the wheel keeps turning, resets after a 300ms pause.
    if (!this.startZoomTime) {
        this.startZoomTime = (new Date());
        this.startZoomTime2 = (new Date());
        this.oldZoom = this.zoom();
        this.speed = 1;
    }
    var delta = (new Date()) - this.startZoomTime;   // ms since previous wheel event
    var delta2 = (new Date()) - this.startZoomTime2; // ms since this zoom gesture began
    // NOTE(review): tempFunc is created but never used.
    var tempFunc = function () {
        that.startZoomTime = new Date();
    };
    this.startZoomTime = new Date();
    if (delta > 300) {
        // Pause detected: restart the gesture from the current zoom.
        this.startZoomTime2 = new Date();
        this.oldZoom = this.zoom();
        this.speed = 1;
        delta2 = 0.1;
    }
    this.speed = this.speed * 2;
    if (this.speed > 5) this.speed = 5;
    var zoom = this.oldZoom + delta2 / 3000 * this.speed * direction;
    // NOTE(review): clamps against position.maxZoom/minZoom here, but against
    // tileSource.maxzoom/minzoom elsewhere — confirm both are kept in sync.
    if (zoom > this.position.maxZoom) zoom = this.position.maxZoom;
    if (zoom < this.position.minZoom) zoom = this.position.minZoom;
    this.scaleDivExec();
    this.centerAndZoomXY(this.center(), zoom, this.pageX(evt), this.pageY(evt));
};
this.zoomTimeouts = [];          // pending per-step timeouts of a discrete zoom animation
this.discretZoomBlocked = false; // re-entrancy guard, released 400ms after a zoom starts
/**
 * Animated single-step zoom (one integer level) around screen point (x,y),
 * eased over `steps` frames via discretZoomExec.
 *
 * @param direction +1 zoom in, -1 zoom out
 * @param x,y zoom anchor in map-div pixels
 */
this.discretZoom = function (direction, x, y) {
    var that = this;
    if (this.discretZoomBlocked) return;
    var func = function () {
        that.discretZoomBlocked = false;
    };
    this.zoomActive = true;
    setTimeout(func, 400);
    this.discretZoomBlocked = true;
    var steps = 20;
    // Cancel any still-pending frames of a previous discrete zoom.
    for (var i = 1; i <= 20; i++) {
        if (this.zoomTimeouts[i]) {
            clearTimeout(this.zoomTimeouts[i]);
        }
    }
    var start = this.zoom();
    var end = (direction == 1) ?
        Math.ceil(this.zoom() + 0.9): Math.floor(this.zoom() - 0.9);
    if (direction == -1) {
        // Zooming out: drop layers deeper than the start level to free memory.
        // NOTE(review): `q` lacks `var` (implicit global); `q > start` compares a string
        // key with a number (works via coercion, but fragile).
        for (q in this.layers) {
            if (this.layers.hasOwnProperty(q) == false) continue;
            if (q > start &&typeof this.layers[q]!='undefined') {
                this.map.removeChild(this.layers[q]['layerDiv']);
                this.layers[q] = false;
                delete this.layers[q];
            }
        }
    }
    var delta = Math.abs(start - end);
    this.scaleDivExec();
    // Ease with a quarter sine wave: schedule the per-frame zoom increments.
    var lastDZ = 0;
    for (var ii = 1; ii <= steps; ii++) {
        var rad = ii / steps * Math.PI / 2;
        var dz = direction * (Math.sin(rad)) * delta;
        var ddz = dz - lastDZ;
        // NOTE(review): index should almost certainly be `ii`, not `i` (`i` is left at 21
        // from the loop above), so every handle overwrites zoomTimeouts[21] and the
        // clearTimeout loop above cannot cancel these frames individually.
        this.zoomTimeouts[i] = this.discretZoomExec(x, y, ddz, ii, steps);
        lastDZ = dz;
    }
    if (end >= this.tileSource.minzoom && end <= this.tileSource.maxzoom) {
        jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.zoom_changed);
        jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.bounds_changed);
        jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.center_changed);
    }
};
/**
 * Schedules one frame of the discrete zoom animation.
 *
 * @param x,y zoom anchor in map-div pixels
 * @param dz zoom increment for this frame
 * @param i frame index (1-based); on the last frame the zoom is rounded to an integer
 * @param steps total frame count
 * @returns timeout handle
 */
this.discretZoomExec = function (x, y, dz, i, steps) {
    var that = this;
    var tempFunc = function () {
        var zoom = that.zoom() + dz;
        if (i == steps) zoom = Math.round(zoom);
        that.centerAndZoomXY(that.center(), zoom, x, y);
    };
    return setTimeout(tempFunc, i*15);
};
/**
 * Kinetic panning: map continues moving after mouse up, decelerating each frame.
 *
 * @param speedX horizontal speed (px/ms at release, scaled by faktor)
 * @param speedY vertical speed
 * @param faktor scale divisor; defaults to 2^zoom on the first call
 */
this.animateMove = function (speedX, speedY, faktor) {
    if (jsMaps.Native.Browser.ie && jsMaps.Native.Utils.TRANSFORM == false) return;
    if (typeof faktor == 'undefined') faktor=Math.pow(2,this.zoom());
    clearTimeout(this.animateMoveTimeout);
    // Stop once both components fall below the configured minimum speed.
    if (Math.abs(speedX) < this.wheelSpeedConfig["animateMinSpeed"]/faktor && Math.abs(speedY) < this.wheelSpeedConfig["animateMinSpeed"]/faktor){
        this.moving=false;
        this.setCenter2(this.position.center, this.position.zoom);
        return;
    }
    var framesPerSecond=50;
    this.moveX += -speedX;
    this.moveY += -speedY;
    // NOTE(review): `that` lacks `var` — leaks to the enclosing/global scope.
    that = this;
    // Decelerate along the motion direction by moveAnimationSlowdown per frame.
    var speed=Math.sqrt(Math.pow(speedX,2) + Math.pow(speedY,2));
    var fx=speedX/speed;
    var fy=speedY/speed;
    var tempFunction = function()
    {
        var newSpeedX=speedX - fx*that.wheelSpeedConfig["moveAnimationSlowdown"]/faktor;
        var newSpeedY=speedY - fy*that.wheelSpeedConfig["moveAnimationSlowdown"]/faktor;
        that.animateMove(newSpeedX,newSpeedY,faktor);
    };
    this.animateMoveTimeout = window.setTimeout(tempFunction, 1/framesPerSecond * 1000);
    this.setCenter2(this.position.center, this.position.zoom);
};
//*** zoom animation ***
this.autoZoomInTimeout = null;  // pending timeout of the recursive auto-zoom animation
this.autoZoomStartTime = null;  // ms-of-second timestamp of the previous animation frame
/**
 * Animated zoom by `z` levels toward screen point (x,y), recursing in
 * ~0.2-level steps every 40ms until the remaining delta fits in one step.
 *
 * @param x,y zoom anchor in page pixels (map offset is subtracted below)
 * @param z remaining zoom delta (may be negative for zoom out)
 */
this.autoZoomIn = function (x, y, z) {
    if (this.autoZoomInTimeout) window.clearTimeout(this.autoZoomInTimeout);
    var stepwidth = 0.20;
    if (z < 0) {
        stepwidth = -stepwidth
    }
    // zoomGap: this is the final step of the animation.
    var zoomGap = Math.abs(z) <= Math.abs(stepwidth);
    //this.hideOverlays();
    var dzoom = stepwidth;
    var zoom = this.position.zoom + dzoom;
    zoom = Math.round(zoom * 1000) / 1000;
    if (zoomGap) {
        // Snap the final frame to an integer zoom level.
        if (z < 0) {
            zoom = Math.floor(zoom);
        } else {
            zoom = Math.ceil(zoom - 0.2);
        }
        dzoom = zoom - this.position.zoom;
    }
    // NOTE(review): `factor` lacks `var` — implicit global.
    factor = Math.pow(2, zoom);
    // Keep the anchor point fixed: shift the center by the scaled anchor offset.
    var zoomCenterDeltaX = (x - this.mapLeft) - this.width / 2;
    var zoomCenterDeltaY = (y - this.mapTop) - this.height / 2;
    var f = Math.pow(2, dzoom);
    var dx = zoomCenterDeltaX - zoomCenterDeltaX * f;
    var dy = zoomCenterDeltaY - zoomCenterDeltaY * f;
    var that = this;
    // NOTE(review): getMilliseconds() wraps every second — presumably meant getTime();
    // timeDelta can go negative across a second boundary. TODO confirm intent.
    var now = new Date().getMilliseconds();
    var timeDelta = (this.autoZoomStartTime) ?
        now - this.autoZoomStartTime: 0;
    this.autoZoomStartTime = now;
    var tempFunction;
    if (timeDelta < 100 || zoomGap) {
        if (zoom >= this.tileSource.minzoom && zoom <= this.tileSource.maxzoom) {
            this.moveX = this.moveX + dx / factor;
            this.moveY = this.moveY + dy / factor;
        }
        var center = new jsMaps.geo.Location(this.lat, this.lng);
        if (zoom > this.tileSource.maxzoom) zoom = this.tileSource.maxzoom;
        if (zoom < this.tileSource.minzoom) zoom = this.tileSource.minzoom;
        tempFunction = function () {
            that.setCenter2(center, zoom);
        };
        setTimeout(tempFunction, 1);
    }
    var newz = z - dzoom;
    if (!zoomGap) {
        // Not done yet: schedule the next animation frame.
        tempFunction = function () {
            that.autoZoomIn(x, y, newz);
        };
        this.autoZoomInTimeout = window.setTimeout(tempFunction, 40);
    }
};
/**
 * Same as centerAndZoom but the zoom anchor is screen point (x,y)
 * instead of the map center.
 *
 * @param center target center (NOTE(review): shadowed by `var center` below,
 *               so this parameter is effectively ignored — the current
 *               this.lat/this.lng are used instead; confirm intended)
 * @param zoom target (possibly fractional) zoom
 * @param x,y anchor point in map-div pixels
 */
this.centerAndZoomXY = function (center, zoom, x, y) {
    var factor = Math.pow(2, zoom);
    var zoomCenterDeltaX = x - this.mapsize.width / 2;
    var zoomCenterDeltaY = y - this.mapsize.height / 2;
    var dzoom = zoom - this.zoom();
    var f = Math.pow(2, dzoom);
    // Offset needed to keep (x,y) stationary while scaling by f.
    var dx = zoomCenterDeltaX - zoomCenterDeltaX * f;
    var dy = zoomCenterDeltaY - zoomCenterDeltaY * f;
    if (zoom >= this.tileSource.minzoom && zoom <= this.tileSource.maxzoom) {
        this.moveX = this.moveX + dx / factor;
        this.moveY = this.moveY + dy / factor;
    }
    var center = new jsMaps.geo.Location(this.lat, this.lng);
    if (zoom > this.tileSource.maxzoom) zoom = this.tileSource.maxzoom;
    if (zoom < this.tileSource.minzoom) zoom = this.tileSource.minzoom;
    this.setCenter2(center, zoom);
};
/**
 * Set the map coordinates and zoom, clamped to the tile source's zoom range.
 * Resets any pending pixel offset and records the position for undo/redo.
 *
 * @param center jsMaps.geo.Location
 * @param zoom target zoom level
 */
this.centerAndZoom = function (center, zoom) {
    this.moveX = 0;
    this.moveY = 0;
    if (zoom > this.tileSource.maxzoom) zoom = this.tileSource.maxzoom;
    if (zoom < this.tileSource.minzoom) zoom = this.tileSource.minzoom;
    this.record();
    this.setCenterNoLog(center, zoom);
    this.scaleDivExec();
};
/**
 * Like setCenterNoLog, but also resets the pending pixel offset.
 *
 * @param center
 * @param zoom
 */
this.setCenter3 =
    function (center, zoom) {
        this.moveX = 0;
        this.moveY = 0;
        this.setCenterNoLog(center, zoom);
    };
/**
 * Same as setCenter but moveX,moveY are not reset (for internal use);
 * records the position for undo/redo first.
 *
 * @param center
 * @param zoom
 */
this.setCenter2 = function (center, zoom) {
    this.record();
    this.setCenterNoLog(center, zoom);
};
/**
 * Same as setCenter but no history item is generated (used by undo/redo).
 * Clamps zoom to the tile source range and triggers a layer redraw.
 *
 * @param center
 * @param zoom
 */
this.setCenterNoLog = function (center, zoom) {
    this.position.center = center;
    this.lat = center.lat;
    this.lng = center.lng;
    zoom = parseFloat(zoom);
    if (zoom > this.tileSource.maxzoom) zoom = this.tileSource.maxzoom;
    if (zoom < this.tileSource.minzoom) zoom = this.tileSource.minzoom;
    this.position.zoom = zoom;
    this.layer(this.map, this.lat, this.lng, this.moveX, this.moveY, zoom);
    this.executeCallbackFunctions();
};
/**
 * Getter/setter for the map center (no zoom change on read).
 * While a drag offset is pending, returns the visually-moved center.
 *
 * @param center optional new center
 * @returns {*} current center as jsMaps.geo.Location
 */
this.center = function (center) {
    if (center) {
        //this.position.center=center;
        this.centerAndZoom(center, this.getZoom());
    }
    if (this.moveX != 0 || this.moveY != 0) {
        return new jsMaps.geo.Location(this.movedLat, this.movedLng);
    }
    return this.position.center;
};
// Getter/setter for the zoom level.
// NOTE(review): `if (zoom)` treats a legitimate zoom of 0 as "no argument".
this.zoom = function (zoom) {
    if (zoom) {
        this.centerAndZoom(this.position.center, zoom);
    }
    return this.position.zoom;
};
// Pan by (x,y) screen pixels: converts to tile units and re-centers.
this.moveXY = function (x, y) {
    this.moveX = parseFloat(x) / this.factor / this.sc + this.moveDelayedX;
    this.moveY = parseFloat(y) / this.factor / this.sc + this.moveDelayedY;
    this.setCenter2(this.center(), this.zoom());
};
// Replace the tile source and force a full redraw.
this.tiles = function (tileSource) {
    this.clearMap();
    this.tileSource = tileSource;
};
this.tileOverlays = []; // extra tile layers drawn on top of the base tiles
// Register a tile overlay; returns the stored overlay object.
this.addTilesOverlay = function (t) {
    this.tileOverlays.push(t);
    var ov = this.tileOverlays[this.tileOverlays.length - 1];
    this.clearMap();
    return ov;
};
// Unregister a previously added tile overlay (identity comparison).
this.removeTilesOverlay = function (ov) {
    //alert(this.tileOverlays.length);
    for (var i = 0; i < this.tileOverlays.length; i++) {
        var overlay = this.tileOverlays[i];
        if (ov == overlay) {
            //ov.clear();
            this.tileOverlays.splice(i, 1);
            break;
        }
    }
    this.clearMap();
};
// Current center, accounting for a pending drag offset.
// NOTE(review): returns undefined when no center has been set yet.
this.getCenter = function () {
    var center;
    if (this.moveX != 0 || this.moveY != 0) {
        center = new jsMaps.geo.Location(this.movedLat, this.movedLng);
    } else {
        if (!this.position.center) {
        } else {
            center = this.position.center;
        }
    }
    return center;
};
/**
 * Read or set bounds. The coordinates at the corners of the map div
 * (sw, ne) would be better (change it!).
 *
 * @param b optional bounds to apply
 * @returns {*} current bounds when called without argument
 */
this.mapBounds = function (b) {
    if (b) {
        this.setBounds(b);
    } else {
        return this.getBounds();
    }
};
// Bounds of the visible map: sw = bottom-left pixel, ne = top-right pixel.
this.getBounds = function () {
    var sw = this.XYTolatlng(0, this.height);
    var ne = this.XYTolatlng(this.width, 0);
    return new jsMaps.Native.InnerBounds(sw, ne);
};
/**
 * Like setCenter but fits two GPS points: centers between them (in
 * Mercator y) and picks the largest zoom at which both remain visible.
 *
 * @param b jsMaps.Native.InnerBounds with sw()/ne()
 */
this.setBounds = function (b) {
    //this.normalize();
    //the setbounds should be a mathematical formula and not guessing around.
    //if you know this formula please add it here.
    //this.getSize();
    var p1 = b.sw();
    var p2 = b.ne();
    var minlat = p1.lat;
    var maxlat = p2.lat;
    var minlng = p1.lng;
    var maxlng = p2.lng;
    // Work in Mercator-projected latitude (lat2y) so vertical centering is correct.
    var minlat360 = lat2y(minlat);
    var maxlat360 = lat2y(maxlat);
    var centerLng = (minlng + maxlng) / 2;
    var centerLat360 = (minlat360 + maxlat360) / 2;
    var centerLat = y2lat(centerLat360);
    var center = new jsMaps.geo.Location(centerLat, centerLng);
    var extendY = Math.abs(maxlat360 - minlat360);
    var extendX = Math.abs(maxlng - minlng);
    var extend, screensize;
    // Use whichever axis is the limiting one relative to the div's aspect ratio.
    if (extendX / this.width > extendY / this.height) {
        extend = extendX;
        screensize = this.width;
    } else {
        extend = extendY;
        screensize = this.height;
    }
    var scalarZoom = 360 / extend;
    var screenfactor = 512 / screensize;
    // log2 of the degrees-to-pixels ratio gives the fractional zoom level.
    var zoom = (Math.log(scalarZoom / screenfactor)) / (Math.log(2)) + 1;
    if (zoom > this.tileSource.maxzoom) zoom = this.tileSource.maxzoom;
    if (zoom < this.tileSource.minzoom) zoom = this.tileSource.minzoom;
    if (this.position.center) {
        if (this.wheelSpeedConfig["rectShiftAnimate"]) {
            this.animatedGoto(center, zoom,
                this.wheelSpeedConfig["rectShiftAnimationTime"]);
        } else {
            this.centerAndZoom(center, zoom);
        }
    } else {
        this.centerAndZoom(center, zoom);
    }
};
this.animatedGotoStep = null;   // current frame index of the animated pan/zoom
this.animatedGotoTimeout = [];  // pending frame timeouts (cleared on a new animation)
/**
 * Animated pan+zoom to newCenter/newZoom over `time` ms, interpolating
 * lat/lng/zoom linearly across time/10 frames (one every 10ms).
 *
 * @param newCenter jsMaps.geo.Location target
 * @param newZoom target zoom
 * @param time animation duration in ms
 */
this.animatedGoto = function (newCenter, newZoom, time) {
    //this.hideOverlays();
    var zoomSteps = time / 10;
    var oldCenter = this.getCenter();
    var newLat = newCenter.lat;
    var newLng = newCenter.lng;
    var oldLat = oldCenter.lat;
    var oldLng = oldCenter.lng;
    var oldZoom = this.getZoom();
    var dLat = (newLat - oldLat) / zoomSteps;
    var dLng = (newLng - oldLng) / zoomSteps;
    var dZoom = (newZoom - oldZoom) / zoomSteps;
    var dMoveX = this.moveX / zoomSteps;
    var dMoveY = this.moveY / zoomSteps;
    var oldMoveX = this.moveX;
    var oldMoveY = this.moveY;
    this.animatedGotoStep = 0;
    var that = this;
    // Cancel any frames of a previous animation.
    // NOTE(review): `timeout` lacks `var` — implicit global.
    while (timeout = this.animatedGotoTimeout.pop()) {
        clearTimeout(timeout);
    }
    for (var i = 0; i < zoomSteps; i++) {
        var tempFunction = function () {
            that.animatedGotoExec(oldLat, oldLng, oldZoom, dLat, dLng, dZoom, oldMoveX, oldMoveY, dMoveX, dMoveY)
        };
        this.animatedGotoTimeout[i] = window.setTimeout(tempFunction, 10 * i);
    }
};
// One frame of animatedGoto: advances animatedGotoStep and re-centers.
this.animatedGotoExec = function (oldLat, oldLng, oldZoom, dLat, dLng, dZoom, oldMoveX, oldMoveY, dMoveX, dMoveY) {
    this.moveX = -dMoveX;
    this.moveY = -dMoveY;
    var lat = oldLat + dLat * this.animatedGotoStep;
    var lng = oldLng + dLng * this.animatedGotoStep;
    var zoom = oldZoom + dZoom * this.animatedGotoStep;
    this.animatedGotoStep++;
    this.centerAndZoom(new jsMaps.geo.Location(lat, lng), zoom);
};
// Current (possibly fractional) zoom level.
this.getZoom = function () {
    return this.position.zoom;
};
// Integer zoom level of the currently rendered tile layer.
this.getIntZoom = function () {
    return this.intZoom;
};
/**
 * WGS84 lat/lng to pixel x,y relative to the map div.
 *
 * @param point object with .lat/.lng
 * @returns {Array} array used as a map with point["x"], point["y"]
 */
this.latlngToXY = function (point) {
    var lat = point.lat;
    var lng = point.lng;
    // Wrap latitudes outside +/-90 (antimeridian-style wrap used throughout this file).
    if(lat >90) lat =lat -180;
    if(lat <-90) lat =lat +180;
    var worldCenter = this.getCenter();
    var intZoom = this.getIntZoom();
    var tileCenter = getTileNumber(worldCenter.lat, worldCenter.lng, intZoom);
    var
        tileTest = getTileNumber(lat, lng, intZoom);
    var x = (tileCenter[0] - tileTest[0]) * this.tileW * this.sc - this.width / 2;
    var y = (tileCenter[1] - tileTest[1]) * this.tileW * this.sc - this.height / 2;
    point = [];
    point["x"] = -x;
    point["y"] = -y;
    return (point);
};
/**
 * Screen (map div) coordinates to lat,lng.
 *
 * @param x
 * @param y
 * @returns {jsMaps.geo.Location} or undefined when no center is set yet
 * @constructor
 */
this.XYTolatlng = function (x, y) {
    var center = this.getCenter();
    if (!center) return;
    var factor = Math.pow(2, this.intZoom);
    var centerLat = center.lat;
    var centerLng = center.lng;
    var xypoint = getTileNumber(centerLat, centerLng, this.intZoom);
    var dx = x - this.width / 2;
    var dy = y - this.height / 2;
    // Convert the pixel offset to tile units, then to degrees (Mercator for latitude).
    var lng = (xypoint[0] + dx / this.tileW / this.sc) / factor * 360 - 180;
    var lat360 = (xypoint[1] + dy / this.tileH / this.sc) / factor * 360 - 180;
    var lat = -y2lat(lat360);
    if(lat >90) lat =lat -180;
    if(lat <-90) lat =lat +180;
    return new jsMaps.geo.Location(lat, lng);
};
// Convenience: lat/lng under the mouse pointer of the given event.
this.mouseToLatLng = function (evt) {
    var x = this.pageX(evt);
    var y = this.pageY(evt);
    return this.XYTolatlng(x, y);
};
/**
 * For iPhone, to make the page fullscreen (maybe not working):
 * re-reads the div size after a delay.
 */
this.reSize = function () {
    var that = this;
    //setTimeout("window.scrollTo(0,1)",500);
    var tempFunction = function () {
        that.getSize(that)
    };
    window.setTimeout(tempFunction, 1050);
};
/**
 * Read the size of the DIV that will contain the map and its
 * absolute page offset (summed up the offsetParent chain).
 */
this.getSize = function () {
    this.width = this.map.parentNode.offsetWidth;
    this.height = this.map.parentNode.offsetHeight;
    var obj = this.map;
    var left = 0;
    var top = 0;
    do {
        left += obj.offsetLeft;
        top += obj.offsetTop;
        obj = obj.offsetParent;
    } while (obj.offsetParent);
    this.mapTop = top;
    this.mapLeft = left;
};
//for undo,redo
this.recordArray = []; // history of [lat, lng, zoom] entries
// Append the current position to the undo/redo history (no-op before first center).
this.record = function () {
    var center = this.getCenter();
    if (center) {
        var lat = center.lat;
        var lng = center.lng;
        var zoom = this.getZoom();
        var item = [lat, lng, zoom];
        this.recordArray.push(item);
    }
};
/**
 * Jump to history entry i (1-based; out-of-range indexes are ignored)
 * without generating a new history item.
 */
this.play = function
    (i) {
    if (i < 1) return;
    if (i > (this.recordArray.length - 1)) return;
    var item = this.recordArray[i];
    var center = new jsMaps.geo.Location(item[0], item[1]);
    //undo,redo must not generate history items
    this.moveX = 0;
    this.moveY = 0;
    this.setCenter3(center, item[2]);
};
/**
 * Simple one-shot countdown used to throttle layer redraws.
 *
 * @param interval countdown length in ms
 */
this.myTimer = function(interval) {
    /**
     * reset/stop countdown.
     */
    this.reset = function() {
        this._isRunning = false;
    };
    /**
     * Is countdown running? Returns true exactly once, after the interval
     * has elapsed since start() (and stops itself).
     */
    this.isTimeRunning = function() {
        if(false == this._isRunning) return false;
        var now = new Date();
        if(this.time + this.myInterval > now.getTime()) return false;
        this._isRunning = false;
        return true;
    };
    /**
     * Start countdown.
     */
    this.start = function() {
        this._isRunning = true;
        var d = new Date();
        this.time = d.getTime();
    };
    /**
     * Setter/getter for the interval (ms).
     */
    this.interval = function(arg1) {
        if(typeof(arg1) != "undefined") {
            this.myInterval = parseInt(arg1);
        }
        return this.myInterval;
    };
    // Constructor
    this.reset();
    this.interval(interval);
    return this;
};
/*================== LAYERMANAGER (which layer is visible) =====================
 Description: This method decides which zoom layer is visible at the moment.
 It has the same parameters as the "draw" method, but no "intZoom".
 These layers are NOT tile or vector overlays.
 ========================================================================= */
this.layerDrawLastFrame = null; // timeout for the deferred final redraw
this.doTheOverlays = true;      // false while overlay rendering is too slow to keep up
this.finalDraw = false;         // true for the settling redraw after interaction stops
this.layerOldZoom = 0;          // zoom at the previous layer() call (detects zoom-out)
this.moveDelayedX = 0;          // pixel offset accumulated while redraws were throttled
this.moveDelayedY = 0;
this.layerTimer = new this.myTimer(400);
/**
 * Decide which integer-zoom tile layer(s) to draw/show for the given
 * position, throttled by layerTimer; schedules its own follow-up redraws.
 *
 * @param map layer container element
 * @param lat,lng map center
 * @param moveX,moveY pending pixel offset in tile units
 * @param zoom fractional zoom (defaults to current)
 */
this.layer = function (map, lat, lng, moveX, moveY, zoom) {
    var delta = (new Date()) - this.startZoomTime;
    this.stopRenderOverlays();
    if (!zoom) zoom = this.getZoom();
    var intZoom = (this.wheelSpeedConfig["digizoom"]) ?
        Math.floor(zoom): Math.round(zoom);
    if (this.layerDrawLastFrame) {
        window.clearTimeout(this.layerDrawLastFrame);
        this.layerDrawLastFrame = null;
    }
    var that = this;
    var tempFunction;
    if (this.layerTimer.isTimeRunning() || this.finalDraw == false ) {
        //the last frames must be drawn to have a good result:
        //schedule a settling redraw 250ms after the last call.
        tempFunction = function () {
            if(intZoom == that.visibleZoom){
                that.finalDraw = true;
            }
            that.layer(map, lat, lng, moveX, moveY, zoom);
        };
        this.visibleZoom=parseInt(this.visibleZoom);
        if(!that.finalDraw){
            this.layerDrawLastFrame = window.setTimeout(tempFunction, 250);
        }
        if (this.layerTimer.isTimeRunning()) {
            // Throttled: remember the offset and bail out until the timer fires.
            this.moveDelayedX = moveX; //used in method moveXY
            this.moveDelayedY = moveY;
            return;
        }
    }
    this.layerTimer.start();
    this.moveDelayedX = 0;
    this.moveDelayedY = 0;
    // Hide every layer; the relevant ones are re-shown below.
    for (var i = 0; i < 22; i++) {
        if (this.layers[i]) this.layers[i]["layerDiv"].style.visibility = "hidden";
    }
    // Zooming out onto a not-yet-complete layer: keep showing the deeper one.
    if (this.layerOldZoom > zoom && !this.finalDraw) {
        if (this.layers[intZoom] && !this.layers[intZoom]["loadComplete"]) {
            this.visibleZoom = intZoom + 1;
        }
    }
    this.intZoom = intZoom;
    if (intZoom > this.tileSource.maxzoom) intZoom = this.tileSource.maxzoom;
    if (!this.visibleZoom) {
        this.visibleZoom = intZoom;
        this.oldIntZoom = intZoom;
    }
    this.factor = Math.pow(2, intZoom);
    var zoomDelta = zoom - intZoom;
    this.sc = Math.pow(2, zoomDelta); // fractional scale applied on top of the integer layer
    // Calculate the next displayed layer: step at most one level toward intZoom,
    // and only when the gap is small enough to be worth bridging.
    this.loadingZoomLevel = intZoom;
    if (this.visibleZoom < intZoom) {
        if (Math.abs(this.visibleZoom - intZoom) < 4) {
            this.loadingZoomLevel = parseInt(this.visibleZoom) + 1;
        }
    }
    // draw the layer with current zoomlevel
    this.draw(this.map, lat, lng, moveX, moveY, this.loadingZoomLevel, zoom, this.tileSource.src);
    this.layers[this.loadingZoomLevel]["layerDiv"].style.visibility = "";
    //if the current zoomlevel is not loaded completely, there must be a second layer displayed
    if (intZoom != this.visibleZoom && typeof this.layers[this.visibleZoom] != 'undefined') {
        if (this.visibleZoom < intZoom + 2) {
            this.draw(this.map, lat, lng, moveX, moveY,
                this.visibleZoom, zoom, this.tileSource.src,true);
            this.layers[this.visibleZoom]["layerDiv"].style.visibility = "";
        } else {
            this.layers[this.visibleZoom]["layerDiv"].style.visibility = "hidden";
        }
    }
    // pre Load for zoom out
    if (intZoom == this.visibleZoom && typeof this.layers[this.visibleZoom - 1] != 'undefined') {
        this.draw(this.map, lat, lng, moveX, moveY, this.visibleZoom - 1, zoom, this.tileSource.src,true);
        this.layers[this.visibleZoom - 1]["layerDiv"].style.visibility = "hidden";
    }
    // Promote the loading layer to visible once all of its tiles arrived.
    if (this.layers[this.loadingZoomLevel]["loadComplete"]) {
        if (this.visibleZoom != intZoom) {
            this.layers[this.loadingZoomLevel]["loadComplete"] = false;
            this.hideLayer(this.visibleZoom);
            this.visibleZoom = this.loadingZoomLevel;
        }
    }
    if (this.quadtreeTimeout) clearTimeout(this.quadtreeTimeout);
    if (this.loadingZoomLevel != intZoom) {
        // Still stepping toward the target zoom: try again shortly.
        // NOTE(review): `that` lacks `var` here — implicit global.
        that = this;
        tempFunction = function () {
            that.layer(map, lat, lng, moveX, moveY);
        };
        this.quadtreeTimeout = window.setTimeout(tempFunction, 100);
    }
    if (this.oldIntZoom != this.intZoom) {
        if (this.oldIntZoom != this.visibleZoom) {
            this.hideLayer(this.oldIntZoom);
        }
    }
    this.oldIntZoom = intZoom;
    // Render vector/marker overlays only while it stays fast enough (>10ms disables it
    // until the final settling draw).
    if (this.doTheOverlays || this.finalDraw || this.layerOldZoom == this.zoom()) {
        var startTime = new Date();
        this.lastDX = this.moveX;
        this.lastDY = this.moveY;
        this.renderOverlays();
        this.layerOldZoom = this.zoom();
        var duration = (new Date() - startTime);
        this.doTheOverlays = !(duration > 10 && !this.finalDraw);
    } else {
        this.hideOverlays();
    }
    that = this;
    var func = function () { that.blocked = false; that.layerTimer.reset();};
    if (this.layerBlockTimeout) clearTimeout(this.layerBlockTimeout);
    this.layerBlockTimeout = window.setTimeout(func, 20);
    this.finalDraw = false;
};
/* ==================== DRAW (speed optimized!!!)===============================
This function draws one layer. It is highly optimized for iPhone.
Please DO NOT CHANGE things except you want to increase speed!
For optimization you need a benchmark test.
How it works: The position of the images is fixed. The layer (not the images) is
moved, because of better performance. Even zooming does not change the position
of the images, if 3D CSS is active (webkit).
This method uses "this.layers", "this.oldIntZoom", "this.width", "this.height".
===================================================================================*/
/**
 * Draw (or update) the tile layer for one integer zoom level.
 *
 * @param map layer container element
 * @param lat,lng map center
 * @param moveX,moveY pending pixel offset in tile units
 * @param intZoom integer zoom level of this layer
 * @param zoom fractional zoom currently displayed
 * @param tileFunc function(x, y, zoom) returning a tile URL
 * @param preLoad when true, tiles get a placeholder src and load lazily
 */
this.draw = function (map, lat, lng, moveX, moveY, intZoom, zoom, tileFunc,preLoad) {
    this.framesCounter++;
    var that = this;
    // framesCounter tracks draws per second (decremented 1s later).
    var tempFunction = function () {
        that.framesCounter--
    };
    if (typeof preLoad == 'undefined') preLoad = false;
    window.setTimeout(tempFunction, 1000);
    var factor = Math.pow(2, intZoom);
    var latDelta,lngDelta,layerDiv;
    //create new layer
    if (!this.layers[intZoom]) {
        var tile = getTileNumber(lat, lng, intZoom);
        this.layers[intZoom] = [];
        // The layer's origin tile/position; all later draws are relative to it.
        this.layers[intZoom]["startTileX"] = tile[0];
        this.layers[intZoom]["startTileY"] = tile[1];
        this.layers[intZoom]["startLat"] = lat2y(lat);
        this.layers[intZoom]["startLng"] = lng;
        this.layers[intZoom]["images"] = {};
        layerDiv = document.createElement("div");
        layerDiv.setAttribute("zoomlevel", intZoom);
        layerDiv.style.position = "relative";
        //higher zoomlevels are placed in front of lower zoomlevels.
        //no z-index in use. z-index could give unwanted side effects to your application if you use this lib.
        var layers = map.childNodes;
        var appended = false;
        for (var i = layers.length - 1; i >= 0; i--) {
            var l = layers.item(i);
            if (l.getAttribute("zoomlevel") < intZoom) {
                this.map.insertBefore(layerDiv, l);
                appended = true;
                //break;
            }
        }
        if (!appended) this.map.appendChild(layerDiv);
        //for faster access, a reference to this div is kept in an array
        this.layers[intZoom]["layerDiv"] = layerDiv;
        latDelta = 0;
        lngDelta = 0;
    } else {
        //The layer with this zoomlevel already exists.
        //If there are new lat,lng values, the lat,lng delta is calculated.
        layerDiv = this.layers[intZoom]["layerDiv"];
        latDelta = lat2y(lat) - this.layers[intZoom]["startLat"];
        lngDelta = lng - this.layers[intZoom]["startLng"];
    }
    layerDiv.style.visibility = "hidden";
    layerDiv.style.opacity = 1;
    //if the map is moved with drag/drop, moveX,moveY give the movement in pixels (not degrees as lat/lng);
    //here the real values of lat,lng are calculated
    //this.movedLng = (this.layers[intZoom]["startTileX"] / factor - moveX / this.tileW) * 360 - 180 + lngDelta;
    var ttt = this.latlngToXY(this.getCenter());
    this.movedLng = (this.layers[intZoom]["startTileX"] / factor - moveX / this.tileW) * 360 - 180 + lngDelta;
    var movedLat360 = (this.layers[intZoom]["startTileY"] / factor - moveY / this.tileH) * 360 - 180 - latDelta;
    this.movedLat = -y2lat(movedLat360); // -latDelta; //the bug
    // calculate real x,y
    // NOTE(review): `tile` was declared with var only inside the branch above; hoisting
    // makes this the same function-scoped variable.
    tile = getTileNumber(this.movedLat, this.movedLng, intZoom);
    var x = tile[0];
    var y = tile[1];
    var intX = Math.floor(x);
    var intY = Math.floor(y);
    var startX = this.layers[intZoom]["startTileX"];
    var startY = this.layers[intZoom]["startTileY"];
    var startIntX = Math.floor(startX);
    var startIntY = Math.floor(startY);
    var startDeltaX = -startX + startIntX;
    var startDeltaY = -startY + startIntY;
    var dx = x - startX;
    var dy = y - startY;
    var dxDelta = dx - startDeltaX;
    var dyDelta = dy - startDeltaY;
    //set all images to hidden (only in the array) - the values are used later in this function
    for (var vimg in this.layers[intZoom]["images"]) {
        this.layers[intZoom]["images"][vimg]["visibility"] = false;
    }
    //for debug only
    var width = this.width;
    var height = this.height;
    var zoomDelta = zoom - intZoom;
    // NOTE(review): `sc` has no var here; the `var sc` near the end of this function
    // hoists it, so this is function-scoped (not a global) — but easy to misread.
    sc = Math.pow(2, zoomDelta);
    if (sc < 0.5) sc = 0.5;
    //here the bounds of the map are calculated.
    //there is NO preload of images. Preload makes everything slow.
    var minX = Math.floor((-width / 2 / sc) / this.tileW + dxDelta);
    var maxX = Math.ceil((width / 2 / sc) / this.tileW + dxDelta);
    var minY = Math.floor((-height / 2 / sc) / this.tileH + dyDelta);
    var maxY = Math.ceil((height / 2 / sc) / this.tileH + dyDelta);
    var minsc;
    //now the images are placed onto the layer
    for (var i = minX; i < maxX; i++) {
        for (var j = minY; j < maxY; j++) {
            var xxx = Math.floor(startX + i);
            var yyy = Math.floor(startY + j);
            // The world is recursive. West of America is Asia.
            var xx = xxx % factor;
            var yy = yyy;
            if (xx < 0) xx = xx + factor; // modulo function gives negative values for negative numbers
            if (yy < 0) continue;
            if (yy >= factor) continue;
            var src = tileFunc(xx, yy, intZoom);
            var id = src + "-" + xxx + "-" + yyy;
            //if zoom out, without this too many images are loaded
            if (this.wheelSpeedConfig["digizoom"]) {
                minsc=1;
            }else{
                minsc=0.5;
            }
            // draw images only if they don't exist on the layer
            if (this.layers[intZoom]["images"][id] == null && sc >=minsc) {
                var img = document.createElement("img");
                img.style.visibility = "hidden";
                // During active zoom/pan, disable CSS transitions on new tiles.
                if ((this.discretZoomBlocked == true || this.zoomActive == true || this.wheeling == true || this.movestarted == false)) {
                    jsMaps.Native.Dom.addClass(img,'map-image no-anim');
                } else {
                    jsMaps.Native.Dom.addClass(img,'map-image');
                    if (jsMaps.Native.Browser.any3d && jsMaps.Native.Utils.TRANSITION != false) {
                        // After the first transition finishes, pin the tile (no further animation).
                        var fn = function (evt) {
                            jsMaps.Native.Event.preventDefault(evt);
                            jsMaps.Native.Event.stopPropagation(evt);
                            jsMaps.Native.Dom.addClass(this,'no-anim');
                        };
                        img.addEventListener(jsMaps.Native.Utils.TRANSITION_END, fn, false);
                    }
                }
                img.style.left = i * this.tileW + "px";
                img.style.top = j * this.tileH + "px";
                img.style.width = this.tileW + "px";
                img.style.height = this.tileH + "px";
                // add img before SVG, SVG will be visible
                if (layerDiv.childNodes.length > 0) {
                    layerDiv.insertBefore(img, layerDiv.childNodes.item(0));
                } else {
                    layerDiv.appendChild(img);
                }
                // To increase performance
                // all references are kept in an array
                this.layers[intZoom]["images"][id] = {};
                this.layers[intZoom]["images"][id]["img"] = img;
                this.layers[intZoom]["images"][id]["array"] = [];
                this.layers[intZoom]["images"][id]["array"].push(img);
                this.layers[intZoom]["loadComplete"] = false;
                //tileOverlays: clone one overlay tile per registered overlay on top of the base tile
                for (var ov in this.tileOverlays) {
                    if (this.tileOverlays.hasOwnProperty(ov) == false) continue;
                    var ovObj = this.tileOverlays[ov];
                    var ovImg = img.cloneNode(true);
                    var imgSrc = ovObj.src(xx, yy, intZoom);
                    var ovId = id + "_" + ov;
                    jsMaps.Native.Event.attach(ovImg, "load", this.imgLoaded, this, false);
                    if (this.discretZoomBlocked == true) {
                        jsMaps.Native.Dom.addClass(ovImg,'map-image no-anim');
                    } else {
                        jsMaps.Native.Dom.addClass(ovImg,'map-image');
                    }
                    ovImg.setAttribute("src", imgSrc);
                    ovImg.setAttribute("overlay", ov);
                    layerDiv.appendChild(ovImg);
                    this.layers[intZoom]["images"][id]["array"].push(ovImg);
                }
                // if the images are loaded, they will become visible in the imgLoaded function
                if (preLoad == false) {
                    jsMaps.Native.Event.attach(img, "load", this.imgLoaded, this, false);
                    jsMaps.Native.Event.attach(img, "error", this.imgError, this, false);
                }
                if (preLoad == false) {
                    img.setAttribute("src", src);
                } else {
                    // Preload mode: 1x1 transparent GIF placeholder; real URL kept in data-src.
                    img.setAttribute("src", "data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOwA=");
                    img.setAttribute("data-src", src);
                }
            } else if (this.layers[intZoom]["images"][id] != null && sc >=minsc) {
                // Tile already exists; if it was preloaded as a placeholder, load it for real now.
                var Img = this.layers[intZoom]["images"][id]["img"];
                if (Img.getAttribute('data-src')!=null && Img.getAttribute('data-src') != '' && preLoad == false) {
                    jsMaps.Native.Event.attach(Img, "load", this.imgLoaded, this, false);
                    jsMaps.Native.Event.attach(Img, "error", this.imgError, this, false);
                    Img.setAttribute('src',Img.getAttribute('data-src'));
                    Img.setAttribute('data-src','');
                }
            }
            // set all images that should be visible at the current view to visible (only in the layer);
            if(this.layers[intZoom]["images"][id]){
                this.layers[intZoom]["images"][id]["visibility"] = true;
            }
        }
    }
    // remove all images that are not loaded and are not visible in the current view.
    // if the image is out of the current view, there is no reason to load it.
    // Think about fast moving maps. Moving is faster than loading.
    // If you started in London and are already in Peking, you don't care
    // about images that show Vienna, for example.
    // this code is useless for webkit browsers (march 2010) because of bug:
    // https://bugs.webkit.org/show_bug.cgi?id=6656
    for (var vImg in this.layers[intZoom]["images"]) {
        if (this.layers[intZoom]["images"].hasOwnProperty(vImg) == false) continue;
        var overlayImages;
        var o;
        // Resolve any remaining placeholder tiles now that we are not preloading.
        if (this.layers[intZoom]["images"][vImg]['img'].getAttribute('data-src')!=null && this.layers[intZoom]["images"][vImg]['img'].getAttribute('data-src') != '' && preLoad == false) {
            jsMaps.Native.Event.attach(this.layers[intZoom]["images"][vImg]['img'], "load", this.imgLoaded, this, false);
            jsMaps.Native.Event.attach(this.layers[intZoom]["images"][vImg]['img'], "error", this.imgError, this, false);
            this.layers[intZoom]["images"][vImg]['img'].setAttribute('src',this.layers[intZoom]["images"][vImg]['img'].getAttribute('data-src'));
            this.layers[intZoom]["images"][vImg]['img'].setAttribute('data-src','');
        }
        if (this.layers[intZoom]["images"][vImg]["visibility"]) {
            // In view: show the base tile and any loaded overlay tiles.
            if (this.layers[intZoom]["images"][vImg]["array"][0].getAttribute("loaded") == "yes") {
                overlayImages = this.layers[intZoom]["images"][vImg]["array"];
                for (o = 0; o < overlayImages.length; o++) {
                    if (overlayImages[o].getAttribute("loaded") == "yes") {
                        overlayImages[o].style.visibility = "";
                    }
                }
            }
        } else {
            // Out of view: hide; remove entirely if it never finished loading.
            overlayImages = this.layers[intZoom]["images"][vImg]["array"];
            for (o = 0; o < overlayImages.length; o++) {
                this.layers[intZoom]["images"][vImg]["array"][o].style.visibility = "hidden";
                // delete img if not loaded and not needed at the moment
                if (this.layers[intZoom]["images"][vImg]["array"][o].getAttribute("loaded") != "yes") {
                    layerDiv.removeChild(this.layers[intZoom]["images"][vImg]["array"][o]);
                }
            }
            delete
                this.layers[intZoom]["images"][vImg]["img"];
            delete this.layers[intZoom]["images"][vImg];
        }
    }
    // Move and zoom the layer (the layer div is transformed, not the tiles).
    var sc = Math.pow(2, zoom - intZoom);
    this.scale = sc;
    var dxLeft = -(dxDelta * this.tileW);
    var dxTop = -(dyDelta * this.tileH);
    jsMaps.Native.Utils.setTransform(layerDiv,{x:dxLeft,y:dxTop},sc);
    jsMaps.Native.Utils.setTransformOrigin(layerDiv,{x:(-1 * dxLeft) ,y:(-1 * dxTop)});
    // Set the visibleZoom to visible
    layerDiv.style.visibility = "";
    // Not needed images are removed now. Let's check if all needed images are loaded already.
    var notLoaded = 0;
    var total = 0;
    for (var q in this.layers[this.loadingZoomLevel]["images"]) {
        if (this.layers[this.loadingZoomLevel]["images"].hasOwnProperty(q) == false) continue;
        total++;
        var imgCheck = this.layers[this.loadingZoomLevel]["images"][q]["array"][0];
        if (!(imgCheck.getAttribute("loaded") == "yes")) notLoaded++;
    }
    if (notLoaded < 1) this.layers[this.loadingZoomLevel]["loadComplete"] = true;
    if (this.loadingZoomLevel == intZoom) this.imgLoadInfo(total, notLoaded);
};
// ====== END OF DRAW ======
/**
 * Fade effect for integer zoom change.
 * @type {null}
 */
this.fadeOutTimeout = null;
/**
 * Fade a div out: via a CSS opacity transition when 3D/transitions are
 * available, otherwise by recursive 0.2-steps every 50ms; finally hides it.
 *
 * @param div element to fade
 * @param alpha current opacity (recursion counts it down to 0)
 */
this.fadeOut = function (div, alpha) {
    if (jsMaps.Native.Browser.ielt9) return;
    if (alpha > 0 && jsMaps.Native.Browser.any3d && jsMaps.Native.Utils.TRANSITION != false) {
        div.style[jsMaps.Native.Utils.TRANSITION] = 'opacity 500ms ease-out';
        div.style.opacity = 0;
        var fn = function (evt) {
            jsMaps.Native.Event.preventDefault(evt);
            jsMaps.Native.Event.stopPropagation(evt);
            div.style[jsMaps.Native.Utils.TRANSITION] = "";
        };
        div.addEventListener(jsMaps.Native.Utils.TRANSITION_END, fn, false);
        return;
    }
    if (this.fadeOutTimeout) {
        clearTimeout(this.fadeOutTimeout);
    }
    if (alpha > 0) {
        div.style.opacity = alpha;
        var that = this;
        var tempFunction = function () {
            that.fadeOut(div, alpha - 0.2);
        };
        this.fadeOutTimeout = setTimeout(tempFunction, 50);
    } else {
        div.style.visibility = "hidden";
    }
};
/**
 * this function tries to remove images if they are not needed at the moment.
 * For webkit it's a bit useless because of bug
 *
 * https://bugs.webkit.org/show_bug.cgi?id=6656
 * For Firefox it really brings speed
 *
 * @param zoomlevel layer to fade out and prune
 */
this.hideLayer = function (zoomlevel) {
    if (this.intZoom != zoomlevel) {
        if (this.layers[zoomlevel]) {
            this.layers[zoomlevel]["layerDiv"].style.opacity = 1;
            this.fadeOut(this.layers[zoomlevel]["layerDiv"], 1);
        }
    }
    if (!this.layers[zoomlevel]) {
        return;
    }
    // Remove every tile of this layer that never finished loading.
    for (var vImg in this.layers[zoomlevel]["images"]) {
        if (this.layers[zoomlevel]["images"].hasOwnProperty(vImg) == false) continue;
        if (typeof this.layers[zoomlevel]["images"][vImg] == 'undefined' || this.layers[zoomlevel]["images"][vImg] == false) continue;
        if (typeof this.layers[zoomlevel]["images"][vImg]["img"] == 'undefined' || this.layers[zoomlevel]["images"][vImg]["img"] == false) continue;
        if (this.layers[zoomlevel]["images"][vImg]["img"].getAttribute("loaded") == "yes") continue;
        if (zoomlevel != this.intZoom) {
            var overlayImages = this.layers[zoomlevel]["images"][vImg]["array"];
            for (var o = 0; o < overlayImages.length; o++) {
                this.layers[zoomlevel]["layerDiv"].removeChild(this.layers[zoomlevel]["images"][vImg]["array"][o]);
            }
            delete this.layers[zoomlevel]["images"][vImg]["img"];
            delete this.layers[zoomlevel]["images"][vImg];
        }
    }
};
/**
 * Handling images of tile overlays: show the overlay tile once loaded.
 *
 * @param evt load event
 */
this.ovImgLoaded = function (evt) {
    var img = (evt.target) ?
        evt.target: evt.srcElement;
    img.style.visibility = "";
};
/**
 * Called when a tile image has finished loading (onload event).
 * Marks the tile loaded, recounts the layer's outstanding tiles, fires
 * zoom/bounds/center events when a wheel zoom completes, and promotes
 * the layer to visible once all its tiles are in.
 *
 * @param evt load event
 */
this.imgLoaded = function (evt) {
    var img = evt.target || evt.srcElement;
    var loadComplete = true;
    img.style.visibility = "";
    img.setAttribute("loaded", "yes");
    if (!img.parentNode) return;
    img.style.opacity = 1;
    // Count how many base tiles (overlay tiles excluded) of this layer are still pending.
    var notLoaded = 0;
    var total = 0;
    var zoomLevel = img.parentNode.getAttribute("zoomlevel");
    for (var i = 0; i < img.parentNode.getElementsByTagName("img").length; i++) {
        var theImg = img.parentNode.getElementsByTagName("img").item(i);
        if (theImg.getAttribute("overlay")) continue;
        total++;
        if (theImg.getAttribute("loaded") != "yes") {
            notLoaded++;
            loadComplete = false;
        }
    }
    if (notLoaded < total && jsMaps.Native.Browser.ielt9 && this.layers[this.getIntZoom()]) {
        this.layers[this.getIntZoom()]["layerDiv"].style.display = "";
    }
    var center;
    if (this.loadingZoomLevel == zoomLevel) {
        this.imgLoadInfo(total, notLoaded);
        if (this.wheeling == true) {
            // A wheel zoom just landed on this level: commit the position and notify listeners.
            if (zoomLevel >= this.tileSource.minzoom && zoomLevel <= this.tileSource.maxzoom) {
                jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.zoom_changed);
                jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.bounds_changed);
                jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.center_changed);
                center = this.getCenter();
                this.moveX = 0;
                this.moveY = 0;
                this.position.center = center;
                this.lat = center.lat;
                this.lng = center.lng;
                this.position.zoom = this.getZoom();
                //this.centerAndZoom(this.getCenter(),this.getZoom());
            }
            this.wheeling = false;
        }
    }
    if (typeof this.layers[zoomLevel]!='undefined') {
        this.layers[zoomLevel]["loadComplete"] = loadComplete;
    }
    if (loadComplete) {
        if (this.zoomActive == true) {
            // All tiles in: the zoom has settled — fire idle and bake in the offset.
            jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.idle);
            center = this.getCenter();
            this.moveX = 0;
            this.moveY = 0;
            this.position.center = center;
            this.lat = center.lat;
            this.lng = center.lng;
this.position.zoom = this.getZoom(); //this.centerAndZoom(this.getCenter(),this.getZoom()); this.zoomActive = false; } if (this.loadingZoomLevel == zoomLevel) { this.hideLayer(this.visibleZoom); if (jsMaps.Native.Browser.ielt9) { this.hideLayer(this.visibleZoom + 1); //no idea why } this.visibleZoom = zoomLevel; } } }; /** * Image load error (there maybe is an IE bug) * * @param evt */ this.imgError = function (evt) { var img = (evt.target) ? evt.target: evt.srcElement; if (!img.parentNode) return; img.setAttribute("src", "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAQAAAAEABAMAAACuXLVVAAAAG1BMVEX19fUAAADW1tY9PT23t7eZmZlbW1t6enoeHh7MsBpLAAAD40lEQVR4nO2YsXPaMBSHhTGGEcUBPBqapB1xuV474lxzXXHv0mTESdN2jHtHyIgvbY4/u5aeZMDUQUCTLr9vQTGR9CHpPT1gDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgGdiMJIv1Xe7dH418FXLGrw1nG+JsXjAb+XzSmcXgZCPVKvGW2ZdUr5gtrdAxPuq5XDDAc4Fl558Ge0tkKRN1Wp47S36Vd1Fey+B+P5QtcJH98n/LBf46O8hEHzR3ZLv3o4Cit0E0jM9UnDO/4cA7+lp+RH3X17A4iwdypbtWXz48gK2x+KxbNU7LE8JLyhQc1k0oxEPWUrhdHw95w83smlNGXv9wXtcN1sWmA5XBY5/pA8/DQXqLuseyFa3yYKxaNxTjvstBbh/Iv7w1gyeyAOyB78yE3DarEoZOOqzuC/ndCenveNY7ofFe2n71PqUrq1uuYCd9WB27PlGAo0Oq1MCjG9ZMhNzqsUPmlLgjSsGctQcJgKhnNpO+8yESkucQ0EWDFFz6Z1uSwrQtrDkwFggnS36b6abJWIZ/lk8snB5lmpbPlXzNIqHvlSgrrJJ3SyvijkD0T8LB2mTIwewuHKqFZNUqUBDXWmGWUWseiJ2y2nJ/VhQI4Hx6iSbBUL9KYIxM0Ccu67Y+sqBPJFLAlwK6PiLC8OVCiT6ICVGp1BEXkM4hzO17QWB4ZKpkUCs5w2bzACxTo7omIiGGtS6GMznKQn46h+jwnClAvnKmwmI7CvOHwtG+cGtBVnqm89XBcJCHJYKpFc9otjj74g9FhEoY1GuepZCvBs/34LtBfKC1USA9jgdUTay5XShJ57tLvCgS3aTKp+mzE54vaN1LJVDdxYYG8ybQ4ueXcgyEuSGON7SW0sCWx9CI+jYZTlA5gJ5JHU2qu8YBXkYGkGBl13Ikewm7PVaO6t5oDju5kRkBKWe7EKOb/Us+qNWSEBn4HTrVGwEJd8sBKgyjWcLgYgE1Ae3i1fLxsvIDLXh/JRqc7F8agsslQnViXCKl+vG69gMdQEHE9KOssm7dCNV2ySgKquwWF6UCliGpRChPm4yoPHF/lWpPkm/kkD8S7xh8+Ko5RVRtM0eqA3vqrQpyiiLZ1NaiWuTwAkXBXayVmKWC9SpoLbvTATUJdugHxpoiIg/fgv4WAn4CX9/F/C1tPpEWX7J3en0mhsthIpuRxU+8gTbgazqtYAV6y8JpgL0zcI1/WqyjnUxGeqmOBBnk89bjnB0sXWXMpetYuoZgAAEIAABK/8tHQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD4F/wBHuGbzfTjEywAAAAASUVOR
K5CYII="); this.imgLoaded(evt); }; /** * next function is from wiki.openstreetmap.org * * @param lat * @param lon * @param zoom * @returns {*[]} */ var getTileNumber = function (lat, lon, zoom) { var xtile = ((lon + 180) / 360 * (1 << zoom)); var ytile = ((1 - Math.log(Math.tan(lat * Math.PI / 180) + 1 / Math.cos(lat * Math.PI / 180)) / Math.PI) / 2 * (1 << zoom)); return [xtile, ytile]; }; /** * map is positioned absolute and is an a clone of the original map div. * on window resize it must be positioned again * * if there are problems with CSS margin, padding, border,.. this is the place to fix it * * @returns {{top: number, left: number, width: number, height: number, deltaTop: Number, deltaLeft: Number, deltaBottom: Number, deltaRight: Number}} */ this.calculateMapSize = function () { //this method is very slow in 2010 browsers var el = this.mapParent; var size1= el.getBoundingClientRect(); var height = (size1.height) ? size1.height: size1.bottom - size1.top; var width = (size1.width) ? 
size1.width : size1.right - size1.left; // Make sure that there is no scroll bar when the map takes 100% of the screen if (parseInt(width) === parseInt(document.body.clientWidth) && parseInt(height) === parseInt(document.body.clientHeight)) document.body.style.overflow = "hidden"; return { top : size1.top - document.body.scrollTop, left : size1.left - document.body.scrollLeft, width : width, height : height, deltaTop : size1.top - document.body.scrollTop, deltaLeft : size1.left - document.body.scrollLeft, deltaBottom : size1.bottom, deltaRight : size1.right }; }; this.redraw = function () { this.setMapPosition(); }; this.setMapPosition = function () { this.mapsize = this.calculateMapSize(); this.size = this.calculateMapSize(); var el = this.mapParent; this.mapTop = this.mapsize.top;// + this.mapsize.deltaTop; this.mapLeft = this.mapsize.left;// + this.mapsize.deltaLeft; this.width = this.mapsize.width; this.height = this.mapsize.height; this.clone.style.height = this.mapsize.height + "px"; this.map.style.left = this.mapsize.width / 2 + "px"; this.map.style.top = this.mapsize.height / 2 + "px"; var center = this.getCenter(); if (!center) return; var zoom = this.getZoom(); if (zoom) this.centerAndZoom(this.getCenter(), this.getZoom()); }; this.clearMap = function () { if (!this.map)return; while (this.map.firstChild) { this.map.removeChild(this.map.firstChild); } while (this.layers.length > 0) { this.layers.pop(); } this.redraw(); }; //functions from wiki gps2xy var lat2y = function (a) { return 180 / Math.PI * Math.log(Math.tan(Math.PI / 4 + a * (Math.PI / 180) / 2)); }; var y2lat = function (a) { return 180 / Math.PI * (2 * Math.atan(Math.exp(a * Math.PI / 180)) - Math.PI / 2); }; //the image load information in the upper right corner this.imgLoadInfo = function (total, missing) { if (!this.loadInfoDiv) { this.loadInfoDiv = document.createElement("div"); this.loadInfoDiv.style.position = "absolute"; this.loadInfoDiv.style.top = "0px"; this.loadInfoDiv.style.right = 
"0px"; this.loadInfoDiv.style.backgroundColor = "white"; this.loadInfoDiv.style.border = "1px solid gray"; this.loadInfoDiv.style.fontSize = "10px"; this.map.parentNode.appendChild(this.loadInfoDiv); } if (missing == 0) { this.loadInfoDiv.style.display = "none"; jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.tilesloaded); if (jsMaps.Native.Browser.ielt9) { this.clone.style.visibility = ""; } } else { this.loadInfoDiv.style.display = ""; while (this.loadInfoDiv.firstChild) { this.loadInfoDiv.removeChild(this.loadInfoDiv.firstChild); } this.loadInfoDiv.innerHTML = missing + ' images to load' } }; this.mapCopyrightNode = false; this.mapCopyright = function () { if (typeof this.tileSource.copyright!='undefined') { if (!this.mapCopyrightNode) { this.mapCopyrightNode = jsMaps.Native.CreateDiv(this.map.parentNode,'map-copyright') } this.mapCopyrightNode.innerHTML = this.tileSource.copyright; } }; this.scaleDivExec = function () { if (options.scale_control) { jsMaps.Native.ScaleUI.init(this); jsMaps.Native.ScaleUI._update(); } }; // ************* INIT ********** this.internetExplorer = navigator.userAgent.indexOf("MSIE") != -1; if (navigator.userAgent.indexOf("Android") != -1) { var that = this; var tempFunction = function () { that.blocked = false }; setInterval(tempFunction, 300); } this.position = {}; if (typeof tileLayers == 'undefined') throw "No tile servers defined"; if (tileLayers.Layers.length == 0) throw "Empty tile severs list"; var counter = 0; for (var t in tileLayers.Layers) { if (tileLayers.Layers.hasOwnProperty(t) == false) continue; if (counter == 0) { this.tiles(tileLayers.Layers[t]); } counter++; } this.selectedTileLayer = 0; this.wheelSpeedConfig = []; this.wheelSpeedConfig["acceleration"] = 2; this.wheelSpeedConfig["maxSpeed"] = 2; this.wheelSpeedConfig["digizoom"] = true; this.wheelSpeedConfig["zoomAnimationSlowdown"] = 0.05; this.wheelSpeedConfig["animationFPS"] = 100; this.wheelSpeedConfig["moveAnimateDesktop"] = true; 
this.wheelSpeedConfig["moveAnimationSlowdown"] = 0.4; this.wheelSpeedConfig["rectShiftAnimate"] = false; this.wheelSpeedConfig["rectShiftAnimationTime"] = 500; this.wheelSpeedConfig["animateMinSpeed"] = 0.4; this.wheelSpeedConfig["animateMaxSpeed"] = 200; //variables for performance check this.wheelEventCounter = 0; this.framesCounter = 0; this.mapParent = map; this.overlays = []; // Add the zoom ui this.uiContainer = false; if (options.zoom_control) this.addOverlay(new jsMaps.Native.ZoomUI(this)); if (options.map_type) this.addOverlay(new jsMaps.Native.LayersUI(this,tileLayers)); this.clone = map.cloneNode(true); //clone is the same as the map div, but absolute positioned this.clone = document.createElement("div"); this.clone.removeAttribute("id"); jsMaps.Native.setCursor(this.clone,"grab"); jsMaps.Native.Dom.addClass(this.clone,'jsMaps-Native-Box'); if (map.firstChild) { map.insertBefore(this.clone, map.firstChild); } else { map.appendChild(this.clone); } this.map = document.createElement("div"); //this is the div that holds the layers, but no marker and svg overlayes this.map.style.position = "absolute"; this.clone.appendChild(this.map); this.setMapPosition(); //div for markers this.overlayDiv = document.createElement("div"); this.overlayDiv.style.position = "absolute"; this.clone.appendChild(this.overlayDiv); //distance tool this.distanceMeasuring = "no"; this.moveMarker = null; this.measureLine = null; this.moveAnimationMobile = true; this.moveAnimationDesktop = false; this.moveAnimationBlocked = false; this.lastMouseX = this.width / 2; this.lastMouseY = this.height / 2; this.layers = []; this.visibleZoom = null; this.oldVisibleZoom = null; this.intZoom = null; this.moveX = 0; this.moveY = 0; this.lastMoveX = 0; this.lastMoveY = 0; this.lastMoveTime = 0; this.startMoveX = 0; this.startMoveY = 0; this.sc = 1; this.blocked = false; this.tileW = 256; this.tileH = 256; this.position.zoom = 1; this.movestarted = false; //touchscreen this.mousedownTime = null; 
this.doubleclickTime = 400; //mouse this.mousedownTime2 = null; this.doubleclickTime2 = 500; this.zoomOutTime = 1000; this.zoomOutSpeed = 0.01; this.zoomOutInterval = null; this.zoomOutStarted = false; this.draggable = true; var w; if (jsMaps.Native.Browser.ie && !jsMaps.Native.Browser.ie11) { w = map; jsMaps.Native.Event.attach(document.documentElement, "mouseup", function (e) { this.leftClick = false; }, this, false); jsMaps.Native.Event.attach(document.documentElement, "mousemove", this.mousemove, this, false); } else { w = window; jsMaps.Native.Event.attach(window, "resize", this.setMapPosition, this, false); } if (navigator.userAgent.indexOf("Konqueror") != -1) w = map; jsMaps.Native.Event.attach(map, "touchstart", this.start, this, false); jsMaps.Native.Event.attach(map, "touchmove", this.move, this, false); jsMaps.Native.Event.attach(map, "touchend", this.end, this, false); jsMaps.Native.Event.attach(w, "mousemove", this.mousemove, this, false); jsMaps.Native.Event.attach(map, "mousedown", this.mousedown, this, false); jsMaps.Native.Event.attach(w, "mouseup", this.mouseup, this, false); jsMaps.Native.Event.attach(w, "orientationchange", this.reSize, this, false); if (options.mouse_scroll) jsMaps.Native.Event.attach(map, "DOMMouseScroll", this.mousewheel, this, false); jsMaps.Native.Event.attach(map, "dblclick", this.doubleclick, this, false); if (typeof(this.keydown) == "function") { jsMaps.Native.Event.attach(w, "keydown", this.keydown, this, false); jsMaps.Native.Event.attach(w, "keyup", this.keyup, this, false); } this.mapCopyright(); var center=new jsMaps.geo.Location(options.center.latitude,options.center.longitude); this.centerAndZoom(center,options.zoom); var hooking = function() {}; hooking.prototype = new jsMaps.MapStructure(); hooking.prototype.object = this; hooking.prototype.getCenter = function () { var map = this.object.getCenter(); return {lat: map.lat, lng: map.lng}; }; hooking.prototype.getElement = function () { return this.object.clone; }; 
hooking.prototype.setDraggable = function (flag) { this.object.draggable = flag; }; hooking.prototype.latLngToPoint = function (lat, lng) { var point = new jsMaps.geo.Location(lat, lng); var xy = this.object.latlngToXY(point); return {x: xy['x'],y: xy['y']} }; hooking.prototype.pointToLatLng = function (x, y) { var pos = this.object.XYTolatlng(x,y); return {lat:pos.lat,lng:pos.lng}; }; hooking.prototype.moveXY = function (x, y) { this.object.moveXY(x,y); }; hooking.prototype.setCenter = function (lat, lng) { this.object.centerAndZoom(new jsMaps.geo.Location(lat, lng),this.object.getZoom()); jsMaps.Native.Event.trigger(this.object.mapParent,jsMaps.api.supported_events.center_changed); }; hooking.prototype.getBounds = function () { return jsMaps.Native.prototype.bounds(this.object); }; hooking.prototype.getZoom = function () { return this.object.getIntZoom(); }; hooking.prototype.setZoom = function (number) { this.object.wheeling = true; this.object.zoom(number); }; hooking.prototype.fitBounds = function (bounds) { this.object.wheeling = true; return this.object.setBounds(bounds.bounds); }; return new hooking(); }; /** * Bounds object * * @param themap * @returns hooking */ jsMaps.Native.prototype.bounds = function (themap) { var __bounds; if (typeof themap != 'undefined') { __bounds = themap.getBounds(); } else { __bounds = new jsMaps.Native.InnerBounds({lat:0,lng:0},{lat:0,lng:0}); } var hooking = function () {}; hooking.prototype = new jsMaps.BoundsStructure(); hooking.prototype.bounds = __bounds; hooking.prototype.addLatLng = function (lat, lng) { this.bounds.extend(new jsMaps.geo.Location(lat, lng)); }; hooking.prototype.getCenter = function () { var center = this.bounds.getCenter(); return {lat: center.lat, lng: center.lng}; }; hooking.prototype.getTopLeft = function () { var topLeft = this.bounds.ne(); return {lat: topLeft.lat, lng: topLeft.lng}; }; hooking.prototype.getBottomRight = function () { var bottomRight = this.bounds.sw(); return {lat: 
bottomRight.lat, lng: bottomRight.lng}; }; return new hooking(); }; /** * Attach map events * * @param content * @param event * @param fnCore * @param once * @returns {*} */ jsMaps.Native.prototype.attachEvent = function (content,event,fnCore,once) { var elem; var customEvent = false; if (typeof content.object != 'undefined' && typeof content.object.mapParent!='undefined') { elem = content.object.mapParent; } else if (typeof content.object != 'undefined' && typeof content.object.marker!='undefined') { elem = content.object.marker; if (event == jsMaps.api.supported_events.click) customEvent = true; } else if (typeof content.object != 'undefined' && typeof content.object.infobox!='undefined') { elem = content.object.infobox; } else if (typeof content.object != 'undefined' && typeof content.object.vectorPath!='undefined') { elem = content.object.vectorPath; if (event == jsMaps.api.supported_events.click) customEvent = true; } var eventTranslation = event; if (event == jsMaps.api.supported_events.mouseover) eventTranslation = 'mouseenter'; if (event == jsMaps.api.supported_events.rightclick) eventTranslation = 'contextmenu'; if (event == jsMaps.api.supported_events.tilt_changed) eventTranslation = 'orientationchange'; var fn = fnCore; // this is stupid, damn you micosoft if (typeof jsMaps.Native.Event[eventTranslation] != 'undefined') eventTranslation = jsMaps.Native.Event[eventTranslation]; var useFn = function (e) { if (event == jsMaps.api.supported_events.mouseout) { var mouseOut = jsMaps.Native.MakeMouseOutFn(elem,e); if (mouseOut == false) return; } if ((event == jsMaps.api.supported_events.click || event == jsMaps.api.supported_events.dblclick) && typeof content.object.clickable != 'undefined' && content.object.clickable == false) { return; } if (event == jsMaps.api.supported_events.dblclick) { e.cancelBubble = true; } var eventHooking = function() {}; eventHooking.prototype = new jsMaps.Event(e,event,content); eventHooking.prototype.getCursorPosition = function 
() { if (typeof this.container.object.mouseToLatLng!='undefined') { return this.container.object.mouseToLatLng(this.eventObject); } return {lat: 0, lng: 0}; }; if (!jsMaps.Native.Browser.ielt9) { eventHooking.prototype.stopPropagation = function () { this.eventObject.stopPropagation(); this.eventObject.stopImmediatePropagation(); this.eventObject.cancelBubble = true; }; } fn(new eventHooking); }; // Create the event. if (customEvent == false) { if (jsMaps.Native.Browser.ielt9) { if (!elem[eventTranslation]) { elem[eventTranslation] = 0; } if (eventTranslation == 'mouseenter' || eventTranslation == 'mouseout' || eventTranslation == 'mousemove' || eventTranslation == 'mouseup' || eventTranslation == 'mousedown' ) { var trigger; if (eventTranslation == 'mouseenter') trigger = 'onmouseover'; if (eventTranslation == 'mouseout') trigger = 'onmouseout'; if (eventTranslation == 'mousemove') trigger = 'onmousemove'; if (eventTranslation == 'mouseup') trigger = 'onmouseup'; if (eventTranslation == 'mousedown') trigger = 'onmousedown'; elem.attachEvent(trigger, function (e) { useFn(e); }); } else { elem.attachEvent("onpropertychange", function (e) { if (e.propertyName == eventTranslation) { useFn(e); } }); } } else { if (jsMaps.Native.Browser.touch) { if (eventTranslation == 'click' || eventTranslation == 'mousedown' || eventTranslation == 'mouseenter' ) eventTranslation = 'touchstart'; if (eventTranslation == 'mousemove' || eventTranslation == 'drag') eventTranslation = 'touchmove'; if (eventTranslation == 'mouseup' || eventTranslation == 'mouseout' ) eventTranslation = 'touchend'; } var eventTarget = document.createEvent('Event'); eventTarget.initEvent(eventTranslation, true, true); elem.addEventListener(eventTranslation, useFn, false); } } else { elem = content.object.attachEvent(jsMaps.api.supported_events.click,useFn,false,false); } return {eventObj: elem, eventName: event}; }; /** * * @param element * @param eventName */ jsMaps.Native.prototype.triggerEvent = function 
(element,eventName) { var elem; if (typeof element.object != 'undefined' && typeof element.object.mapParent!='undefined') { elem = element.object.mapParent; }else if (typeof element.object != 'undefined' && typeof element.object.marker!='undefined') { elem = element.object.marker; } jsMaps.Native.Event.trigger(elem,eventName); }; /** * * @param map * @param eventObject * @returns {*} */ jsMaps.Native.prototype.removeEvent = function (map,eventObject) { jsMaps.Native.Event.remove(eventObject.eventObj,eventObject.eventName); }; /** * Generate markers * * @param {jsMaps.MapStructure} map * @param {jsMaps.MarkerOptions} parameters */ jsMaps.Native.prototype.marker = function (map,parameters) { var options = { position: new jsMaps.geo.Location( parameters.position.lat, parameters.position.lng), map: map.object, title:parameters.title, draggable: parameters.draggable, visible: true }; if (parameters.zIndex != null) options.zIndex = parameters.zIndex; if (parameters.icon != null) { options.icon = new jsMaps.Native.Overlay.MarkerImage( parameters.icon ); }else if (parameters.html != null) { options.icon = parameters.html; options.raiseOnDrag = false; } var marker = new jsMaps.Native.Overlay.Marker(options); var hooking = function () {}; hooking.prototype = new jsMaps.MarkerStructure(); hooking.prototype.object = marker; hooking.prototype._objectName = 'marker'; /** * * @returns {{lat: *, lng: *}} */ hooking.prototype.getPosition = function () { var pos = this.object.getPosition(); return {lat: pos.lat, lng: pos.lng} }; hooking.prototype.setPosition = function (lat, lng) { jsMaps.Native.Event.trigger(this.object.marker,jsMaps.api.additional_events.position_changed); this.object.setPosition({lat: lat,lng: lng}); }; hooking.prototype.getVisible = function () { return this.object.getVisible(); }; hooking.prototype.setVisible = function (variable) { return this.object.setVisible(variable); }; hooking.prototype.getIcon = function () { if (typeof this.object.MarkerOptions.icon != 
'undefined' && typeof this.object.MarkerOptions.icon.url != 'undefined') { return this.object.MarkerOptions.icon.url; } return null; }; hooking.prototype.setIcon = function (icon) { while (this.object.marker.firstChild) { this.object.marker.removeChild(this.object.marker.firstChild); } if (this.object.shadow) { while (this.object.shadow.firstChild) { this.object.shadow.removeChild(this.object.shadow.firstChild); } } this.object.MarkerOptions.shape = false; this.object.MarkerOptions.icon = new jsMaps.Native.Overlay.MarkerImage(icon); this.object.populateIcon(this.object.MarkerOptions); this.object.init(this.object.MarkerOptions.map); this.object.render(); jsMaps.Native.Event.trigger(this.object.marker,jsMaps.api.additional_events.icon_changed); }; hooking.prototype.getZIndex = function () { return this.object.marker.style.zIndex; }; hooking.prototype.setZIndex = function (number) { this.object.MarkerOptions.zIndex = number; this.object.render(); }; hooking.prototype.setDraggable = function (flag) { this.object.MarkerOptions.draggable = flag; this.object.init(this.object.MarkerOptions.map); this.object.render(); }; hooking.prototype.remove = function () { while (this.object.marker.firstChild) { this.object.marker.removeChild(this.object.marker.firstChild); } if (this.object.shadow) { while (this.object.shadow.firstChild) { this.object.shadow.removeChild(this.object.shadow.firstChild); } } this.object.clear(); this.object.destroy(); this.object = null; }; return new hooking(); }; /** * Info windows * * Create bubbles to be displayed on the map * * @param {jsMaps.InfoWindowOptions} parameters * @returns {jsMaps.InfoWindowStructure} */ jsMaps.Native.prototype.infoWindow = function (parameters) { var options = {content: parameters.content}; if (parameters.position != null) options.position = parameters.position; var infoWindow = new jsMaps.Native.Overlay.InfoWindow(options); var hooking = function () {}; hooking.prototype = new jsMaps.InfoWindowStructure(); /** * @type 
{jsMaps.Native.Overlay.InfoWindow} */ hooking.prototype.object = infoWindow; hooking.prototype.getPosition = function () { var pos = this.object.getPosition(); return {lat: pos.lat, lng: pos.lng} }; hooking.prototype.setPosition = function (lat, lng) { this.object.setPosition({lat: lat,lng: lng}); }; hooking.prototype.close = function () { this.object.close(); }; /** * * @param {jsMaps.MapStructure} map * @param {jsMaps.MarkerStructure} marker */ hooking.prototype.open = function(map,marker) { this.object.open(map.object,((typeof marker == 'undefined' || typeof marker.object == 'undefined') ? undefined: marker.object)); }; hooking.prototype.setContent = function (content) { this.object.setContent(content); }; return new hooking(); }; /** * Create PolyLine * * @param {jsMaps.MapStructure} map * @param {jsMaps.PolyLineOptions} parameters * @returns jsMaps.PolyLineStructure */ jsMaps.Native.prototype.polyLine = function (map,parameters) { var vector = new jsMaps.Native.Overlay.Vector({ clickable: parameters.clickable, stroke: parameters.strokeColor, strokeWidth: parameters.strokeWeight, strokeOpacity: parameters.strokeOpacity, fill: "none", draggable: parameters.draggable, editable: parameters.editable, visible: parameters.visible, zIndex: parameters.zIndex }, parameters.path, jsMaps.Native.Vector.elements.polyLine); map.object.addOverlay(vector); var hooking = function () {}; hooking.prototype = new jsMaps.PolyLineStructure(); hooking.prototype.object = vector; hooking.prototype.getEditable = function () { return this.object._vectorOptions.editable; }; hooking.prototype.getPath = function () { var arrayOfPaths = []; /** * * @type {jsMaps.Native.Overlay.Vector._vectorPoints|*|jsMaps.Native.Overlay.Vector.vectorObject._vectorPoints} */ var path = this.object._vectorPoints; for (var i in path) { if (path.hasOwnProperty(i) == false) continue; var pos = path[i]; arrayOfPaths.push ({lat: pos.lat, lng: pos.lng}); } return arrayOfPaths; }; hooking.prototype.getVisible = 
function () { return this.object._vectorOptions.visible; }; hooking.prototype.setDraggable = function (draggable) { this.object._vectorOptions.draggable = draggable; }; hooking.prototype.setEditable = function (editable) { this.object._vectorOptions.editable = editable; this.object.render(true); }; hooking.prototype.setPath = function (pathArray) { this.object._vectorPoints = pathArray; this.object.render(true); }; /** * @param {jsMaps.MapStructure} map * @returns {{lat: *, lng: *}} */ hooking.prototype.setMap = function (map) { this.object.theMap.removeOverlay(this.object); map.object.addOverlay(this.object); this.object.theMap = map.object; this.object._vectorOptions.map = map.object; this.object.render(true); }; hooking.prototype.setVisible = function (visible) { this.object.setVisible(visible); }; hooking.prototype.removeLine = function () { this.object.destroy(); }; return new hooking(); }; /** * @param {jsMaps.MapStructure} map * @param {jsMaps.PolygonOptions} parameters * @returns jsMaps.PolygonStructure */ jsMaps.Native.prototype.polygon = function (map,parameters) { var vector = new jsMaps.Native.Overlay.Vector({ clickable: parameters.clickable, stroke: parameters.strokeColor, strokeWidth: parameters.strokeWeight, strokeOpacity: parameters.strokeOpacity, fill: parameters.fillColor, fillOpacity: parameters.fillOpacity, draggable: parameters.draggable, editable: parameters.editable, visible: parameters.visible, zIndex: parameters.zIndex }, parameters.paths, jsMaps.Native.Vector.elements.polygon); map.object.addOverlay(vector); var hooking = function () {}; hooking.prototype = new jsMaps.PolygonStructure(); hooking.prototype.object = vector; hooking.prototype.getDraggable = function () { return this.object._vectorOptions.draggable; }; hooking.prototype.getEditable = function () { return this.object._vectorOptions.editable; }; hooking.prototype.getPath = function () { var arrayOfPaths = []; /** * * @type 
{jsMaps.Native.Overlay.Vector._vectorPoints|*|jsMaps.Native.Overlay.Vector.vectorObject._vectorPoints} */
// Copy the internal point list into plain {lat, lng} literals so callers
// cannot mutate the overlay's points through the returned array.
var path = this.object._vectorPoints;
for (var i in path) {
    if (path.hasOwnProperty(i) == false) continue;
    var pos = path[i];
    arrayOfPaths.push ({lat: pos.lat, lng: pos.lng});
}

return arrayOfPaths;
};

/** @returns {boolean} the overlay's current visibility flag */
hooking.prototype.getVisible = function () {
    return this.object._vectorOptions.visible;
};

/** @param {boolean} draggable - toggles dragging; takes effect without a re-render */
hooking.prototype.setDraggable = function (draggable) {
    this.object._vectorOptions.draggable = draggable;
};

/** @param {boolean} editable - toggles edit mode and forces a full re-render */
hooking.prototype.setEditable = function (editable) {
    this.object._vectorOptions.editable = editable;
    this.object.render(true);
};

/** @param {Array} pathArray - replaces the overlay's point list and re-renders */
hooking.prototype.setPath = function (pathArray) {
    this.object._vectorPoints = pathArray;
    this.object.render(true);
};

/**
 * Detaches the overlay from its current map and attaches it to another.
 * (NOTE(review): original JSDoc claimed a {lat, lng} return value, but the
 * method returns nothing.)
 *
 * @param {jsMaps.MapStructure} map - destination map wrapper
 */
hooking.prototype.setMap = function (map) {
    this.object.theMap.removeOverlay(this.object);
    map.object.addOverlay(this.object);

    this.object.theMap = map.object;
    this.object._vectorOptions.map = map.object;
    this.object.render(true);
};

/** @param {boolean} visible - delegates to the overlay's own visibility handling */
hooking.prototype.setVisible = function (visible) {
    this.object.setVisible(visible);
};

/** Destroys the overlay and removes it from the map permanently. */
hooking.prototype.removePolyGon = function () {
    this.object.destroy();
};

return new hooking();
};

/**
 * Create Circle
 * (NOTE(review): original comment said "Create PolyLine", but this factory
 * builds a circle vector overlay, as the element type below shows.)
 *
 * @param {jsMaps.MapStructure} map
 * @param {jsMaps.CircleOptions} parameters
 * @returns jsMaps.CircleStructure
 */
jsMaps.Native.prototype.circle = function (map,parameters) {
    // Translate the public CircleOptions names into the native
    // vector-overlay option names (e.g. strokeColor -> stroke).
    var vector = new jsMaps.Native.Overlay.Vector({
        clickable: parameters.clickable,
        stroke: parameters.strokeColor,
        strokeWidth: parameters.strokeWeight,
        strokeOpacity: parameters.strokeOpacity,
        fill: parameters.fillColor,
        fillOpacity: parameters.fillOpacity,
        draggable: parameters.draggable,
        editable: parameters.editable, // currently not supported
        visible: parameters.visible,
        zIndex: parameters.zIndex,
        center: parameters.center,
        radius: parameters.radius
    }, [], jsMaps.Native.Vector.elements.circle);

    map.object.addOverlay(vector);

    // Adapter object exposing the generic jsMaps.CircleStructure API
    // on top of the native vector overlay.
    var hooking = function () { };
    hooking.prototype = new jsMaps.CircleStructure();
    hooking.prototype.object = vector;

    /** @returns the bounding box of the circle's rendered points */
    hooking.prototype.getBounds = function () {
        var bBox = new jsMaps.Native.prototype.bounds();
        bBox.bounds = this.object.pointsBounds();
        return bBox;
    };

    /** @returns {{lat: *, lng: *}} center derived from the current bounds */
    hooking.prototype.getCenter = function () {
        var theCenter = this.getBounds().getCenter();
        return {lat: theCenter.lat, lng: theCenter.lng};
    };

    hooking.prototype.getDraggable = function () {
        return this.object._vectorOptions.draggable;
    };

    hooking.prototype.getEditable = function () {
        return this.object._vectorOptions.editable;
    };

    /** @returns the circle radius as stored in the overlay options */
    hooking.prototype.getRadius = function () {
        return this.object._vectorOptions.radius;
    };

    hooking.prototype.getVisible = function () {
        return this.object._vectorOptions.visible;
    };

    /** Moves the circle center and re-renders. */
    hooking.prototype.setCenter = function (lat, lng) {
        this.object._vectorOptions.center = {lat: lat, lng: lng};
        this.object.render(true);
    };

    hooking.prototype.setDraggable = function (draggable) {
        this.object._vectorOptions.draggable = draggable;
    };

    hooking.prototype.setEditable = function (editable) {
        this.object._vectorOptions.editable = editable;
        this.object.render(true);
    };

    /**
     * Detaches the circle from its current map and attaches it to another.
     * (NOTE(review): original JSDoc claimed a {lat, lng} return value, but the
     * method returns nothing.)
     *
     * @param {jsMaps.MapStructure} map - destination map wrapper
     */
    hooking.prototype.setMap = function (map) {
        this.object.theMap.removeOverlay(this.object);
        map.object.addOverlay(this.object);

        this.object.theMap = map.object;
        this.object._vectorOptions.map = map.object;
        this.object.render(true);
    };

    hooking.prototype.setVisible = function (visible) {
        this.object.setVisible(visible);
    };

    /** Changes the radius and re-renders. */
    hooking.prototype.setRadius = function (radius) {
        this.object._vectorOptions.radius = radius;
        this.object.render(true);
    };

    /** Destroys the circle overlay and removes it from the map permanently. */
    hooking.prototype.removeCircle = function () {
        this.object.destroy();
    };

    return new hooking();
};
library/native/core.native.js
/** * The native maps is a fork of https://github.com/robotnic/khtmlib * * khtmlib credits: * verion 0.54 * LGPL Bernhard Zwischenbrugger * * @param mapDomDocument * @constructor */ if (typeof jsMaps.Native == 'undefined') { jsMaps.Native = function (mapDomDocument) {}; jsMaps.Native.prototype = new jsMaps.Abstract(); } jsMaps.Native.MapCount = 0; jsMaps.Native.Overlay = {}; /** * create the map * * @param map * @param options * @param {jsMaps.Native.Tiles} tileLayers * @returns {jsMaps.MapStructure} */ jsMaps.Native.prototype.initializeMap = function (map, options, tileLayers) { jsMaps.Native.Dom.addClass(map,'jsMaps-Native'); jsMaps.Native.MapCount++; this.MapNumber = jsMaps.Native.MapCount; // **** Overlays handling **** this.addOverlay = function (obj) { this.overlays.push(obj); if (typeof(obj.init) == "function") { obj.init(this); } this.renderOverlay(obj); }; this.renderOverlay = function (obj) { obj.render(); }; this.renderOverlays = function () { this.overlayDiv.style.display = ""; var that = this; var i = 0; for (var obj in this.overlays) { if (this.overlays.hasOwnProperty(obj) == false) continue; if (i == 0) { try { //this.overlays[obj].clear(that); } catch (e) { } i++; } this.overlays[obj].render(); } }; this.hideOverlays = function () { for (var obj in this.overlays) { if (this.overlays.hasOwnProperty(obj) == false) continue; try { if (typeof this.overlays[obj].hide !='undefined') { this.overlays[obj].hide(that); } } catch (e) { } } }; this.removeOverlays = function () { while (this.overlays.length > 0) { var overlay = this.overlays.pop(); overlay.clear(); } }; this.stopRenderOverlays = function () { for (var obj in this.overlays) { if (this.overlays.hasOwnProperty(obj) == false) continue; if (typeof(this.overlays[obj].cancel) == "function") { this.overlays[obj].cancel(); } } }; this.removeOverlay = function (ov) { for (var i = 0; i < this.overlays.length; i++) { var overlay = this.overlays[i]; if (ov == overlay) { ov.clear(); this.overlays.splice(i, 
1); break; } } }; // every change (lat,lng,zoom) will call a user defined function this.callbackFunctions = []; this.addCallbackFunction = function (func) { if (typeof(func) == "function") { this.callbackFunctions.push(func); } }; this.executeCallbackFunctions = function () { for (var i = 0; i < this.callbackFunctions.length; i++) { this.callbackFunctions[i].call(); } }; /*================================================== // // Touchscreen and Mouse EVENTS // ===================================================*/ // // Touchscreen // Here also the multitouch zoom is done this.moving=false; this.start = function(evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { evt.returnValue = false; // The IE way } jsMaps.Native.Dom.addClass(document.body,'jsMaps-Native-no-scroll'); this.moving = this.center(); this.moveAnimationBlocked = true; if (evt.touches.length == 1) { if (this.mousedownTime != null) { var now = (new Date()).getTime(); if (now - this.mousedownTime < this.doubleclickTime) { this.discretZoom(1,this.pageX(evt.touches[0]), this.pageY(evt.touches[0])); } } if (!this.discretZoomBlocked) { this.mousedownTime2 = (new Date()).getTime(); this.startMoveX = this.moveX - (this.pageX(evt.touches[0])) / this.factor / this.sc; this.startMoveY = this.moveY - (this.pageY(evt.touches[0])) / this.factor / this.sc; this.movestarted = true; jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.dragstart); this.mousedownTime = (new Date()).getTime(); } } if (evt.touches.length == 2 && !this.discretZoomBlocked) { this.mousedownTime = null; this.movestarted = false; var X1 = this.pageX(evt.touches[0]); var Y1 = this.pageY(evt.touches[0]); var X2 = this.pageX(evt.touches[1]); var Y2 = this.pageY(evt.touches[1]); this.startDistance = Math.sqrt(Math.pow((X2 - X1), 2) + Math.pow((Y2 - Y1), 2)); this.startZZ = this.position.zoom; var x = (X1 + X2) / 2 / this.factor / this.sc; var y = (Y1 + Y2) / 2 / this.factor / this.sc; this.startMoveX 
= this.moveX - x; this.startMoveY = this.moveY - y; this.prevxy = {x:0,y:0}; } }; this.moveok = true; this.prevxy = {x:0,y:0}; this.move = function(evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { evt.returnValue = false; // The IE way } var center; if (evt.touches.length == 1 && this.movestarted) { this.lastMouseX = this.pageX(evt.touches[0]); this.lastMouseY = this.pageY(evt.touches[0]); jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.drag); this.lastMoveX = this.moveX; this.lastMoveY = this.moveY; this.lastMoveTime = new Date(); this.moveX = (this.pageX(evt.touches[0])) / this.factor / this.sc + this.startMoveX; this.moveY = (this.pageY(evt.touches[0])) / this.factor / this.sc + this.startMoveY; center = new jsMaps.geo.Location(this.lat, this.lng); this.setCenter2(center, this.position.zoom); this.moveAnimationBlocked = false; jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.bounds_changed); jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.center_changed); } if (evt.touches.length == 2 && !this.discretZoomBlocked) { this.movestarted = false; this.mousedownTime = null; var X1 = this.pageX(evt.touches[0]); var Y1 = this.pageY(evt.touches[0]); var X2 = this.pageX(evt.touches[1]); var Y2 = this.pageY(evt.touches[1]); var Distance = Math.sqrt(Math.pow((X2 - X1), 2) + Math.pow((Y2 - Y1), 2)); var zoomDelta = (Distance / this.startDistance); var zz = this.startZZ + zoomDelta - 1; if (zz > this.tileSource.maxzoom) { zz = this.tileSource.maxzoom; zoomDelta = this.zoomDeltaOld; } else this.zoomDeltaOld = zoomDelta; var x = (X1 + X2) / 2; var y = (Y1 + Y2) / 2; var diff = (Distance - this.startDistance) / this.startDistance * 100; if (Math.round(x) != Math.round(this.prevxy.x) && Math.round(y) != Math.round(this.prevxy.y) && Math.round(this.startDistance) != Math.round(Distance) && Math.abs(diff) > 15) { this.discretZoom(((zoomDelta < 1)? 
-1: 1),x, y); } this.prevxy = {x:x,y:y}; } }; this.end = function(evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { evt.returnValue = false; } this.prevxy = {x:0,y:0}; jsMaps.Native.Dom.removeClass(document.body,'jsMaps-Native-no-scroll'); var steps = 20; for (var i = 1; i <= steps; i++) { if (typeof this.zoomTimeouts[i] != 'undefined') { clearTimeout(this.zoomTimeouts[i]); } } if (this.movestarted) { this.lastMouseX = this.pageX(evt.touches[0]); this.lastMouseY = this.pageY(evt.touches[0]); if (this.moveMarker) { this.moveMarker = null; } // using this normalize some things are working better, others not so good. // delete it will solve some problems but bring other problems var now = new Date(evt.timeStamp); var timeDelta = now - this.lastMoveTime; if (this.wheelSpeedConfig["moveAnimateDesktop"] && timeDelta != 0) { if (this.movestarted) { if (this.moveAnimationBlocked == false) { var speedX = (this.lastMoveX - this.moveX) / timeDelta; var speedY = (this.lastMoveY - this.moveY) / timeDelta; var maxSpeed = 200; if (speedX > maxSpeed)speedX = maxSpeed; if (speedY > maxSpeed)speedY = maxSpeed; if (speedX < -maxSpeed)speedX = -maxSpeed; if (speedY < -maxSpeed)speedY = -maxSpeed; if (Math.abs(speedX) > this.wheelSpeedConfig["animateMinSpeed"] || Math.abs(speedY) > this.wheelSpeedConfig["animateMinSpeed"]) { this.animateMove(speedX, speedY); } } } } var that = this; var tempFunction = function () { if (that.movestarted) { jsMaps.Native.Event.trigger(that.mapParent, jsMaps.api.supported_events.dragend); jsMaps.Native.Event.trigger(that.mapParent, jsMaps.api.supported_events.idle); } that.movestarted = false; }; setTimeout(tempFunction, 1); } if (evt.touches.length == 1) { this.startMoveX = this.moveX - evt.touches[0].pageX / this.factor / this.sc; this.startMoveY = this.moveY - evt.touches[0].pageY / this.factor / this.sc; //this.startDistance = 0; //this.startZZ = this.position.zoom; } }; /** * mouse events * (distance measure code not 
in use anymore) * * @param evt * @returns {*} */ this.pageX = function (evt) { try { var px = (evt.pageX === undefined) ? evt.clientX + document.body.scrollLeft: evt.pageX; return px - this.mapLeft; } catch (e) { return this.lastMouseX; } }; /** * mouse events * (distance measure code not in use anymore) * * @param evt * @returns {*} */ this.pageY = function (evt) { try { var py = (evt.pageY === undefined) ? evt.clientY + document.body.scrollTop: evt.pageY; return py - this.mapTop; } catch (e) { return this.lastMouseY; } }; this.doubleclickBlocked = false; this.doubleclick = function (evt) { this.discretZoom(1, this.pageX(evt), this.pageY(evt)); }; this.leftClick = null; this.mousedown = function (evt) { this.mapParent.focus(); if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { window.returnValue = false; // The IE way } if (jsMaps.Native.Browser.ie) this.leftClick = true; this.lastMouseX = this.pageX(evt); this.lastMouseY = this.pageY(evt); this.moveAnimationBlocked = true; if (this.mousedownTime2 != null) { var now = (new Date()).getTime(); if (now - this.mousedownTime2 < this.doubleclickTime2) { this.doubleclick(evt); return; } } this.mousedownTime2 = (new Date()).getTime(); if (evt.shiftKey) { this.selectRectLeft = this.pageX(evt); this.selectRectTop = this.pageY(evt); this.selectRect = document.createElement("div"); this.selectRect.style.left = this.selectRectLeft + "px"; this.selectRect.style.top = this.selectRectTop + "px"; this.selectRect.style.border = "1px solid gray"; if (!this.internetExplorer) { this.selectRect.style.opacity = 0.5; this.selectRect.style.backgroundColor = "white"; } this.selectRect.style.position = "absolute"; this.map.parentNode.appendChild(this.selectRect); } else { //this.hideOverlays(); this.startMoveX = this.moveX - (this.pageX(evt)) / this.factor / this.sc; this.startMoveY = this.moveY - (this.pageY(evt)) / this.factor / this.sc; this.movestarted = true; 
jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.dragstart); jsMaps.Native.setCursor(this.clone,"grabbing"); } return false; }; this.mousemove = function (evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { window.returnValue = false; // The IE way } var leftClick = jsMaps.Native.Event.leftClick(evt); if (leftClick == false || (this.leftClick !== null && this.leftClick == false)) { this.movestarted = false; jsMaps.Native.setCursor(this.clone,"grab"); return; } if (this.draggable == false) return; this.lastMouseX = this.pageX(evt); this.lastMouseY = this.pageY(evt); if (evt.shiftKey) { if (this.selectRect) { this.selectRect.style.width = Math.abs(this.pageX(evt) - this.selectRectLeft) + "px"; this.selectRect.style.height = Math.abs(this.pageY(evt) - this.selectRectTop) + "px"; if (this.pageX(evt) < this.selectRectLeft) { this.selectRect.style.left = this.pageX(evt); } if (this.pageY(evt) < this.selectRectTop) { this.selectRect.style.top = this.pageY(evt); } jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.bounds_changed); jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.center_changed); } } else { if (this.movestarted) { jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.drag); this.lastMoveX = this.moveX; this.lastMoveY = this.moveY; this.lastMoveTime = new Date(); this.moveX = (this.pageX(evt)) / this.factor / this.sc + this.startMoveX; this.moveY = (this.pageY(evt)) / this.factor / this.sc + this.startMoveY; var center = new jsMaps.geo.Location(this.lat, this.lng); this.setCenter2(center, this.position.zoom); this.moveAnimationBlocked = false; jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.bounds_changed); jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.center_changed); } } return false; }; this.mouseup = function (evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { 
evt.returnValue = false; // The IE way } jsMaps.Native.setCursor(this.clone,"grab"); this.lastMouseX = this.pageX(evt); this.lastMouseY = this.pageY(evt); if (this.moveMarker) { this.moveMarker = null; } if (this.selectRect) { var p1 = this.XYTolatlng(this.selectRect.offsetLeft, this.selectRect.offsetTop + this.selectRect.offsetHeight); var p2 = this.XYTolatlng(this.selectRect.offsetLeft + this.selectRect.offsetWidth, this.selectRect.offsetTop); var inner_bounds = new jsMaps.Native.InnerBounds(p1, p2); this.setBounds(inner_bounds); this.selectRect.parentNode.removeChild(this.selectRect); this.selectRect = null; } // using this normalize some things are working better, others not so good. // delete it will solve some problems but bring other problems var now = new Date(evt.timeStamp); var timeDelta = now - this.lastMoveTime; if (this.wheelSpeedConfig["moveAnimateDesktop"] && timeDelta != 0) { if (this.movestarted) { if (this.moveAnimationBlocked == false) { var speedX = (this.lastMoveX - this.moveX) / timeDelta; var speedY = (this.lastMoveY - this.moveY) / timeDelta; var maxSpeed = 200; if (speedX > maxSpeed)speedX = maxSpeed; if (speedY > maxSpeed)speedY = maxSpeed; if (speedX < -maxSpeed)speedX = -maxSpeed; if (speedY < -maxSpeed)speedY = -maxSpeed; if (Math.abs(speedX) > this.wheelSpeedConfig["animateMinSpeed"] || Math.abs(speedY) > this.wheelSpeedConfig["animateMinSpeed"]) { this.animateMove(speedX, speedY); } } } } var that = this; var tempFunction = function () { if (that.movestarted) { jsMaps.Native.Event.trigger(that.mapParent,jsMaps.api.supported_events.dragend); jsMaps.Native.Event.trigger(that.mapParent,jsMaps.api.supported_events.idle); } that.movestarted = false; }; setTimeout(tempFunction, 1); }; this.startZoomTime = null; this.wheeling = false; this.zoomActive = true; /** * Mouse wheel * * @param evt */ this.mousewheel = function (evt) { if (evt.preventDefault) { evt.preventDefault(); // The W3C DOM way } else { evt.returnValue = false; // The IE way 
} this.mapParent.focus(); this.zoomActive = true; if (!evt) evt = window.event; if (evt.wheelDelta) { /* IE/Opera/Chrom. */ delta = evt.wheelDelta / 120; } else if (evt.detail) { /** Mozilla case. */ delta = -evt.detail / 3; if (this.lastWheelDelta * delta < 0) { if (!this.wheelSpeedConfig["digizoom"]) { delta = 0; } } this.lastWheelDelta = -evt.detail / 3; } var direction = (delta < 0) ? -1: 1; var that = this; this.wheeling = true; if (this.wheelSpeedConfig["digizoom"]) { this.discretZoom(direction, this.pageX(evt), this.pageY(evt)); return; } if (!this.startZoomTime) { this.startZoomTime = (new Date()); this.startZoomTime2 = (new Date()); this.oldZoom = this.zoom(); this.speed = 1; } var delta = (new Date()) - this.startZoomTime; var delta2 = (new Date()) - this.startZoomTime2; var tempFunc = function () { that.startZoomTime = new Date(); }; this.startZoomTime = new Date(); if (delta > 300) { this.startZoomTime2 = new Date(); this.oldZoom = this.zoom(); this.speed = 1; delta2 = 0.1; } this.speed = this.speed * 2; if (this.speed > 5) this.speed = 5; var zoom = this.oldZoom + delta2 / 3000 * this.speed * direction; if (zoom > this.position.maxZoom) zoom = this.position.maxZoom; if (zoom < this.position.minZoom) zoom = this.position.minZoom; this.scaleDivExec(); this.centerAndZoomXY(this.center(), zoom, this.pageX(evt), this.pageY(evt)); }; this.zoomTimeouts = []; this.discretZoomBlocked = false; this.discretZoom = function (direction, x, y) { var that = this; if (this.discretZoomBlocked) return; var func = function () { that.discretZoomBlocked = false; }; this.zoomActive = true; this.discretZoomBlockedTimeout = setTimeout(func, 400); this.discretZoomBlocked = true; var steps = 20; for (var i = 1; i <= steps; i++) { if (this.zoomTimeouts[i]) { clearTimeout(this.zoomTimeouts[i]); } } var start = this.zoom(); var end = (direction == 1) ? 
Math.ceil(this.zoom() + 0.9): Math.floor(this.zoom() - 0.9); var q; if (direction == -1) { for (q in this.layers) { if (this.layers.hasOwnProperty(q) == false) continue; if (q > start &&typeof this.layers[q]!='undefined') { this.map.removeChild(this.layers[q]['layerDiv']); this.layers[q] = false; delete this.layers[q]; } } } var delta = Math.abs(start - end); this.scaleDivExec(); var lastDZ = 0; for (var ii = 1; ii <= steps; ii++) { var rad = ii / steps * Math.PI / 2; var dz = direction * (Math.sin(rad)) * delta; var ddz = dz - lastDZ; this.zoomTimeouts[i] = this.discretZoomExec(x, y, ddz, ii, steps); lastDZ = dz; } if (end >= this.tileSource.minzoom && end <= this.tileSource.maxzoom) { jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.zoom_changed); jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.bounds_changed); jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.center_changed); } }; this.discretZoomExec = function (x, y, dz, i, steps) { var that = this; var tempFunc = function () { var zoom = that.zoom() + dz; if (i == steps) zoom = Math.round(zoom); that.centerAndZoomXY(that.center(), zoom, x, y); }; return setTimeout(tempFunc, i*15); }; /** * Map continues moving after mouse up * * @param speedX * @param speedY * @param faktor */ this.animateMove = function (speedX, speedY, faktor) { if (jsMaps.Native.Browser.ie && jsMaps.Native.Utils.TRANSFORM == false) return; if (typeof faktor == 'undefined') faktor=Math.pow(2,this.zoom()); clearTimeout(this.animateMoveTimeout); if (Math.abs(speedX) < this.wheelSpeedConfig["animateMinSpeed"]/faktor && Math.abs(speedY) < this.wheelSpeedConfig["animateMinSpeed"]/faktor){ this.moving=false; this.setCenter2(this.position.center, this.position.zoom); return; } var framesPerSecond=50; this.moveX += -speedX; this.moveY += -speedY; that = this; var speed=Math.sqrt(Math.pow(speedX,2) + Math.pow(speedY,2)); var fx=speedX/speed; var fy=speedY/speed; var tempFunction = 
function() { var newSpeedX=speedX - fx*that.wheelSpeedConfig["moveAnimationSlowdown"]/faktor; var newSpeedY=speedY - fy*that.wheelSpeedConfig["moveAnimationSlowdown"]/faktor; that.animateMove(newSpeedX,newSpeedY,faktor); }; this.animateMoveTimeout = window.setTimeout(tempFunction, 1/framesPerSecond * 1000); this.setCenter2(this.position.center, this.position.zoom); }; //*** zoom animation *** this.autoZoomInTimeout = null; this.autoZoomStartTime = null; this.autoZoomIn = function (x, y, z) { if (this.autoZoomInTimeout) window.clearTimeout(this.autoZoomInTimeout); var stepwidth = 0.20; if (z < 0) { stepwidth = -stepwidth } var zoomGap = Math.abs(z) <= Math.abs(stepwidth); //this.hideOverlays(); var dzoom = stepwidth; var zoom = this.position.zoom + dzoom; zoom = Math.round(zoom * 1000) / 1000; if (zoomGap) { if (z < 0) { zoom = Math.floor(zoom); } else { zoom = Math.ceil(zoom - 0.2); } dzoom = zoom - this.position.zoom; } factor = Math.pow(2, zoom); var zoomCenterDeltaX = (x - this.mapLeft) - this.width / 2; var zoomCenterDeltaY = (y - this.mapTop) - this.height / 2; var f = Math.pow(2, dzoom); var dx = zoomCenterDeltaX - zoomCenterDeltaX * f; var dy = zoomCenterDeltaY - zoomCenterDeltaY * f; var that = this; var now = new Date().getMilliseconds(); var timeDelta = (this.autoZoomStartTime) ? 
now - this.autoZoomStartTime: 0; this.autoZoomStartTime = now; var tempFunction; if (timeDelta < 100 || zoomGap) { if (zoom >= this.tileSource.minzoom && zoom <= this.tileSource.maxzoom) { this.moveX = this.moveX + dx / factor; this.moveY = this.moveY + dy / factor; } var center = new jsMaps.geo.Location(this.lat, this.lng); if (zoom > this.tileSource.maxzoom) zoom = this.tileSource.maxzoom; if (zoom < this.tileSource.minzoom) zoom = this.tileSource.minzoom; tempFunction = function () { that.setCenter2(center, zoom); }; setTimeout(tempFunction, 1); } var newz = z - dzoom; if (!zoomGap) { tempFunction = function () { that.autoZoomIn(x, y, newz); }; this.autoZoomInTimeout = window.setTimeout(tempFunction, 40); } }; /** * same as centerAndZoom but zoom center is not map center * * @param center * @param zoom * @param x * @param y */ this.centerAndZoomXY = function (center, zoom, x, y) { var factor = Math.pow(2, zoom); var zoomCenterDeltaX = x - this.mapsize.width / 2; var zoomCenterDeltaY = y - this.mapsize.height / 2; var dzoom = zoom - this.zoom(); var f = Math.pow(2, dzoom); var dx = zoomCenterDeltaX - zoomCenterDeltaX * f; var dy = zoomCenterDeltaY - zoomCenterDeltaY * f; if (zoom >= this.tileSource.minzoom && zoom <= this.tileSource.maxzoom) { this.moveX = this.moveX + dx / factor; this.moveY = this.moveY + dy / factor; } var center = new jsMaps.geo.Location(this.lat, this.lng); if (zoom > this.tileSource.maxzoom) zoom = this.tileSource.maxzoom; if (zoom < this.tileSource.minzoom) zoom = this.tileSource.minzoom; this.setCenter2(center, zoom); }; /** * Set the map coordinates and zoom * * @param center * @param zoom */ this.centerAndZoom = function (center, zoom) { this.moveX = 0; this.moveY = 0; if (zoom > this.tileSource.maxzoom) zoom = this.tileSource.maxzoom; if (zoom < this.tileSource.minzoom) zoom = this.tileSource.minzoom; this.record(); this.setCenterNoLog(center, zoom); this.scaleDivExec(); }; /** * @param center * @param zoom */ this.setCenter3 = 
function (center, zoom) {
    this.moveX = 0;
    this.moveY = 0;
    this.setCenterNoLog(center, zoom);
};

/**
 * same as setCenter but moveX,moveY are not reset (for internal use)
 *
 * @param center
 * @param zoom
 */
this.setCenter2 = function (center, zoom) {
    this.record();
    this.setCenterNoLog(center, zoom);
};

/**
 * same as setCenter but no history item is generated (for undo, redo).
 * Clamps zoom, stores the position and triggers a re-render via layer().
 *
 * @param center
 * @param zoom
 */
this.setCenterNoLog = function (center, zoom) {
    this.position.center = center;
    this.lat = center.lat;
    this.lng = center.lng;
    zoom = parseFloat(zoom);
    if (zoom > this.tileSource.maxzoom) zoom = this.tileSource.maxzoom;
    if (zoom < this.tileSource.minzoom) zoom = this.tileSource.minzoom;
    this.position.zoom = zoom;
    this.layer(this.map, this.lat, this.lng, this.moveX, this.moveY, zoom);
    this.executeCallbackFunctions();
};

/**
 * Getter/setter for the map center (zoom is kept).
 * When the map has a pending pixel pan offset, the panned position is returned.
 *
 * @param center optional new center
 * @returns {*} current center as jsMaps.geo.Location
 */
this.center = function (center) {
    if (center) {
        //this.position.center=center;
        this.centerAndZoom(center, this.getZoom());
    }
    if (this.moveX != 0 || this.moveY != 0) {
        return new jsMaps.geo.Location(this.movedLat, this.movedLng);
    }
    return this.position.center;
};

// Getter/setter for the zoom level.
this.zoom = function (zoom) {
    if (zoom) { this.centerAndZoom(this.position.center, zoom); }
    return this.position.zoom;
};

// Pan the map by (x, y) screen pixels; converts to tile units via factor/scale.
this.moveXY = function (x, y) {
    this.moveX = parseFloat(x) / this.factor / this.sc + this.moveDelayedX;
    this.moveY = parseFloat(y) / this.factor / this.sc + this.moveDelayedY;
    this.setCenter2(this.center(), this.zoom());
};

// Switch the base tile source; clears all cached layers.
this.tiles = function (tileSource) {
    this.clearMap();
    this.tileSource = tileSource;
};

this.tileOverlays = [];

// Register an additional tile overlay source; returns the stored overlay.
this.addTilesOverlay = function (t) {
    this.tileOverlays.push(t);
    var ov = this.tileOverlays[this.tileOverlays.length - 1];
    this.clearMap();
    return ov;
};

// Remove a previously added tile overlay (compared by identity).
this.removeTilesOverlay = function (ov) {
    //alert(this.tileOverlays.length);
    for (var i = 0; i < this.tileOverlays.length; i++) {
        var overlay = this.tileOverlays[i];
        if (ov == overlay) {
            //ov.clear();
            this.tileOverlays.splice(i, 1);
            break;
        }
    }
    this.clearMap();
};

// Current map center; prefers the panned (moveX/moveY) position when set.
this.getCenter = function () {
    var center;
    if (this.moveX != 0 || this.moveY != 0) {
        center = new jsMaps.geo.Location(this.movedLat, this.movedLng);
    } else {
        if (!this.position.center) {
        } else {
            center = this.position.center;
        }
    }
    return center;
};

/**
 * read bounds. The Coordinates at corners of the map div sw, ne would be better (change it!)
 *
 * @param b optional bounds to set
 * @returns {*} current bounds when called without argument
 */
this.mapBounds = function (b) {
    if (b) { this.setBounds(b); } else { return this.getBounds(); }
};

// Bounds of the visible map: south-west = bottom-left pixel, north-east = top-right pixel.
this.getBounds = function () {
    var sw = this.XYTolatlng(0, this.height);
    var ne = this.XYTolatlng(this.width, 0);
    return new jsMaps.Native.InnerBounds(sw, ne);
};

/**
 * like setCenter but with two gps points
 *
 * @param b bounds (sw/ne) to fit into the viewport
 */
this.setBounds = function (b) {
    //this.normalize();
    //the setbounds should be a mathematical formula and not guessing around.
    //if you know this formula please add it here.
    //this.getSize();
    var p1 = b.sw();
    var p2 = b.ne();
    var minlat = p1.lat;
    var maxlat = p2.lat;
    var minlng = p1.lng;
    var maxlng = p2.lng;
    // work in Mercator-projected latitude (degrees, via lat2y) for linear math
    var minlat360 = lat2y(minlat);
    var maxlat360 = lat2y(maxlat);
    var centerLng = (minlng + maxlng) / 2;
    var centerLat360 = (minlat360 + maxlat360) / 2;
    var centerLat = y2lat(centerLat360);
    var center = new jsMaps.geo.Location(centerLat, centerLng);
    var extendY = Math.abs(maxlat360 - minlat360);
    var extendX = Math.abs(maxlng - minlng);
    var extend, screensize;
    // pick the axis that constrains the zoom
    if (extendX / this.width > extendY / this.height) {
        extend = extendX;
        screensize = this.width;
    } else {
        extend = extendY;
        screensize = this.height;
    }
    var scalarZoom = 360 / extend;
    var screenfactor = 512 / screensize;
    var zoom = (Math.log(scalarZoom / screenfactor)) / (Math.log(2)) + 1;
    if (zoom > this.tileSource.maxzoom) zoom = this.tileSource.maxzoom;
    if (zoom < this.tileSource.minzoom) zoom = this.tileSource.minzoom;
    if (this.position.center) {
        if (this.wheelSpeedConfig["rectShiftAnimate"]) {
            this.animatedGoto(center, zoom,
                this.wheelSpeedConfig["rectShiftAnimationTime"]);
        } else {
            this.centerAndZoom(center, zoom);
        }
    } else {
        this.centerAndZoom(center, zoom);
    }
};

this.animatedGotoStep = null;
this.animatedGotoTimeout = [];

/**
 * Animate the map to a new center/zoom by interpolating in fixed 10 ms steps.
 * Pending animation timeouts from a previous call are cancelled first.
 *
 * @param newCenter target center
 * @param newZoom   target zoom
 * @param time      total animation time in ms (steps = time / 10)
 */
this.animatedGoto = function (newCenter, newZoom, time) {
    //this.hideOverlays();
    var zoomSteps = time / 10;
    var oldCenter = this.getCenter();
    var newLat = newCenter.lat;
    var newLng = newCenter.lng;
    var oldLat = oldCenter.lat;
    var oldLng = oldCenter.lng;
    var oldZoom = this.getZoom();
    var dLat = (newLat - oldLat) / zoomSteps;
    var dLng = (newLng - oldLng) / zoomSteps;
    var dZoom = (newZoom - oldZoom) / zoomSteps;
    var dMoveX = this.moveX / zoomSteps;
    var dMoveY = this.moveY / zoomSteps;
    var oldMoveX = this.moveX;
    var oldMoveY = this.moveY;
    this.animatedGotoStep = 0;
    var that = this;
    // cancel any previous animation (NOTE(review): "timeout" is an implicit global — confirm intended)
    while (timeout = this.animatedGotoTimeout.pop()) {
        clearTimeout(timeout);
    }
    for (var i = 0; i < zoomSteps; i++) {
        var tempFunction = function () {
            that.animatedGotoExec(oldLat, oldLng, oldZoom, dLat, dLng, dZoom, oldMoveX, oldMoveY, dMoveX, dMoveY)
        };
        this.animatedGotoTimeout[i] = window.setTimeout(tempFunction, 10 * i);
    }
};

// One interpolation step of animatedGoto; advances animatedGotoStep.
this.animatedGotoExec = function (oldLat, oldLng, oldZoom, dLat, dLng, dZoom, oldMoveX, oldMoveY, dMoveX, dMoveY) {
    this.moveX = -dMoveX;
    this.moveY = -dMoveY;
    var lat = oldLat + dLat * this.animatedGotoStep;
    var lng = oldLng + dLng * this.animatedGotoStep;
    var zoom = oldZoom + dZoom * this.animatedGotoStep;
    this.animatedGotoStep++;
    this.centerAndZoom(new jsMaps.geo.Location(lat, lng), zoom);
};

// Current (possibly fractional) zoom level.
this.getZoom = function () { return this.position.zoom; };

// Current integer zoom level used for tile layers.
this.getIntZoom = function () { return this.intZoom; };

/**
 * WGS84 to x,y at the div calculation
 *
 * @param point location with lat/lng
 * @returns {Array} array-like with "x"/"y" pixel offsets inside the map div
 */
this.latlngToXY = function (point) {
    var lat = point.lat;
    var lng = point.lng;
    // wrap latitude into the valid range
    if(lat >90) lat =lat -180;
    if(lat <-90) lat =lat +180;
    var worldCenter = this.getCenter();
    var intZoom = this.getIntZoom();
    var tileCenter = getTileNumber(worldCenter.lat, worldCenter.lng, intZoom);
    var
        tileTest = getTileNumber(lat, lng, intZoom);
    // tile-space distance from the center, scaled to pixels
    var x = (tileCenter[0] - tileTest[0]) * this.tileW * this.sc - this.width / 2;
    var y = (tileCenter[1] - tileTest[1]) * this.tileW * this.sc - this.height / 2;
    point = [];
    point["x"] = -x;
    point["y"] = -y;
    return (point);
};

/**
 * screen (map div) coordinates to lat,lng
 *
 * @param x pixel x inside the map div
 * @param y pixel y inside the map div
 * @returns {jsMaps.geo.Location}
 * @constructor
 */
this.XYTolatlng = function (x, y) {
    var center = this.getCenter();
    if (!center) return;
    var factor = Math.pow(2, this.intZoom);
    var centerLat = center.lat;
    var centerLng = center.lng;
    var xypoint = getTileNumber(centerLat, centerLng, this.intZoom);
    var dx = x - this.width / 2;
    var dy = y - this.height / 2;
    // convert tile coordinates back to degrees (inverse Mercator for latitude)
    var lng = (xypoint[0] + dx / this.tileW / this.sc) / factor * 360 - 180;
    var lat360 = (xypoint[1] + dy / this.tileH / this.sc) / factor * 360 - 180;
    var lat = -y2lat(lat360);
    if(lat >90) lat =lat -180;
    if(lat <-90) lat =lat +180;
    return new jsMaps.geo.Location(lat, lng);
};

// Convert a mouse event to a map coordinate.
this.mouseToLatLng = function (evt) {
    var x = this.pageX(evt);
    var y = this.pageY(evt);
    return this.XYTolatlng(x, y);
};

/**
 * for iPhone to make page fullscreen (maybe not working)
 */
this.reSize = function () {
    var that = this;
    //setTimeout("window.scrollTo(0,1)",500);
    var tempFunction = function () { that.getSize(that) };
    window.setTimeout(tempFunction, 1050);
};

/**
 * read the size of the DIV that will contain the map
 */
this.getSize = function () {
    this.width = this.map.parentNode.offsetWidth;
    this.height = this.map.parentNode.offsetHeight;
    // accumulate offsets up the offsetParent chain to get the absolute position
    var obj = this.map;
    var left = 0;
    var top = 0;
    do {
        left += obj.offsetLeft;
        top += obj.offsetTop;
        obj = obj.offsetParent;
    } while (obj.offsetParent);
    this.mapTop = top;
    this.mapLeft = left;
};

//for undo,redo
this.recordArray = [];

// Push the current position (lat, lng, zoom) onto the undo/redo history.
this.record = function () {
    var center = this.getCenter();
    if (center) {
        var lat = center.lat;
        var lng = center.lng;
        var zoom = this.getZoom();
        var item = [lat, lng, zoom];
        this.recordArray.push(item);
    }
};

// Jump to history entry i (undo/redo); does not generate new history items.
this.play = function
    (i) {
    if (i < 1) return;
    if (i > (this.recordArray.length - 1)) return;
    var item = this.recordArray[i];
    var center = new jsMaps.geo.Location(item[0], item[1]);
    //undo,redo must not generate history items
    this.moveX = 0;
    this.moveY = 0;
    this.setCenter3(center, item[2]);
};

/**
 * Simple countdown helper: start() stamps the current time, isTimeRunning()
 * reports true once per expiry of the configured interval.
 *
 * @param interval countdown length in ms
 */
this.myTimer = function(interval) {
    /**
     * reset/stop countdown.
     */
    this.reset = function() {
        this._isRunning = false;
    };
    /**
     * Is countdown running? Returns true only once the interval has elapsed,
     * and stops the timer as a side effect.
     */
    this.isTimeRunning = function() {
        if(false == this._isRunning) return false;
        var now = new Date();
        if(this.time + this.myInterval > now.getTime()) return false;
        this._isRunning = false;
        return true;
    };
    /**
     * Start countdown.
     */
    this.start = function() {
        this._isRunning = true;
        var d = new Date();
        this.time = d.getTime();
    };
    /**
     * Setter/getter for the interval length (ms).
     */
    this.interval = function(arg1) {
        if(typeof(arg1) != "undefined") {
            this.myInterval = parseInt(arg1);
        }
        return this.myInterval;
    };
    // Constructor
    this.reset();
    this.interval(interval);
    return this;
};

/*================== LAYERMANAGER (which layer is visible) =====================
Description:
This method decides which zoom layer is visible at the moment.
It has the same parameters as the "draw" method, but no "intZoom".
This Layers are NOT tile or vector overlays
========================================================================= */
this.layerDrawLastFrame = null;
this.doTheOverlays = true;
this.finalDraw = false;
this.layerOldZoom = 0;
this.moveDelayedX = 0;
this.moveDelayedY = 0;
this.layerTimer = new this.myTimer(400);

// Layer manager: throttles redraws, chooses which zoom layer(s) to draw/show
// and triggers overlay rendering. Re-schedules itself for the final frame.
this.layer = function (map, lat, lng, moveX, moveY, zoom) {
    var delta = (new Date()) - this.startZoomTime;
    this.stopRenderOverlays();
    if (!zoom) zoom = this.getZoom();
    var intZoom = (this.wheelSpeedConfig["digizoom"]) ?
        Math.floor(zoom): Math.round(zoom);
    if (this.layerDrawLastFrame) {
        window.clearTimeout(this.layerDrawLastFrame);
        this.layerDrawLastFrame = null;
    }
    var that = this;
    var tempFunction;
    // throttle: while the timer runs, skip drawing but remember the pan offset;
    // always schedule one last frame so the final state gets rendered
    if (this.layerTimer.isTimeRunning() || this.finalDraw == false ) {
        //the last frames must be drawn to have good result
        tempFunction = function () {
            if(intZoom == that.visibleZoom){ that.finalDraw = true; }
            that.layer(map, lat, lng, moveX, moveY, zoom);
        };
        this.visibleZoom=parseInt(this.visibleZoom);
        if(!that.finalDraw){
            this.layerDrawLastFrame = window.setTimeout(tempFunction, 250);
        }
        if (this.layerTimer.isTimeRunning()) {
            this.moveDelayedX = moveX; //used in method moveXY
            this.moveDelayedY = moveY;
            return;
        }
    }
    this.layerTimer.start();
    this.moveDelayedX = 0;
    this.moveDelayedY = 0;
    // hide all zoom layers; the ones to show are re-enabled below
    for (var i = 0; i < 22; i++) {
        if (this.layers[i]) this.layers[i]["layerDiv"].style.visibility = "hidden";
    }
    // when zooming out onto a not-yet-complete layer, keep showing the next one
    if (this.layerOldZoom > zoom && !this.finalDraw) {
        if (this.layers[intZoom] && !this.layers[intZoom]["loadComplete"]) {
            this.visibleZoom = intZoom + 1;
        }
    }
    this.intZoom = intZoom;
    if (intZoom > this.tileSource.maxzoom) intZoom = this.tileSource.maxzoom;
    if (!this.visibleZoom) {
        this.visibleZoom = intZoom;
        this.oldIntZoom = intZoom;
    }
    this.factor = Math.pow(2, intZoom);
    var zoomDelta = zoom - intZoom;
    this.sc = Math.pow(2, zoomDelta);
    // Calculate the next displayed layer
    this.loadingZoomLevel = intZoom;
    if (this.visibleZoom < intZoom) {
        if (Math.abs(this.visibleZoom - intZoom) < 4) {
            this.loadingZoomLevel = parseInt(this.visibleZoom) + 1;
        }
    }
    // draw the layer with current zoomlevel
    this.draw(this.map, lat, lng, moveX, moveY, this.loadingZoomLevel, zoom, this.tileSource.src);
    this.layers[this.loadingZoomLevel]["layerDiv"].style.visibility = "";
    //if the current zoomlevel is not loaded completely, there must be a second layer displayed
    if (intZoom != this.visibleZoom && typeof this.layers[this.visibleZoom] != 'undefined') {
        if (this.visibleZoom < intZoom + 2) {
            this.draw(this.map, lat, lng, moveX, moveY,
                this.visibleZoom, zoom, this.tileSource.src,true);
            this.layers[this.visibleZoom]["layerDiv"].style.visibility = "";
        } else {
            this.layers[this.visibleZoom]["layerDiv"].style.visibility = "hidden";
        }
    }
    // pre Load for zoom out
    if (intZoom == this.visibleZoom && typeof this.layers[this.visibleZoom - 1] != 'undefined') {
        this.draw(this.map, lat, lng, moveX, moveY, this.visibleZoom - 1, zoom, this.tileSource.src,true);
        this.layers[this.visibleZoom - 1]["layerDiv"].style.visibility = "hidden";
    }
    // once the loading layer is complete, promote it to the visible layer
    if (this.layers[this.loadingZoomLevel]["loadComplete"]) {
        if (this.visibleZoom != intZoom) {
            this.layers[this.loadingZoomLevel]["loadComplete"] = false;
            this.hideLayer(this.visibleZoom);
            this.visibleZoom = this.loadingZoomLevel;
        }
    }
    if (this.quadtreeTimeout) clearTimeout(this.quadtreeTimeout);
    // still stepping through intermediate zoom levels: schedule another pass
    if (this.loadingZoomLevel != intZoom) {
        that = this;
        tempFunction = function () { that.layer(map, lat, lng, moveX, moveY); };
        this.quadtreeTimeout = window.setTimeout(tempFunction, 100);
    }
    if (this.oldIntZoom != this.intZoom) {
        if (this.oldIntZoom != this.visibleZoom) {
            this.hideLayer(this.oldIntZoom);
        }
    }
    this.oldIntZoom = intZoom;
    // render overlays only while it is cheap (skip when a render took > 10 ms)
    if (this.doTheOverlays || this.finalDraw || this.layerOldZoom == this.zoom()) {
        var startTime = new Date();
        this.lastDX = this.moveX;
        this.lastDY = this.moveY;
        this.renderOverlays();
        this.layerOldZoom = this.zoom();
        var duration = (new Date() - startTime);
        this.doTheOverlays = !(duration > 10 && !this.finalDraw);
    } else {
        this.hideOverlays();
    }
    that = this;
    var func = function () { that.blocked = false; that.layerTimer.reset();};
    if (this.layerBlockTimeout) clearTimeout(this.layerBlockTimeout);
    this.layerBlockTimeout = window.setTimeout(func, 20);
    this.finalDraw = false;
};

/* ==================== DRAW (speed optimized!!!)===============================
This function draws one layer. It is highly optimized for iPhone.
Please DO NOT CHANGE things except you want to increase speed!
For optimization you need a benchmark test.
How it works:
The position of the images is fixed. The layer (not the images) is moved because of better performance
Even zooming does not change position of the images, if 3D CSS is active (webkit).
this method uses "this.layers" , "this.oldIntZoom", "this.width", "this.height";
===================================================================================*/
// Draw one tile layer for integer zoom level intZoom at fractional zoom "zoom".
// tileFunc(x, y, z) builds a tile URL; preLoad=true loads placeholder images only.
this.draw = function (map, lat, lng, moveX, moveY, intZoom, zoom, tileFunc,preLoad) {
    this.framesCounter++;
    var that = this;
    var tempFunction = function () { that.framesCounter-- };
    if (typeof preLoad == 'undefined') preLoad = false;
    window.setTimeout(tempFunction, 1000);
    var factor = Math.pow(2, intZoom);
    var latDelta,lngDelta,layerDiv;
    //create new layer
    if (!this.layers[intZoom]) {
        var tile = getTileNumber(lat, lng, intZoom);
        this.layers[intZoom] = [];
        this.layers[intZoom]["startTileX"] = tile[0];
        this.layers[intZoom]["startTileY"] = tile[1];
        this.layers[intZoom]["startLat"] = lat2y(lat);
        this.layers[intZoom]["startLng"] = lng;
        this.layers[intZoom]["images"] = {};
        layerDiv = document.createElement("div");
        layerDiv.setAttribute("zoomlevel", intZoom);
        layerDiv.style.position = "relative";
        //higher zoomlevels are placed in front of lower zoomlevels.
        //no z-index in use. z-index could give unwanted side effects to your application if you use this lib.
        var layers = map.childNodes;
        var appended = false;
        for (var i = layers.length - 1; i >= 0; i--) {
            var l = layers.item(i);
            if (l.getAttribute("zoomlevel") < intZoom) {
                this.map.insertBefore(layerDiv, l);
                appended = true;
                //break;
            }
        }
        if (!appended) this.map.appendChild(layerDiv);
        //for faster access, a reference to this div is kept in an array
        this.layers[intZoom]["layerDiv"] = layerDiv;
        latDelta = 0;
        lngDelta = 0;
    } else {
        //The layer with this zoomlevel already exists. If there are new lat,lng values, the lat,lng delta is calculated
        layerDiv = this.layers[intZoom]["layerDiv"];
        latDelta = lat2y(lat) - this.layers[intZoom]["startLat"];
        lngDelta = lng - this.layers[intZoom]["startLng"];
    }
    layerDiv.style.visibility = "hidden";
    layerDiv.style.opacity = 1;
    //if the map is moved with drag/drop, the moveX,moveY gives the movement in Pixel (not degree as lat/lng)
    //here the real values of lat, lng are calculated
    //this.movedLng = (this.layers[intZoom]["startTileX"] / factor - moveX / this.tileW) * 360 - 180 + lngDelta;
    // NOTE(review): "ttt" is computed but never used — confirm it can be removed
    var ttt = this.latlngToXY(this.getCenter());
    this.movedLng = (this.layers[intZoom]["startTileX"] / factor - moveX / this.tileW) * 360 - 180 + lngDelta;
    var movedLat360 = (this.layers[intZoom]["startTileY"] / factor - moveY / this.tileH) * 360 - 180 - latDelta;
    this.movedLat = -y2lat(movedLat360); // -latDelta; //the bug
    // calculate real x,y
    tile = getTileNumber(this.movedLat, this.movedLng, intZoom);
    var x = tile[0];
    var y = tile[1];
    var intX = Math.floor(x);
    var intY = Math.floor(y);
    var startX = this.layers[intZoom]["startTileX"];
    var startY = this.layers[intZoom]["startTileY"];
    var startIntX = Math.floor(startX);
    var startIntY = Math.floor(startY);
    var startDeltaX = -startX + startIntX;
    var startDeltaY = -startY + startIntY;
    var dx = x - startX;
    var dy = y - startY;
    var dxDelta = dx - startDeltaX;
    var dyDelta = dy - startDeltaY;
    //set all images to hidden (only in Array) - the values are used later in this function
    for (var vimg in this.layers[intZoom]["images"]) {
        this.layers[intZoom]["images"][vimg]["visibility"] = false;
    }
    //for debug only
    var width = this.width;
    var height = this.height;
    var zoomDelta = zoom - intZoom;
    // NOTE(review): "sc" is assigned without var here (implicit global) — confirm intended
    sc = Math.pow(2, zoomDelta);
    if (sc < 0.5) sc = 0.5;
    //here the bounds of the map are calculated.
    //there is NO preload of images. Preload makes everything slow
    var minX = Math.floor((-width / 2 / sc) / this.tileW + dxDelta);
    var maxX = Math.ceil((width / 2 / sc) / this.tileW + dxDelta);
    var minY = Math.floor((-height / 2 / sc) / this.tileH + dyDelta);
    var maxY = Math.ceil((height / 2 / sc) / this.tileH + dyDelta);
    var minsc;
    //now the images are placed on to the layer
    for (var i = minX; i < maxX; i++) {
        for (var j = minY; j < maxY; j++) {
            var xxx = Math.floor(startX + i);
            var yyy = Math.floor(startY + j);
            // The world is recursive. West of America is Asia.
            var xx = xxx % factor;
            var yy = yyy;
            if (xx < 0) xx = xx + factor; // modulo function gives negative value for negative numbers
            if (yy < 0) continue;
            if (yy >= factor) continue;
            var src = tileFunc(xx, yy, intZoom);
            var id = src + "-" + xxx + "-" + yyy;
            //if zoom out, without this too many images are loaded
            if (this.wheelSpeedConfig["digizoom"]) { minsc=1; }else{ minsc=0.5; }
            // draw images only if they don't exist on the layer
            if (this.layers[intZoom]["images"][id] == null && sc >=minsc) {
                var img = document.createElement("img");
                img.style.visibility = "hidden";
                if (this.discretZoomBlocked == true) {
                    jsMaps.Native.Dom.addClass(img,'map-image no-anim');
                } else {
                    jsMaps.Native.Dom.addClass(img,'map-image');
                    if (jsMaps.Native.Browser.any3d && jsMaps.Native.Utils.TRANSITION != false) {
                        // after the CSS transition finishes, disable further animation on the tile
                        var fn = function (evt) {
                            jsMaps.Native.Event.preventDefault(evt);
                            jsMaps.Native.Event.stopPropagation(evt);
                            jsMaps.Native.Dom.addClass(this,'no-anim');
                        };
                        img.addEventListener(jsMaps.Native.Utils.TRANSITION_END, fn, false);
                    }
                }
                img.style.left = i * this.tileW + "px";
                img.style.top = j * this.tileH + "px";
                img.style.width = this.tileW + "px";
                img.style.height = this.tileH + "px";
                // add img before SVG, SVG will be visible
                if (layerDiv.childNodes.length > 0) {
                    layerDiv.insertBefore(img, layerDiv.childNodes.item(0));
                } else {
                    layerDiv.appendChild(img);
                }
                // To increase performance all references are in an array
                this.layers[intZoom]["images"][id] = {};
                this.layers[intZoom]["images"][id]["img"] = img;
                this.layers[intZoom]["images"][id]["array"] = [];
                this.layers[intZoom]["images"][id]["array"].push(img);
                this.layers[intZoom]["loadComplete"] = false;
                //tileOverlays: clone the tile image for each registered overlay source
                for (var ov in this.tileOverlays) {
                    if (this.tileOverlays.hasOwnProperty(ov) == false) continue;
                    var ovObj = this.tileOverlays[ov];
                    var ovImg = img.cloneNode(true);
                    var imgSrc = ovObj.src(xx, yy, intZoom);
                    var ovId = id + "_" + ov;
                    jsMaps.Native.Event.attach(ovImg, "load", this.imgLoaded, this, false);
                    if (this.discretZoomBlocked == true) {
                        jsMaps.Native.Dom.addClass(ovImg,'map-image no-anim');
                    } else {
                        jsMaps.Native.Dom.addClass(ovImg,'map-image');
                    }
                    ovImg.setAttribute("src", imgSrc);
                    ovImg.setAttribute("overlay", ov);
                    layerDiv.appendChild(ovImg);
                    this.layers[intZoom]["images"][id]["array"].push(ovImg);
                }
                // if the images are loaded, they will get visible in the imgLoad function
                if (preLoad == false) {
                    jsMaps.Native.Event.attach(img, "load", this.imgLoaded, this, false);
                    jsMaps.Native.Event.attach(img, "error", this.imgError, this, false);
                }
                if (preLoad == false) {
                    img.setAttribute("src", src);
                } else {
                    // preload mode: show a 1x1 transparent gif, keep the real URL in data-src
                    img.setAttribute("src", "data:image/gif;base64,R0lGODlhAQABAIAAAP///////yH5BAEKAAEALAAAAAABAAEAAAICTAEAOwA=");
                    img.setAttribute("data-src", src);
                }
            } else if (this.layers[intZoom]["images"][id] != null && sc >=minsc) {
                // tile exists; if it was preloaded before, start the real load now
                var Img = this.layers[intZoom]["images"][id]["img"];
                if (Img.getAttribute('data-src')!=null && Img.getAttribute('data-src') != '' && preLoad == false) {
                    jsMaps.Native.Event.attach(Img, "load", this.imgLoaded, this, false);
                    jsMaps.Native.Event.attach(Img, "error", this.imgError, this, false);
                    Img.setAttribute('src',Img.getAttribute('data-src'));
                    Img.setAttribute('data-src','');
                }
            }
            // set all images that should be visible at the current view to visible (only in the layer);
            if(this.layers[intZoom]["images"][id]){
                this.layers[intZoom]["images"][id]["visibility"] = true;
            }
        }
    }
    // remove all images that are not loaded and are not visible in current view.
    // if the images is out of the current view, there is no reason to load it.
    // Think about fast moving maps. Moving is faster than loading.
    // If you started in London and are already in Peking, you don't care
    // about images that show vienna for example
    // this code is useless for webkit browsers (march 2010) because of bug:
    // https://bugs.webkit.org/show_bug.cgi?id=6656
    for (var vImg in this.layers[intZoom]["images"]) {
        if (this.layers[intZoom]["images"].hasOwnProperty(vImg) == false) continue;
        var overlayImages;
        var o;
        // preloaded tiles that are still pending: start the real load
        if (this.layers[intZoom]["images"][vImg]['img'].getAttribute('data-src')!=null && this.layers[intZoom]["images"][vImg]['img'].getAttribute('data-src') != '' && preLoad == false) {
            jsMaps.Native.Event.attach(this.layers[intZoom]["images"][vImg]['img'], "load", this.imgLoaded, this, false);
            jsMaps.Native.Event.attach(this.layers[intZoom]["images"][vImg]['img'], "error", this.imgError, this, false);
            this.layers[intZoom]["images"][vImg]['img'].setAttribute('src',this.layers[intZoom]["images"][vImg]['img'].getAttribute('data-src'));
            this.layers[intZoom]["images"][vImg]['img'].setAttribute('data-src','');
        }
        if (this.layers[intZoom]["images"][vImg]["visibility"]) {
            if (this.layers[intZoom]["images"][vImg]["array"][0].getAttribute("loaded") == "yes") {
                overlayImages = this.layers[intZoom]["images"][vImg]["array"];
                for (o = 0; o < overlayImages.length; o++) {
                    if (overlayImages[o].getAttribute("loaded") == "yes") {
                        overlayImages[o].style.visibility = "";
                    }
                }
            }
        } else {
            overlayImages = this.layers[intZoom]["images"][vImg]["array"];
            for (o = 0; o < overlayImages.length; o++) {
                this.layers[intZoom]["images"][vImg]["array"][o].style.visibility = "hidden";
                // delete img if not loaded and not needed at the moment
                if (this.layers[intZoom]["images"][vImg]["array"][o].getAttribute("loaded") != "yes") {
                    layerDiv.removeChild(this.layers[intZoom]["images"][vImg]["array"][o]);
                }
            }
            delete this.layers[intZoom]["images"][vImg]["img"];
            delete
                this.layers[intZoom]["images"][vImg];
        }
    }
    // Move and zoom the layer
    var sc = Math.pow(2, zoom - intZoom);
    this.scale = sc;
    var dxLeft = -(dxDelta * this.tileW);
    var dxTop = -(dyDelta * this.tileH);
    jsMaps.Native.Utils.setTransform(layerDiv,{x:dxLeft,y:dxTop},sc);
    jsMaps.Native.Utils.setTransformOrigin(layerDiv,{x:(-1 * dxLeft) ,y:(-1 * dxTop)});
    // Set the visibleZoom to visible
    layerDiv.style.visibility = "";
    // Not needed images are removed now. Lets check if all needed images are loaded already
    var notLoaded = 0;
    var total = 0;
    for (var q in this.layers[this.loadingZoomLevel]["images"]) {
        if (this.layers[this.loadingZoomLevel]["images"].hasOwnProperty(q) == false) continue;
        total++;
        var imgCheck = this.layers[this.loadingZoomLevel]["images"][q]["array"][0];
        if (!(imgCheck.getAttribute("loaded") == "yes")) notLoaded++;
    }
    if (notLoaded < 1) this.layers[this.loadingZoomLevel]["loadComplete"] = true;
    if (this.loadingZoomLevel == intZoom) this.imgLoadInfo(total, notLoaded);
};
// ====== END OF DRAW ======

/**
 * fade effect for int zoom change
 * @type {null}
 */
this.fadeOutTimeout = null;

// Fade a layer div out; uses a CSS transition when 3D support is available,
// otherwise steps the opacity down by 0.2 every 50 ms.
this.fadeOut = function (div, alpha) {
    if (jsMaps.Native.Browser.ielt9) return;
    if (alpha > 0 && jsMaps.Native.Browser.any3d && jsMaps.Native.Utils.TRANSITION != false) {
        div.style[jsMaps.Native.Utils.TRANSITION] = 'opacity 500ms ease-out';
        div.style.opacity = 0;
        var fn = function (evt) {
            jsMaps.Native.Event.preventDefault(evt);
            jsMaps.Native.Event.stopPropagation(evt);
            div.style[jsMaps.Native.Utils.TRANSITION] = "";
        };
        div.addEventListener(jsMaps.Native.Utils.TRANSITION_END, fn, false);
        return;
    }
    if (this.fadeOutTimeout) { clearTimeout(this.fadeOutTimeout); }
    if (alpha > 0) {
        div.style.opacity = alpha;
        var that = this;
        var tempFunction = function () { that.fadeOut(div, alpha - 0.2); };
        this.fadeOutTimeout = setTimeout(tempFunction, 50);
    } else {
        div.style.visibility = "hidden";
    }
};

/**
 * this function tries to remove images if they are not needed at the moment.
 * For webkit it's a bit useless because of bug
 *
 * https://bugs.webkit.org/show_bug.cgi?id=6656
 * For Firefox it really brings speed
 *
 * @param zoomlevel
 */
this.hideLayer = function (zoomlevel) {
    // fade out the layer div when it is not the current zoom level
    if (this.intZoom != zoomlevel) {
        if (this.layers[zoomlevel]) {
            this.layers[zoomlevel]["layerDiv"].style.opacity = 1;
            this.fadeOut(this.layers[zoomlevel]["layerDiv"], 1);
        }
    }
    if (!this.layers[zoomlevel]) { return; }
    // drop tiles of this layer that never finished loading
    for (var vImg in this.layers[zoomlevel]["images"]) {
        if (this.layers[zoomlevel]["images"].hasOwnProperty(vImg) == false) continue;
        if (typeof this.layers[zoomlevel]["images"][vImg] == 'undefined' || this.layers[zoomlevel]["images"][vImg] == false) continue;
        if (typeof this.layers[zoomlevel]["images"][vImg]["img"] == 'undefined' || this.layers[zoomlevel]["images"][vImg]["img"] == false) continue;
        if (this.layers[zoomlevel]["images"][vImg]["img"].getAttribute("loaded") == "yes") continue;
        if (zoomlevel != this.intZoom) {
            var overlayImages = this.layers[zoomlevel]["images"][vImg]["array"];
            for (var o = 0; o < overlayImages.length; o++) {
                this.layers[zoomlevel]["layerDiv"].removeChild(this.layers[zoomlevel]["images"][vImg]["array"][o]);
            }
            delete this.layers[zoomlevel]["images"][vImg]["img"];
            delete this.layers[zoomlevel]["images"][vImg];
        }
    }
};

/**
 * handling images of tile overlays
 *
 * @param evt
 */
this.ovImgLoaded = function (evt) {
    var img = (evt.target) ?
        evt.target: evt.srcElement;
    img.style.visibility = "";
};

/**
 * method is called if an image has finished loading (onload event).
 * Marks the tile loaded, tracks per-layer load progress and fires the
 * zoom/bounds/center/idle events once the layer is complete.
 *
 * @param evt
 */
this.imgLoaded = function (evt) {
    var img = evt.target || evt.srcElement;
    var loadComplete = true;
    img.style.visibility = "";
    img.setAttribute("loaded", "yes");
    if (!img.parentNode) return;
    img.style.opacity = 1;
    // count how many base tiles (overlay clones excluded) are still pending
    var notLoaded = 0;
    var total = 0;
    var zoomLevel = img.parentNode.getAttribute("zoomlevel");
    for (var i = 0; i < img.parentNode.getElementsByTagName("img").length; i++) {
        var theImg = img.parentNode.getElementsByTagName("img").item(i);
        if (theImg.getAttribute("overlay")) continue;
        total++;
        if (theImg.getAttribute("loaded") != "yes") {
            notLoaded++;
            loadComplete = false;
        }
    }
    if (notLoaded < total && jsMaps.Native.Browser.ielt9 && this.layers[this.getIntZoom()]) {
        this.layers[this.getIntZoom()]["layerDiv"].style.display = "";
    }
    var center;
    if (this.loadingZoomLevel == zoomLevel) {
        this.imgLoadInfo(total, notLoaded);
        // mouse-wheel zoom just finished: commit the panned position and notify listeners
        if (this.wheeling == true) {
            if (zoomLevel >= this.tileSource.minzoom && zoomLevel <= this.tileSource.maxzoom) {
                jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.zoom_changed);
                jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.bounds_changed);
                jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.center_changed);
                center = this.getCenter();
                this.moveX = 0;
                this.moveY = 0;
                this.position.center = center;
                this.lat = center.lat;
                this.lng = center.lng;
                this.position.zoom = this.getZoom();
                //this.centerAndZoom(this.getCenter(),this.getZoom());
            }
            this.wheeling = false;
        }
    }
    if (typeof this.layers[zoomLevel]!='undefined') {
        this.layers[zoomLevel]["loadComplete"] = loadComplete;
    }
    if (loadComplete) {
        // the whole layer is loaded: fire idle and commit the position
        if (this.zoomActive == true) {
            jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.idle);
            center = this.getCenter();
            this.moveX = 0;
            this.moveY = 0;
            this.position.center = center;
            this.lat = center.lat;
            this.lng = center.lng;
            this.position.zoom = this.getZoom();
            //this.centerAndZoom(this.getCenter(),this.getZoom());
            this.zoomActive = false;
        }
        if (this.loadingZoomLevel == zoomLevel) {
            this.hideLayer(this.visibleZoom);
            if (jsMaps.Native.Browser.ielt9) {
                this.hideLayer(this.visibleZoom + 1); //no idea why
            }
            this.visibleZoom = zoomLevel;
        }
    }
};

/**
 * Image load error (there maybe is an IE bug).
 * Replaces the failed tile with an embedded gray placeholder PNG and then
 * runs the normal load bookkeeping via imgLoaded.
 *
 * @param evt
 */
this.imgError = function (evt) {
    var img = (evt.target) ? evt.target: evt.srcElement;
    if (!img.parentNode) return;
    img.setAttribute("src", "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAQAAAAEABAMAAACuXLVVAAAAG1BMVEX19fUAAADW1tY9PT23t7eZmZlbW1t6enoeHh7MsBpLAAAD40lEQVR4nO2YsXPaMBSHhTGGEcUBPBqapB1xuV474lxzXXHv0mTESdN2jHtHyIgvbY4/u5aeZMDUQUCTLr9vQTGR9CHpPT1gDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgGdiMJIv1Xe7dH418FXLGrw1nG+JsXjAb+XzSmcXgZCPVKvGW2ZdUr5gtrdAxPuq5XDDAc4Fl558Ge0tkKRN1Wp47S36Vd1Fey+B+P5QtcJH98n/LBf46O8hEHzR3ZLv3o4Cit0E0jM9UnDO/4cA7+lp+RH3X17A4iwdypbtWXz48gK2x+KxbNU7LE8JLyhQc1k0oxEPWUrhdHw95w83smlNGXv9wXtcN1sWmA5XBY5/pA8/DQXqLuseyFa3yYKxaNxTjvstBbh/Iv7w1gyeyAOyB78yE3DarEoZOOqzuC/ndCenveNY7ofFe2n71PqUrq1uuYCd9WB27PlGAo0Oq1MCjG9ZMhNzqsUPmlLgjSsGctQcJgKhnNpO+8yESkucQ0EWDFFz6Z1uSwrQtrDkwFggnS36b6abJWIZ/lk8snB5lmpbPlXzNIqHvlSgrrJJ3SyvijkD0T8LB2mTIwewuHKqFZNUqUBDXWmGWUWseiJ2y2nJ/VhQI4Hx6iSbBUL9KYIxM0Ccu67Y+sqBPJFLAlwK6PiLC8OVCiT6ICVGp1BEXkM4hzO17QWB4ZKpkUCs5w2bzACxTo7omIiGGtS6GMznKQn46h+jwnClAvnKmwmI7CvOHwtG+cGtBVnqm89XBcJCHJYKpFc9otjj74g9FhEoY1GuepZCvBs/34LtBfKC1USA9jgdUTay5XShJ57tLvCgS3aTKp+mzE54vaN1LJVDdxYYG8ybQ4ueXcgyEuSGON7SW0sCWx9CI+jYZTlA5gJ5JHU2qu8YBXkYGkGBl13Ikewm7PVaO6t5oDju5kRkBKWe7EKOb/Us+qNWSEBn4HTrVGwEJd8sBKgyjWcLgYgE1Ae3i1fLxsvIDLXh/JRqc7F8agsslQnViXCKl+vG69gMdQEHE9KOssm7dCNV2ySgKquwWF6UCliGpRChPm4yoPHF/lWpPkm/kkD8S7xh8+Ko5RVRtM0eqA3vqrQpyiiLZ1NaiWuTwAkXBXayVmKWC9SpoLbvTATUJdugHxpoiIg/fgv4WAn4CX9/F/C1tPpEWX7J3en0mhsthIpuRxU+8gTbgazqtYAV6y8JpgL0zcI1/WqyjnUxGeqmOBBnk89bjnB0sXWXMpetYuoZgAAEIAABK/8tHQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD4F/wBHuGbzfTjEywAAAAASUVORK5CYII=");
    this.imgLoaded(evt);
};

/**
 * next function is from wiki.openstreetmap.org
 * Slippy-map tile numbering: WGS84 lat/lon -> fractional tile x/y at a zoom level.
 *
 * @param lat
 * @param lon
 * @param zoom
 * @returns {*[]} [xtile, ytile] (fractional)
 */
var getTileNumber = function (lat, lon, zoom) {
    var xtile = ((lon + 180) / 360 * (1 << zoom));
    var ytile = ((1 - Math.log(Math.tan(lat * Math.PI / 180) + 1 / Math.cos(lat * Math.PI / 180)) / Math.PI) / 2 * (1 << zoom));
    return [xtile, ytile];
};

/**
 * map is positioned absolute and is an a clone of the original map div.
 * on window resize it must be positioned again
 *
 * if there are problems with CSS margin, padding, border,.. this is the place to fix it
 *
 * @returns {{top: number, left: number, width: number, height: number, deltaTop: Number, deltaLeft: Number, deltaBottom: Number, deltaRight: Number}}
 */
this.calculateMapSize = function () {
    //this method is very slow in 2010 browsers
    var el = this.mapParent;
    var size1= el.getBoundingClientRect();
    // older browsers do not provide width/height on the rect; derive them
    var height = (size1.height) ? size1.height: size1.bottom - size1.top;
    var width = (size1.width) ?
        size1.width : size1.right - size1.left;
    // Make sure that there is no scroll bar when the map takes 100% of the screen
    if (parseInt(width) === parseInt(document.body.clientWidth) && parseInt(height) === parseInt(document.body.clientHeight)) document.body.style.overflow = "hidden";
    return {
        top : size1.top - document.body.scrollTop,
        left : size1.left - document.body.scrollLeft,
        width : width,
        height : height,
        deltaTop : size1.top - document.body.scrollTop,
        deltaLeft : size1.left - document.body.scrollLeft,
        deltaBottom : size1.bottom,
        deltaRight : size1.right
    };
};

// Recompute the map geometry and re-render.
this.redraw = function () { this.setMapPosition(); };

// Measure the parent element, size the cloned container and re-center the
// layer holder div; re-renders when a center/zoom is already set.
this.setMapPosition = function () {
    this.mapsize = this.calculateMapSize();
    this.size = this.calculateMapSize();
    var el = this.mapParent;
    this.mapTop = this.mapsize.top;// + this.mapsize.deltaTop;
    this.mapLeft = this.mapsize.left;// + this.mapsize.deltaLeft;
    this.width = this.mapsize.width;
    this.height = this.mapsize.height;
    this.clone.style.height = this.mapsize.height + "px";
    this.map.style.left = this.mapsize.width / 2 + "px";
    this.map.style.top = this.mapsize.height / 2 + "px";
    var center = this.getCenter();
    if (!center) return;
    var zoom = this.getZoom();
    if (zoom) this.centerAndZoom(this.getCenter(), this.getZoom());
};

// Remove all tile layer DOM nodes and cached layer records, then re-render.
this.clearMap = function () {
    if (!this.map)return;
    while (this.map.firstChild) {
        this.map.removeChild(this.map.firstChild);
    }
    while (this.layers.length > 0) {
        this.layers.pop();
    }
    this.redraw();
};

//functions from wiki gps2xy
// forward Mercator: geographic latitude -> projected latitude (degrees)
var lat2y = function (a) {
    return 180 / Math.PI * Math.log(Math.tan(Math.PI / 4 + a * (Math.PI / 180) / 2));
};
// inverse Mercator: projected latitude (degrees) -> geographic latitude
var y2lat = function (a) {
    return 180 / Math.PI * (2 * Math.atan(Math.exp(a * Math.PI / 180)) - Math.PI / 2);
};

//the image load information in the upper right corner
this.imgLoadInfo = function (total, missing) {
    if (!this.loadInfoDiv) {
        this.loadInfoDiv = document.createElement("div");
        this.loadInfoDiv.style.position = "absolute";
        this.loadInfoDiv.style.top = "0px";
        this.loadInfoDiv.style.right =
"0px"; this.loadInfoDiv.style.backgroundColor = "white"; this.loadInfoDiv.style.border = "1px solid gray"; this.loadInfoDiv.style.fontSize = "10px"; this.map.parentNode.appendChild(this.loadInfoDiv); } if (missing == 0) { this.loadInfoDiv.style.display = "none"; jsMaps.Native.Event.trigger(this.mapParent,jsMaps.api.supported_events.tilesloaded); if (jsMaps.Native.Browser.ielt9) { this.clone.style.visibility = ""; } } else { this.loadInfoDiv.style.display = ""; while (this.loadInfoDiv.firstChild) { this.loadInfoDiv.removeChild(this.loadInfoDiv.firstChild); } this.loadInfoDiv.innerHTML = missing + ' images to load' } }; this.mapCopyrightNode = false; this.mapCopyright = function () { if (typeof this.tileSource.copyright!='undefined') { if (!this.mapCopyrightNode) { this.mapCopyrightNode = jsMaps.Native.CreateDiv(this.map.parentNode,'map-copyright') } this.mapCopyrightNode.innerHTML = this.tileSource.copyright; } }; this.scaleDivExec = function () { if (options.scale_control) { jsMaps.Native.ScaleUI.init(this); jsMaps.Native.ScaleUI._update(); } }; // ************* INIT ********** this.internetExplorer = navigator.userAgent.indexOf("MSIE") != -1; if (navigator.userAgent.indexOf("Android") != -1) { var that = this; var tempFunction = function () { that.blocked = false }; setInterval(tempFunction, 300); } this.position = {}; if (typeof tileLayers == 'undefined') throw "No tile servers defined"; if (tileLayers.Layers.length == 0) throw "Empty tile severs list"; var counter = 0; for (var t in tileLayers.Layers) { if (tileLayers.Layers.hasOwnProperty(t) == false) continue; if (counter == 0) { this.tiles(tileLayers.Layers[t]); } counter++; } this.selectedTileLayer = 0; this.wheelSpeedConfig = []; this.wheelSpeedConfig["acceleration"] = 2; this.wheelSpeedConfig["maxSpeed"] = 2; this.wheelSpeedConfig["digizoom"] = true; this.wheelSpeedConfig["zoomAnimationSlowdown"] = 0.05; this.wheelSpeedConfig["animationFPS"] = 100; this.wheelSpeedConfig["moveAnimateDesktop"] = true; 
this.wheelSpeedConfig["moveAnimationSlowdown"] = 0.4; this.wheelSpeedConfig["rectShiftAnimate"] = false; this.wheelSpeedConfig["rectShiftAnimationTime"] = 500; this.wheelSpeedConfig["animateMinSpeed"] = 0.4; this.wheelSpeedConfig["animateMaxSpeed"] = 200; //variables for performance check this.wheelEventCounter = 0; this.framesCounter = 0; this.mapParent = map; this.overlays = []; // Add the zoom ui this.uiContainer = false; if (options.zoom_control) this.addOverlay(new jsMaps.Native.ZoomUI(this)); if (options.map_type) this.addOverlay(new jsMaps.Native.LayersUI(this,tileLayers)); this.clone = map.cloneNode(true); //clone is the same as the map div, but absolute positioned this.clone = document.createElement("div"); this.clone.removeAttribute("id"); jsMaps.Native.setCursor(this.clone,"grab"); jsMaps.Native.Dom.addClass(this.clone,'jsMaps-Native-Box'); if (map.firstChild) { map.insertBefore(this.clone, map.firstChild); } else { map.appendChild(this.clone); } this.map = document.createElement("div"); //this is the div that holds the layers, but no marker and svg overlayes this.map.style.position = "absolute"; this.clone.appendChild(this.map); this.setMapPosition(); //div for markers this.overlayDiv = document.createElement("div"); this.overlayDiv.style.position = "absolute"; this.clone.appendChild(this.overlayDiv); //distance tool this.distanceMeasuring = "no"; this.moveMarker = null; this.measureLine = null; this.moveAnimationMobile = true; this.moveAnimationDesktop = false; this.moveAnimationBlocked = false; this.lastMouseX = this.width / 2; this.lastMouseY = this.height / 2; this.layers = []; this.visibleZoom = null; this.oldVisibleZoom = null; this.intZoom = null; this.moveX = 0; this.moveY = 0; this.lastMoveX = 0; this.lastMoveY = 0; this.lastMoveTime = 0; this.startMoveX = 0; this.startMoveY = 0; this.sc = 1; this.blocked = false; this.tileW = 256; this.tileH = 256; this.position.zoom = 1; this.movestarted = false; //touchscreen this.mousedownTime = null; 
this.doubleclickTime = 400; //mouse this.mousedownTime2 = null; this.doubleclickTime2 = 500; this.zoomOutTime = 1000; this.zoomOutSpeed = 0.01; this.zoomOutInterval = null; this.zoomOutStarted = false; this.draggable = true; var w; if (jsMaps.Native.Browser.ie && !jsMaps.Native.Browser.ie11) { w = map; jsMaps.Native.Event.attach(document.documentElement, "mouseup", function (e) { this.leftClick = false; }, this, false); jsMaps.Native.Event.attach(document.documentElement, "mousemove", this.mousemove, this, false); } else { w = window; jsMaps.Native.Event.attach(window, "resize", this.setMapPosition, this, false); } if (navigator.userAgent.indexOf("Konqueror") != -1) w = map; jsMaps.Native.Event.attach(map, "touchstart", this.start, this, false); jsMaps.Native.Event.attach(map, "touchmove", this.move, this, false); jsMaps.Native.Event.attach(map, "touchend", this.end, this, false); jsMaps.Native.Event.attach(w, "mousemove", this.mousemove, this, false); jsMaps.Native.Event.attach(map, "mousedown", this.mousedown, this, false); jsMaps.Native.Event.attach(w, "mouseup", this.mouseup, this, false); jsMaps.Native.Event.attach(w, "orientationchange", this.reSize, this, false); if (options.mouse_scroll) jsMaps.Native.Event.attach(map, "DOMMouseScroll", this.mousewheel, this, false); jsMaps.Native.Event.attach(map, "dblclick", this.doubleclick, this, false); if (typeof(this.keydown) == "function") { jsMaps.Native.Event.attach(w, "keydown", this.keydown, this, false); jsMaps.Native.Event.attach(w, "keyup", this.keyup, this, false); } this.mapCopyright(); var center=new jsMaps.geo.Location(options.center.latitude,options.center.longitude); this.centerAndZoom(center,options.zoom); var hooking = function() {}; hooking.prototype = new jsMaps.MapStructure(); hooking.prototype.object = this; hooking.prototype.getCenter = function () { var map = this.object.getCenter(); return {lat: map.lat, lng: map.lng}; }; hooking.prototype.getElement = function () { return this.object.clone; }; 
hooking.prototype.setDraggable = function (flag) { this.object.draggable = flag; }; hooking.prototype.latLngToPoint = function (lat, lng) { var point = new jsMaps.geo.Location(lat, lng); var xy = this.object.latlngToXY(point); return {x: xy['x'],y: xy['y']} }; hooking.prototype.pointToLatLng = function (x, y) { var pos = this.object.XYTolatlng(x,y); return {lat:pos.lat,lng:pos.lng}; }; hooking.prototype.moveXY = function (x, y) { this.object.moveXY(x,y); }; hooking.prototype.setCenter = function (lat, lng) { this.object.centerAndZoom(new jsMaps.geo.Location(lat, lng),this.object.getZoom()); jsMaps.Native.Event.trigger(this.object.mapParent,jsMaps.api.supported_events.center_changed); }; hooking.prototype.getBounds = function () { return jsMaps.Native.prototype.bounds(this.object); }; hooking.prototype.getZoom = function () { return this.object.getIntZoom(); }; hooking.prototype.setZoom = function (number) { this.object.wheeling = true; this.object.zoom(number); }; hooking.prototype.fitBounds = function (bounds) { this.object.wheeling = true; return this.object.setBounds(bounds.bounds); }; return new hooking(); }; /** * Bounds object * * @param themap * @returns hooking */ jsMaps.Native.prototype.bounds = function (themap) { var __bounds; if (typeof themap != 'undefined') { __bounds = themap.getBounds(); } else { __bounds = new jsMaps.Native.InnerBounds({lat:0,lng:0},{lat:0,lng:0}); } var hooking = function () {}; hooking.prototype = new jsMaps.BoundsStructure(); hooking.prototype.bounds = __bounds; hooking.prototype.addLatLng = function (lat, lng) { this.bounds.extend(new jsMaps.geo.Location(lat, lng)); }; hooking.prototype.getCenter = function () { var center = this.bounds.getCenter(); return {lat: center.lat, lng: center.lng}; }; hooking.prototype.getTopLeft = function () { var topLeft = this.bounds.ne(); return {lat: topLeft.lat, lng: topLeft.lng}; }; hooking.prototype.getBottomRight = function () { var bottomRight = this.bounds.sw(); return {lat: 
bottomRight.lat, lng: bottomRight.lng}; }; return new hooking(); }; /** * Attach map events * * @param content * @param event * @param fnCore * @param once * @returns {*} */ jsMaps.Native.prototype.attachEvent = function (content,event,fnCore,once) { var elem; var customEvent = false; if (typeof content.object != 'undefined' && typeof content.object.mapParent!='undefined') { elem = content.object.mapParent; } else if (typeof content.object != 'undefined' && typeof content.object.marker!='undefined') { elem = content.object.marker; if (event == jsMaps.api.supported_events.click) customEvent = true; } else if (typeof content.object != 'undefined' && typeof content.object.infobox!='undefined') { elem = content.object.infobox; } else if (typeof content.object != 'undefined' && typeof content.object.vectorPath!='undefined') { elem = content.object.vectorPath; if (event == jsMaps.api.supported_events.click) customEvent = true; } var eventTranslation = event; if (event == jsMaps.api.supported_events.mouseover) eventTranslation = 'mouseenter'; if (event == jsMaps.api.supported_events.rightclick) eventTranslation = 'contextmenu'; if (event == jsMaps.api.supported_events.tilt_changed) eventTranslation = 'orientationchange'; var fn = fnCore; // this is stupid, damn you micosoft if (typeof jsMaps.Native.Event[eventTranslation] != 'undefined') eventTranslation = jsMaps.Native.Event[eventTranslation]; var useFn = function (e) { if (event == jsMaps.api.supported_events.mouseout) { var mouseOut = jsMaps.Native.MakeMouseOutFn(elem,e); if (mouseOut == false) return; } if ((event == jsMaps.api.supported_events.click || event == jsMaps.api.supported_events.dblclick) && typeof content.object.clickable != 'undefined' && content.object.clickable == false) { return; } if (event == jsMaps.api.supported_events.dblclick) { e.cancelBubble = true; } var eventHooking = function() {}; eventHooking.prototype = new jsMaps.Event(e,event,content); eventHooking.prototype.getCursorPosition = function 
() { if (typeof this.container.object.mouseToLatLng!='undefined') { return this.container.object.mouseToLatLng(this.eventObject); } return {lat: 0, lng: 0}; }; if (!jsMaps.Native.Browser.ielt9) { eventHooking.prototype.stopPropagation = function () { this.eventObject.stopPropagation(); this.eventObject.stopImmediatePropagation(); this.eventObject.cancelBubble = true; }; } fn(new eventHooking); }; // Create the event. if (customEvent == false) { if (jsMaps.Native.Browser.ielt9) { if (!elem[eventTranslation]) { elem[eventTranslation] = 0; } if (eventTranslation == 'mouseenter' || eventTranslation == 'mouseout' || eventTranslation == 'mousemove' || eventTranslation == 'mouseup' || eventTranslation == 'mousedown' ) { var trigger; if (eventTranslation == 'mouseenter') trigger = 'onmouseover'; if (eventTranslation == 'mouseout') trigger = 'onmouseout'; if (eventTranslation == 'mousemove') trigger = 'onmousemove'; if (eventTranslation == 'mouseup') trigger = 'onmouseup'; if (eventTranslation == 'mousedown') trigger = 'onmousedown'; elem.attachEvent(trigger, function (e) { useFn(e); }); } else { elem.attachEvent("onpropertychange", function (e) { if (e.propertyName == eventTranslation) { useFn(e); } }); } } else { if (jsMaps.Native.Browser.touch) { if (eventTranslation == 'click' || eventTranslation == 'mousedown' || eventTranslation == 'mouseenter' ) eventTranslation = 'touchstart'; if (eventTranslation == 'mousemove' || eventTranslation == 'drag') eventTranslation = 'touchmove'; if (eventTranslation == 'mouseup' || eventTranslation == 'mouseout' ) eventTranslation = 'touchend'; } var eventTarget = document.createEvent('Event'); eventTarget.initEvent(eventTranslation, true, true); elem.addEventListener(eventTranslation, useFn, false); } } else { elem = content.object.attachEvent(jsMaps.api.supported_events.click,useFn,false,false); } return {eventObj: elem, eventName: event}; }; /** * * @param element * @param eventName */ jsMaps.Native.prototype.triggerEvent = function 
(element,eventName) { var elem; if (typeof element.object != 'undefined' && typeof element.object.mapParent!='undefined') { elem = element.object.mapParent; }else if (typeof element.object != 'undefined' && typeof element.object.marker!='undefined') { elem = element.object.marker; } jsMaps.Native.Event.trigger(elem,eventName); }; /** * * @param map * @param eventObject * @returns {*} */ jsMaps.Native.prototype.removeEvent = function (map,eventObject) { jsMaps.Native.Event.remove(eventObject.eventObj,eventObject.eventName); }; /** * Generate markers * * @param {jsMaps.MapStructure} map * @param {jsMaps.MarkerOptions} parameters */ jsMaps.Native.prototype.marker = function (map,parameters) { var options = { position: new jsMaps.geo.Location( parameters.position.lat, parameters.position.lng), map: map.object, title:parameters.title, draggable: parameters.draggable, visible: true }; if (parameters.zIndex != null) options.zIndex = parameters.zIndex; if (parameters.icon != null) { options.icon = new jsMaps.Native.Overlay.MarkerImage( parameters.icon ); }else if (parameters.html != null) { options.icon = parameters.html; options.raiseOnDrag = false; } var marker = new jsMaps.Native.Overlay.Marker(options); var hooking = function () {}; hooking.prototype = new jsMaps.MarkerStructure(); hooking.prototype.object = marker; hooking.prototype._objectName = 'marker'; /** * * @returns {{lat: *, lng: *}} */ hooking.prototype.getPosition = function () { var pos = this.object.getPosition(); return {lat: pos.lat, lng: pos.lng} }; hooking.prototype.setPosition = function (lat, lng) { jsMaps.Native.Event.trigger(this.object.marker,jsMaps.api.additional_events.position_changed); this.object.setPosition({lat: lat,lng: lng}); }; hooking.prototype.getVisible = function () { return this.object.getVisible(); }; hooking.prototype.setVisible = function (variable) { return this.object.setVisible(variable); }; hooking.prototype.getIcon = function () { if (typeof this.object.MarkerOptions.icon != 
'undefined' && typeof this.object.MarkerOptions.icon.url != 'undefined') { return this.object.MarkerOptions.icon.url; } return null; }; hooking.prototype.setIcon = function (icon) { while (this.object.marker.firstChild) { this.object.marker.removeChild(this.object.marker.firstChild); } if (this.object.shadow) { while (this.object.shadow.firstChild) { this.object.shadow.removeChild(this.object.shadow.firstChild); } } this.object.MarkerOptions.shape = false; this.object.MarkerOptions.icon = new jsMaps.Native.Overlay.MarkerImage(icon); this.object.populateIcon(this.object.MarkerOptions); this.object.init(this.object.MarkerOptions.map); this.object.render(); jsMaps.Native.Event.trigger(this.object.marker,jsMaps.api.additional_events.icon_changed); }; hooking.prototype.getZIndex = function () { return this.object.marker.style.zIndex; }; hooking.prototype.setZIndex = function (number) { this.object.MarkerOptions.zIndex = number; this.object.render(); }; hooking.prototype.setDraggable = function (flag) { this.object.MarkerOptions.draggable = flag; this.object.init(this.object.MarkerOptions.map); this.object.render(); }; hooking.prototype.remove = function () { while (this.object.marker.firstChild) { this.object.marker.removeChild(this.object.marker.firstChild); } if (this.object.shadow) { while (this.object.shadow.firstChild) { this.object.shadow.removeChild(this.object.shadow.firstChild); } } this.object.clear(); this.object.destroy(); this.object = null; }; return new hooking(); }; /** * Info windows * * Create bubbles to be displayed on the map * * @param {jsMaps.InfoWindowOptions} parameters * @returns {jsMaps.InfoWindowStructure} */ jsMaps.Native.prototype.infoWindow = function (parameters) { var options = {content: parameters.content}; if (parameters.position != null) options.position = parameters.position; var infoWindow = new jsMaps.Native.Overlay.InfoWindow(options); var hooking = function () {}; hooking.prototype = new jsMaps.InfoWindowStructure(); /** * @type 
{jsMaps.Native.Overlay.InfoWindow} */ hooking.prototype.object = infoWindow; hooking.prototype.getPosition = function () { var pos = this.object.getPosition(); return {lat: pos.lat, lng: pos.lng} }; hooking.prototype.setPosition = function (lat, lng) { this.object.setPosition({lat: lat,lng: lng}); }; hooking.prototype.close = function () { this.object.close(); }; /** * * @param {jsMaps.MapStructure} map * @param {jsMaps.MarkerStructure} marker */ hooking.prototype.open = function(map,marker) { this.object.open(map.object,((typeof marker == 'undefined' || typeof marker.object == 'undefined') ? undefined: marker.object)); }; hooking.prototype.setContent = function (content) { this.object.setContent(content); }; return new hooking(); }; /** * Create PolyLine * * @param {jsMaps.MapStructure} map * @param {jsMaps.PolyLineOptions} parameters * @returns jsMaps.PolyLineStructure */ jsMaps.Native.prototype.polyLine = function (map,parameters) { var vector = new jsMaps.Native.Overlay.Vector({ clickable: parameters.clickable, stroke: parameters.strokeColor, strokeWidth: parameters.strokeWeight, strokeOpacity: parameters.strokeOpacity, fill: "none", draggable: parameters.draggable, editable: parameters.editable, visible: parameters.visible, zIndex: parameters.zIndex }, parameters.path, jsMaps.Native.Vector.elements.polyLine); map.object.addOverlay(vector); var hooking = function () {}; hooking.prototype = new jsMaps.PolyLineStructure(); hooking.prototype.object = vector; hooking.prototype.getEditable = function () { return this.object._vectorOptions.editable; }; hooking.prototype.getPath = function () { var arrayOfPaths = []; /** * * @type {jsMaps.Native.Overlay.Vector._vectorPoints|*|jsMaps.Native.Overlay.Vector.vectorObject._vectorPoints} */ var path = this.object._vectorPoints; for (var i in path) { if (path.hasOwnProperty(i) == false) continue; var pos = path[i]; arrayOfPaths.push ({lat: pos.lat, lng: pos.lng}); } return arrayOfPaths; }; hooking.prototype.getVisible = 
function () { return this.object._vectorOptions.visible; }; hooking.prototype.setDraggable = function (draggable) { this.object._vectorOptions.draggable = draggable; }; hooking.prototype.setEditable = function (editable) { this.object._vectorOptions.editable = editable; this.object.render(true); }; hooking.prototype.setPath = function (pathArray) { this.object._vectorPoints = pathArray; this.object.render(true); }; /** * @param {jsMaps.MapStructure} map * @returns {{lat: *, lng: *}} */ hooking.prototype.setMap = function (map) { this.object.theMap.removeOverlay(this.object); map.object.addOverlay(this.object); this.object.theMap = map.object; this.object._vectorOptions.map = map.object; this.object.render(true); }; hooking.prototype.setVisible = function (visible) { this.object.setVisible(visible); }; hooking.prototype.removeLine = function () { this.object.destroy(); }; return new hooking(); }; /** * @param {jsMaps.MapStructure} map * @param {jsMaps.PolygonOptions} parameters * @returns jsMaps.PolygonStructure */ jsMaps.Native.prototype.polygon = function (map,parameters) { var vector = new jsMaps.Native.Overlay.Vector({ clickable: parameters.clickable, stroke: parameters.strokeColor, strokeWidth: parameters.strokeWeight, strokeOpacity: parameters.strokeOpacity, fill: parameters.fillColor, fillOpacity: parameters.fillOpacity, draggable: parameters.draggable, editable: parameters.editable, visible: parameters.visible, zIndex: parameters.zIndex }, parameters.paths, jsMaps.Native.Vector.elements.polygon); map.object.addOverlay(vector); var hooking = function () {}; hooking.prototype = new jsMaps.PolygonStructure(); hooking.prototype.object = vector; hooking.prototype.getDraggable = function () { return this.object._vectorOptions.draggable; }; hooking.prototype.getEditable = function () { return this.object._vectorOptions.editable; }; hooking.prototype.getPath = function () { var arrayOfPaths = []; /** * * @type 
{jsMaps.Native.Overlay.Vector._vectorPoints|*|jsMaps.Native.Overlay.Vector.vectorObject._vectorPoints} */ var path = this.object._vectorPoints; for (var i in path) { if (path.hasOwnProperty(i) == false) continue; var pos = path[i]; arrayOfPaths.push ({lat: pos.lat, lng: pos.lng}); } return arrayOfPaths; }; hooking.prototype.getVisible = function () { return this.object._vectorOptions.visible; }; hooking.prototype.setDraggable = function (draggable) { this.object._vectorOptions.draggable = draggable; }; hooking.prototype.setEditable = function (editable) { this.object._vectorOptions.editable = editable; this.object.render(true); }; hooking.prototype.setPath = function (pathArray) { this.object._vectorPoints = pathArray; this.object.render(true); }; /** * @param {jsMaps.MapStructure} map * @returns {{lat: *, lng: *}} */ hooking.prototype.setMap = function (map) { this.object.theMap.removeOverlay(this.object); map.object.addOverlay(this.object); this.object.theMap = map.object; this.object._vectorOptions.map = map.object; this.object.render(true); }; hooking.prototype.setVisible = function (visible) { this.object.setVisible(visible); }; hooking.prototype.removePolyGon = function () { this.object.destroy(); }; return new hooking(); }; /** * Create PolyLine * * @param {jsMaps.MapStructure} map * @param {jsMaps.CircleOptions} parameters * @returns jsMaps.CircleStructure */ jsMaps.Native.prototype.circle = function (map,parameters) { var vector = new jsMaps.Native.Overlay.Vector({ clickable: parameters.clickable, stroke: parameters.strokeColor, strokeWidth: parameters.strokeWeight, strokeOpacity: parameters.strokeOpacity, fill: parameters.fillColor, fillOpacity: parameters.fillOpacity, draggable: parameters.draggable, editable: parameters.editable, // currently not supported visible: parameters.visible, zIndex: parameters.zIndex, center: parameters.center, radius: parameters.radius }, [], jsMaps.Native.Vector.elements.circle); map.object.addOverlay(vector); var hooking = 
function () { }; hooking.prototype = new jsMaps.CircleStructure(); hooking.prototype.object = vector; hooking.prototype.getBounds = function () { var bBox = new jsMaps.Native.prototype.bounds(); bBox.bounds = this.object.pointsBounds(); return bBox; }; hooking.prototype.getCenter = function () { var theCenter = this.getBounds().getCenter(); return {lat: theCenter.lat, lng: theCenter.lng}; }; hooking.prototype.getDraggable = function () { return this.object._vectorOptions.draggable; }; hooking.prototype.getEditable = function () { return this.object._vectorOptions.editable; }; hooking.prototype.getRadius = function () { return this.object._vectorOptions.radius; }; hooking.prototype.getVisible = function () { return this.object._vectorOptions.visible; }; hooking.prototype.setCenter = function (lat, lng) { this.object._vectorOptions.center = {lat: lat, lng: lng}; this.object.render(true); }; hooking.prototype.setDraggable = function (draggable) { this.object._vectorOptions.draggable = draggable; }; hooking.prototype.setEditable = function (editable) { this.object._vectorOptions.editable = editable; this.object.render(true); }; /** * @param {jsMaps.MapStructure} map * @returns {{lat: *, lng: *}} */ hooking.prototype.setMap = function (map) { this.object.theMap.removeOverlay(this.object); map.object.addOverlay(this.object); this.object.theMap = map.object; this.object._vectorOptions.map = map.object; this.object.render(true); }; hooking.prototype.setVisible = function (visible) { this.object.setVisible(visible); }; hooking.prototype.setRadius = function (radius) { this.object._vectorOptions.radius = radius; this.object.render(true); }; hooking.prototype.removeCircle = function () { this.object.destroy(); }; return new hooking(); };
trying to fix the flickering map again
library/native/core.native.js
trying to fix the flickering map again
<ide><path>ibrary/native/core.native.js <ide> <ide> this.zoomActive = true; <ide> <del> this.discretZoomBlockedTimeout = setTimeout(func, 400); <add> setTimeout(func, 400); <ide> this.discretZoomBlocked = true; <ide> <ide> var steps = 20; <del> for (var i = 1; i <= steps; i++) { <add> for (var i = 1; i <= 20; i++) { <ide> if (this.zoomTimeouts[i]) { <ide> clearTimeout(this.zoomTimeouts[i]); <ide> } <ide> } <ide> var start = this.zoom(); <ide> var end = (direction == 1) ? Math.ceil(this.zoom() + 0.9): Math.floor(this.zoom() - 0.9); <del> var q; <ide> <ide> if (direction == -1) { <ide> for (q in this.layers) { <ide> var img = document.createElement("img"); <ide> img.style.visibility = "hidden"; <ide> <del> if (this.discretZoomBlocked == true) { <add> if ((this.discretZoomBlocked == true || this.zoomActive == true || this.wheeling == true || this.movestarted == false)) { <ide> jsMaps.Native.Dom.addClass(img,'map-image no-anim'); <ide> } else { <ide> jsMaps.Native.Dom.addClass(img,'map-image');
JavaScript
apache-2.0
c0b5866c3a52593e290d3940484a31ff0bfb5f7c
0
Jedwondle/openstorefront,Jedwondle/openstorefront,skycow/openstorefront,Razaltan/openstorefront,Jedwondle/openstorefront,Jedwondle/openstorefront,Razaltan/openstorefront,skycow/openstorefront,tyler-travis/openstorefront,jbottel/openstorefront,tyler-travis/openstorefront,tyler-travis/openstorefront,Razaltan/openstorefront,jbottel/openstorefront,skycow/openstorefront,Razaltan/openstorefront,skycow/openstorefront,Jedwondle/openstorefront,jbottel/openstorefront,skycow/openstorefront,tyler-travis/openstorefront,jbottel/openstorefront,Razaltan/openstorefront,tyler-travis/openstorefront
/* * Copyright 2014 Space Dynamics Laboratory - Utah State University Research Foundation. * * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an 'AS IS' BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; /* global isEmpty, setupPopovers, openClick:true, setupResults, fullClick, openFiltersToggle, buttonOpen, buttonClose, toggleclass, resetAnimations, filtClick, setPageHeight*/ app.controller('ResultsCtrl', ['$scope', 'localCache', 'business', '$filter', '$timeout', '$location', '$rootScope', '$q', '$route', '$sce', function ($scope, localCache, Business, $filter, $timeout, $location, $rootScope, $q, $route, $sce) { /*jshint unused: false*/ ////////////////////////////////////////////////////////////////////////////// // Here we put our variables... ////////////////////////////////////////////////////////////////////////////// // set the page height so the loading masks look good. 
setPageHeight($('.page1'), 52); $scope.scrollTo = $rootScope.scrollTo; // start the loading masks $scope.$emit('$TRIGGERLOAD', 'mainLoader'); // $scope.$emit('$TRIGGERLOAD', 'resultsLoad'); $scope.$emit('$TRIGGERLOAD', 'filtersLoad'); // set variables $scope._scopename = 'results'; $scope.orderProp = ''; $scope.query = ''; $scope.lastUsed = new Date(); $scope.modal = {}; $scope.details = {}; $scope.data = {}; $scope.isPage1 = true; $scope.showSearch = false; $scope.showDetails = false; $scope.showMessage = false; $scope.modal.isLanding = false; $scope.single = false; $scope.isArticle = false; $scope.searchCode = null; $scope.filteredTotal = null; $scope.searchTitle = null; $scope.searchDescription = null; $scope.details.details = null; $scope.typeahead = null; $scope.searchGroup = null; $scope.searchKey = null; $scope.filters = null; $scope.resetFilters = null; $scope.total = null; $scope.ratingsFilter = 0; $scope.rowsPerPage = 200; $scope.pageNumber = 1; $scope.maxPageNumber = 1; $scope.noDataMessage = $sce.trustAsHtml('<p>There are no results for your search</p> <p>&mdash; Or &mdash;</p> <p>You have filtered out all of the results.</p><button class="btn btn-default" ng-click="clearFilters()">Reset Filters</button>'); // grab what we need from the server. 
$scope.setupTagList = function() { Business.getTagsList(true).then(function(result) { if (result) { $scope.tagsList = result; $scope.tagsList.sort(); } else { $scope.tagsList = null; } }); } $scope.setupTagList(); $scope.$on('$REFRESHTAGLIST', function(event) { $scope.setupTagList(); }) Business.getProsConsList().then(function(result) { if (result) { $scope.prosConsList = result; } else { $scope.prosConsList = null; } }); Business.userservice.getWatches().then(function(result) { if (result) { $scope.watches = result; } else { $scope.watches = null; } }); Business.lookupservice.getExpertise().then(function(result) { if (result) { $scope.expertise = result; } else { $scope.expertise = []; } }); Business.lookupservice.getUserTypeCodes().then(function(result) { if (result) { $scope.userTypeCodes = result; } else { $scope.userTypeCodes = []; } }); Business.componentservice.getComponentDetails().then(function(result) { Business.typeahead(result, 'name').then(function(value){ if (value) { $scope.typeahead = value; } else { $scope.typeahead = []; } }); }); ////////////////////////////////////////////////////////////////////////////// // Here we put our Functions ////////////////////////////////////////////////////////////////////////////// /*************************************************************** * This function selects the initial tab. 
* params: tab -- The tab that is selected ***************************************************************/ $scope.setSelectedTab = function(tab) { $scope.selectedTab = tab; }; $scope.getNumThings = function(article){ if ($scope.data && $scope.data.data && $scope.data.data.length) { var count = 0; _.each($scope.data.data, function(item){ if (article) { if (item.listingType === 'Article') { count++; } } else if (!article) { if (item.listingType !== 'Article') count++; } }) return count; } else { return 0; } } /*************************************************************** * Here we set the tab class * params: tab -- The tab to check to see if it is selected * returns: class -- The classes that the tab will have ***************************************************************/ $scope.tabClass = function(tab) { if ($scope.selectedTab === tab) { return 'active'; } else { return ''; } }; /*************************************************************** * This function is looked at for auto suggestions for the tag list * if a ' ' is the user's entry, it will auto suggest the next 20 tags that * are not currently in the list of tags. Otherwise, it will look at the * string and do a substring search. 
* params: query -- The input that the user has typed so far * params: list -- The list of tags already tagged on the item * params: source -- The source of the tags options * returns: deferred.promise -- The promise that we will return a resolved tags list ***************************************************************/ $scope.checkTagsList = function(query, list, source) { var deferred = $q.defer(); var subList = null; if (query === ' ') { subList = _.reject(source, function(item) { return !!(_.where(list, {'text': item}).length); }); } else { subList = _.filter(source, function(item) { return item.toLowerCase().indexOf(query.toLowerCase()) > -1; }); } deferred.resolve(subList); return deferred.promise; }; /*************************************************************** * Description * params: param name -- param description * returns: Return name -- return description ***************************************************************/ var getBody = function(route) { var deferred = $q.defer(); $.get(route).then(function(responseData) { deferred.resolve(responseData); }); return deferred.promise; }; /*************************************************************** * This function is called once we have the search request from the business layer * The order and manner in which we do this call will most likely change once * we get the httpbackend fleshed out. 
***************************************************************/ $scope.reAdjust = function(key) { $scope.searchGroup = key; $scope.searchKey = $rootScope.searchKey; if (!isEmpty($scope.searchGroup)) { // grab all of the keys in the filters $scope.searchKey = $scope.searchGroup[0].key; $scope.searchCode = $scope.searchGroup[0].code; } else { $scope.searchKey = 'search'; $scope.searchCode = ''; } Business.getFilters().then(function(result) { if (result) { $scope.filters = result; $scope.filters = angular.copy($scope.filters); $scope.filters = _.sortBy($scope.filters, function(item){ return item.description; }); } else { $scope.filters = null; } setupResults(); var architecture = null; if ($scope.searchKey === 'attribute') { if ($scope.searchCode.type) { var filter = _.find($scope.filters, {'type': $scope.searchCode.type}); if (filter){ architecture = filter.architectureFlg; } } } Business.componentservice.doSearch($scope.searchKey, $scope.searchCode, architecture).then(function(result) { if (result && result.data && result.data.length > 0) { $scope.total = result.data || []; } else { $scope.total = []; } $scope.filteredTotal = $scope.total; /*Simulate wait for the filters*/ /*This is simulating the wait time for building the data so that we get a loader*/ $scope.data.data = $scope.total; _.each($scope.data.data, function(item){ if (item.description !== null && item.description !== undefined && item.description !== '') { var desc = item.description.match(/^(.*?)[.?!]\s/); item.shortdescription = (desc && desc[0])? 
desc[0] + '.': item.description; } else { item.shortdescription = 'This is a temporary short description'; } }); $scope.setupData(); // var end = new Date().getTime(); // var time = end - start; // console.log('Total Execution time ****: ' + time); $scope.$emit('$TRIGGERUNLOAD', 'mainLoader'); $scope.$emit('$TRIGGERUNLOAD', 'filtersLoad'); $scope.initializeData(key); adjustFilters(); }, function(result){ if (result && result.data && result.data.length > 0) { $scope.total = result.data || []; } else { $scope.total = []; } $scope.data.data = $scope.total; $scope.$emit('$TRIGGERUNLOAD', 'mainLoader'); $scope.$emit('$TRIGGERUNLOAD', 'filtersLoad'); $scope.initializeData(key); $scope.showMessage = true; $scope.setupData(); }); }); }; // /*************************************************************** * This is used to initialize the scope title, key, and code. Once we have a * database, this is most likely where we'll do the first pull for data. * * TODO:: Add query prameters capabilities for this page so that we don't have * to rely on the local/session storrage to pass us the search key * * TODO:: When we do start using actual transfered searches from the main page * we need to initialize checks on the filters that were sent to us from that * page (or we need to disable the filter all together) * * This function is called by the reAdjustment function in order * to reinitialze all of the data if the list of items changes. * which usually would hinge on the key of the search * params: key -- The search object we use to initialize data with. ***************************************************************/ $scope.initializeData = function(key) { if (!isEmpty($scope.searchGroup)) { // grab all of the keys in the filters $scope.searchKey = $scope.searchGroup[0].key; $scope.searchCode = $scope.searchGroup[0].code; var keys = _.pluck($scope.filters, 'type'); var foundFilter = null; var foundCollection = null; var type = ''; // TODO: CLEAN UP THIS IF/ELSE switch!!!!!!! 
if (_.contains(keys, $scope.searchCode.type)) { $scope.showSearch = true; foundFilter = _.find($scope.filters, {'type': $scope.searchCode.type}); foundCollection = _.find(foundFilter.codes, {'code': $scope.searchCode.key}); // console.log('found', foundFilter); // console.log('found', foundCollection); // if the search group is based on one of those filters do this if ($scope.searchCode !== 'all' && foundFilter && foundCollection) { $scope.filters = _.reject($scope.filters, function(filter) { return filter.type === foundFilter.type; }); $scope.searchColItem = foundCollection; $scope.searchTitle = foundFilter.description + ', ' + foundCollection.label; $scope.modal.modalTitle = foundFilter.description + ', ' + foundCollection.label; $scope.searchDescription = getShortDescription(foundCollection.description) || 'The results on this page are restricted by an implied filter on the attribute: ' + $scope.searchTitle; if (foundCollection.landing !== undefined && foundCollection.landing !== null) { getBody(foundCollection.landing).then(function(result) { $scope.modal.modalBody = result; $scope.modal.isLanding = true; }); } else { $scope.modal.modalBody = foundCollection.description || 'The results on this page are restricted by an implied filter on the attribute: ' + $scope.searchTitle; $scope.modal.isLanding = false; } } else { $scope.searchTitle = $scope.searchType + ', All'; $scope.modal.modalTitle = $scope.searchType + ', All'; $scope.searchDescription = 'The results on this page are restricted by an implied filter on the attribute: ' + $scope.searchType; $scope.modal.modalBody = 'This will eventually hold a description for this attribute type.'; $scope.modal.isLanding = false; } } else if ($scope.searchGroup[0].key === 'search') { // Otherwise check to see if it is a search $scope.searchKey = 'DOALLSEARCH'; $scope.showSearch = true; $scope.searchTitle = $scope.searchGroup[0].code; $scope.modal.modalTitle = $scope.searchGroup[0].code; $scope.searchDescription = 'Search 
results based on the search key: ' + $scope.searchGroup[0].code; $scope.modal.modalBody = 'The results on this page are restricted by an implied filter on words similar to the search key \'' + $scope.searchGroup[0].code + '\''; } else { // In this case, our tempData object exists, but has no useable data $scope.searchKey = 'DOALLSEARCH'; $scope.showSearch = true; $scope.searchTitle = 'All'; $scope.modal.modalTitle = 'All'; $scope.searchDescription = 'Search all results'; $scope.modal.modalBody = 'The results found on this page are not restricted by any implied filters.'; } } else { // In this case, our tempData doesn't exist $scope.searchKey = 'DOALLSEARCH'; $scope.showSearch = true; $scope.searchTitle = 'All'; $scope.modal.modalTitle = 'All'; $scope.searchDescription = 'Search all results'; $scope.modal.modalBody = 'The results found on this page are not restricted by any implied filters.'; } $scope.applyFilters(); $scope.$broadcast('dataloaded', !$scope.single); }; /*************************************************************** * This function grabs the search key and resets the page in order to update the search ***************************************************************/ var callSearch = function(key) { $scope.$emit('$TRIGGERLOAD', 'mainLoader'); var type = 'search'; var code = 'all'; var query = null; if (key === null || key === undefined) { if (!isEmpty($location.search())) { query = $location.search(); if (query.type === 'attribute') { if (query.keyType && query.keyKey) { type = 'attribute'; code = { 'type': query.keyType, 'key': query.keyKey }; } } else if (query.type && query.code) { type = query.type; code = query.code; } } } else { if (!isEmpty($location.search())) { query = $location.search(); if (query.type === 'attribute') { if (query.keyType && query.keyKey) { type = 'attribute'; code = { 'type': query.keyType, 'key': query.keyKey }; } } else if (query.type && query.code) { type = query.type; code = query.code; } } } $scope.reAdjust([{ 'key': 
type, 'code': code }]); }; $scope.$on('$CHANGESEARCHRESULTTAGS', function(event, id, tags){ $timeout(function() { var temp = _.find($scope.data.data, {'componentId': id}); temp.tags = tags; }) }); // $scope.resetSearch = function() { $scope.$emit('$TRIGGERLOAD', 'mainLoader'); // $scope.$emit('$TRIGGERLOAD', 'resultsLoad'); $scope.$emit('$TRIGGERLOAD', 'filtersLoad'); var type = 'search'; var code = 'all'; $rootScope.searchKey = 'all'; $location.search({ 'type': type, 'code': code }); $scope.reAdjust([{ 'key': type, 'code': code }]); } /*************************************************************** * This function is used by the reviews section in the details to remove * and add the ellipsis ***************************************************************/ $scope.toggleclass = function(id, className) { toggleclass(id, className); }; /*************************************************************** * This function removes the inherent filter (if you click on apps, types no longer applies etc) ***************************************************************/ var adjustFilters = function() { if ($scope.searchGroup[0].key) { $scope.filters = _.reject($scope.filters, function(item) { return item.key === $scope.searchGroup[0].key; }); } $scope.resetFilters = JSON.parse(JSON.stringify($scope.filters)); }; /*************************************************************** * This funciton calls the global buttonOpen function that handles page * flyout animations according to the state to open the details ***************************************************************/ $scope.doButtonOpen = function() { buttonOpen(); }; /*************************************************************** * This funciton calls the global buttonClose function that handles page * flyout animations according to the state to close the details ***************************************************************/ $scope.doButtonClose = function() { buttonClose(); }; 
/*************************************************************** * This function handles toggleing filter checks per filter heading click. ***************************************************************/ $scope.toggleChecks = function(collection, override){ $scope.applyFilters(); }; /*************************************************************** * This function updates the details when a component title is clicked on ***************************************************************/ $scope.updateDetails = function(id, article){ // $scope.$emit('$TRIGGERLOAD', 'fullDetailsLoader'); // console.log('article', article); if (article && article.listingType === 'Article') { $scope.isArticle = true; localCache.save('type', article.articleAttributeType); localCache.save('code', article.articleAttributeCode); // $scope.$emit('$TRIGGERUNLOAD', 'fullDetailsLoader'); $scope.$emit('$TRIGGEREVENT', '$TRIGGERLANDING', false); $scope.showDetails = true; if (!openClick) { buttonOpen(); } $timeout(function(){ $('.page1').focus(); $scope.scrollTo('componentScroll'+article.attributes[0].type.replace(/\W/g, '')+article.attributes[0].code.replace(/\W/g, '')); }, 500); } else { $scope.isArticle = false; $('.page2').scrollTop(0); if (!openClick) { buttonOpen(); } $scope.showDetails = false; // console.log('id', id); Business.componentservice.getComponentDetails(id, true).then( function (result){ if (result) { // grab the evaluation schedule. $scope.sendPageView(result.name); $scope.details.details = result; // Code here will be linted with JSHint. /* jshint ignore:start */ // Code here will be linted with ignored by JSHint. 
if ($scope.details.details.attributes[0] !== undefined) { var foundEvaluation = null; _.each($scope.details.details.attributes, function(attribute) { if (attribute.type === 'DI2ELEVEL') { foundEvaluation = attribute; } }); $scope.details.details.evaluationAttribute = foundEvaluation; } if ($scope.details.details.lastActivityDts && $scope.details.details.lastViewedDts) { var update = new Date($scope.details.details.lastActivityDts); var view = new Date($scope.details.details.lastViewedDts); if (view < update) { showUpdateNotify(); } else { resetUpdateNotify(); } } else { resetUpdateNotify(); } /* jshint ignore:end */ } // $scope.$emit('$TRIGGERUNLOAD', 'fullDetailsLoader'); $scope.showDetails = true; $timeout(function(){ $('.page1').focus(); $scope.scrollTo('componentScroll'+$scope.details.details.componentId.replace(/\W/g, '')); }, 500); }); } // }; // /*************************************************************** * This function adds a component to the watch list and toggles the buttons ***************************************************************/ $scope.goToFullPage = function(id){ var url = $location.absUrl().substring(0, $location.absUrl().length - $location.url().length); url = url + '/single?id=' + id; window.open(url, 'Component_' + id, 'toolbar=no, location=no, directories=no, status=no, menubar=no, scrollbars=yes, resizable=yes, width=840, height=840'); }; /*************************************************************** * This function adds a component to the watch list and toggles the buttons ***************************************************************/ $scope.goToCompare = function(){ var list = []; _.each($scope.data.data, function(item) { list.push(item.componentId); }); $location.search({ 'id': list }); $location.path('/compare'); }; /*************************************************************** * This function resets the filters in the results page in order to clear * the filters as quickly as possible 
***************************************************************/ $scope.clearFilters = function() { $scope.orderProp = ''; $scope.ratingsFilter = null; $scope.tagsFilter = null; $scope.query = null; if ($scope.resetFilters) { $scope.filters = JSON.parse(JSON.stringify($scope.resetFilters)); } $scope.applyFilters(); }; /*************************************************************** * This function is used to watch filters in order to show the 'applied' * message so that they won't forget one of the filters is applied. ***************************************************************/ $scope.checkFilters = function() { _.each($scope.filters, function(filter){ filter.hasChecked = _.some(filter.codes, function(item){ return item.checked; }); if (!filter.hasChecked) { filter.checked = false; } }); $scope.applyFilters(); } /*************************************************************** * This function applies the filters that have been given to us to filter the * data with ***************************************************************/ $scope.applyFilters = function() { if ($scope.filteredTotal) { var results = // We must use recursive filtering or we will get incorrect results // the order DOES matter here. 
$filter('orderBy') // ($filter('ratingFilter') ($filter('tagFilter') ($filter('componentFilter') ($filter('filter') //filter by the string ($scope.total, $scope.query), // filter the data by the filters $scope.filters), // filter the data by the tags $scope.tagsFilter), // filter the data by the ratings $scope.ratingsFilter), // Then order-by the orderProp $scope.orderProp); // make sure we reset the data and then copy over the results $scope.filteredTotal = ['']; $scope.filteredTotal = results; // Do the math required to assure that we have a valid page number and // maxPageNumber $scope.maxPageNumber = Math.ceil($scope.filteredTotal.length / $scope.rowsPerPage); if (($scope.pageNumber - 1) * $scope.rowsPerPage >= $scope.filteredTotal.length) { $scope.pageNumber = 1; } // Set the data that will be displayed to the first 'n' results of the filtered data $scope.data.data = $scope.filteredTotal.slice((($scope.pageNumber - 1) * $scope.rowsPerPage), ($scope.pageNumber * $scope.rowsPerPage)); if ($scope.data.data.length) { $scope.showMessage = false; } else { $scope.showMessage = true; } // after a slight wait, reapply the popovers for the results ratings. $timeout(function() { setupPopovers(); }, 300); } }; ////////////////////////////////////////////////////////////////////////////// // Here we put our Event Watchers ////////////////////////////////////////////////////////////////////////////// /*************************************************************** * Event for callSearch caught here. This is triggered by the nav * search bar when you are already on the results page. 
***************************************************************/ // $scope.$on('$callSearch', function(event, data) {jshint unused: false // callSearch(data); // }); /*************************************************************** * Event to trigger an update of the details that are shown ***************************************************************/ $scope.$on('$detailsUpdated', function(event, id) {/*jshint unused: false*/ if ($scope.details.details && $scope.details.details.componentId === id) { $timeout(function() { $scope.updateDetails($scope.details.details.componentId, $scope.details.details.listingType); }); } }); /*************************************************************** * Catch the enter/select event here for typeahead ***************************************************************/ $scope.$on('$typeahead.select', function(event, value, index) {/*jshint unused: false*/ $scope.applyFilters(); $scope.sendEvent('Filter Set', 'Text', $scope.query); }); /******************************************************************************* * This function watches for the view content loaded event and runs a timeout * function to handle the initial movement of the display buttons. *******************************************************************************/ $scope.$on('$viewContentLoaded', function(){ resetAnimations($('.page1'), $('.page2'), $('.filters')); $timeout(function() { if (fullClick === 0) { if ($(window).width() >= 768) { if (filtClick === 0) { openFiltersToggle(); } } } }, 1000); }); ////////////////////////////////////////////////////////////////////////////// // Here we put our Scope Watchers ////////////////////////////////////////////////////////////////////////////// /*************************************************************** * This function is used to watch the pagenumber variable. 
When it changes * we need to readjust the pagination ***************************************************************/ $scope.$watch('pageNumber',function(val, old){ /* jshint unused:false */ $scope.pageNumber = parseInt(val); if ($scope.pageNumber < 1) { $scope.pageNumber = 1; } if ($scope.pageNumber > $scope.maxPageNumber) { $scope.pageNumber = $scope.maxPageNumber; } var page = $scope.pageNumber; if (page < 1 || page === '' || isNaN(page) || page === null){ page = 1; } if ($scope.filteredTotal) { $scope.data.data = $scope.filteredTotal.slice(((page - 1) * $scope.rowsPerPage), (page * $scope.rowsPerPage)); } else { $scope.data.data = []; } $scope.applyFilters(); }); /*************************************************************** * This function is used to watch the rowsPerPage variable. When it changes * we need to adjust pagination ***************************************************************/ $scope.$watch('rowsPerPage',function(val, old){ /* jshint unused:false */ var rowPP = $scope.rowsPerPage; if (rowPP < 1 || rowPP === '' || isNaN(rowPP) || rowPP === null){ rowPP = 1; } $scope.pageNumber = 1; if ($scope.filteredTotal) { $scope.maxPageNumber = Math.ceil($scope.filteredTotal.length / rowPP); } $scope.applyFilters(); }); /*************************************************************** * This function is used to watch the orderProp variable. When it changes * re-filter the data ***************************************************************/ $scope.$watch('orderProp',function(val, old){ /* jshint unused:false */ $scope.applyFilters(); }); /*************************************************************** * This function is used to watch the query variable. 
When it changes * re-filter the data ***************************************************************/ $scope.$watch('query',function(val, old){ /* jshint unused:false */ $scope.applyFilters(); }); /*************************************************************** * This function is used to watch the query variable. When it changes * re-filter the data ***************************************************************/ $scope.$watch('ratingsFilter',function(val, old){ /* jshint unused:false */ $scope.applyFilters(); }); /*************************************************************** * This function is a deep watch on the data variable to see if * data.data changes. When it does, we need to see if the result set * for the search results is larger than the 'max' displayed ***************************************************************/ $scope.setupData = function() { if ($scope.data && $scope.data.data) { // max needs to represent the total number of results you want to load // on the initial search. var max = 2000; // also, we'll probably check the total number of possible results that // could come back from the server here instead of the length of the // data we have already. if ($scope.data.data.length > max) { $scope.moreThan200 = true; } else { $scope.moreThan200 = false; } } } callSearch(); }]);
client/openstorefront/app/scripts/controllers/results.js
/* * Copyright 2014 Space Dynamics Laboratory - Utah State University Research Foundation. * * Licensed under the Apache License, Version 2.0 (the 'License'); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an 'AS IS' BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ 'use strict'; /* global isEmpty, setupPopovers, openClick:true, setupResults, fullClick, openFiltersToggle, buttonOpen, buttonClose, toggleclass, resetAnimations, filtClick, setPageHeight*/ app.controller('ResultsCtrl', ['$scope', 'localCache', 'business', '$filter', '$timeout', '$location', '$rootScope', '$q', '$route', '$sce', function ($scope, localCache, Business, $filter, $timeout, $location, $rootScope, $q, $route, $sce) { /*jshint unused: false*/ ////////////////////////////////////////////////////////////////////////////// // Here we put our variables... ////////////////////////////////////////////////////////////////////////////// // set the page height so the loading masks look good. 
setPageHeight($('.page1'), 52); $scope.scrollTo = $rootScope.scrollTo; // start the loading masks $scope.$emit('$TRIGGERLOAD', 'mainLoader'); // $scope.$emit('$TRIGGERLOAD', 'resultsLoad'); $scope.$emit('$TRIGGERLOAD', 'filtersLoad'); // set variables $scope._scopename = 'results'; $scope.orderProp = ''; $scope.query = ''; $scope.lastUsed = new Date(); $scope.modal = {}; $scope.details = {}; $scope.data = {}; $scope.isPage1 = true; $scope.showSearch = false; $scope.showDetails = false; $scope.showMessage = false; $scope.modal.isLanding = false; $scope.single = false; $scope.isArticle = false; $scope.searchCode = null; $scope.filteredTotal = null; $scope.searchTitle = null; $scope.searchDescription = null; $scope.details.details = null; $scope.typeahead = null; $scope.searchGroup = null; $scope.searchKey = null; $scope.filters = null; $scope.resetFilters = null; $scope.total = null; $scope.ratingsFilter = 0; $scope.rowsPerPage = 200; $scope.pageNumber = 1; $scope.maxPageNumber = 1; $scope.noDataMessage = $sce.trustAsHtml('<p>There are no results for your search</p> <p>&mdash; Or &mdash;</p> <p>You have filtered out all of the results.</p><button class="btn btn-default" ng-click="clearFilters()">Reset Filters</button>'); // grab what we need from the server. 
$scope.setupTagList = function() { Business.getTagsList(true).then(function(result) { if (result) { $scope.tagsList = result; $scope.tagsList.sort(); } else { $scope.tagsList = null; } }); } $scope.setupTagList(); $scope.$on('$REFRESHTAGLIST', function(event) { $scope.setupTagList(); }) Business.getProsConsList().then(function(result) { if (result) { $scope.prosConsList = result; } else { $scope.prosConsList = null; } }); Business.userservice.getWatches().then(function(result) { if (result) { $scope.watches = result; } else { $scope.watches = null; } }); Business.lookupservice.getExpertise().then(function(result) { if (result) { $scope.expertise = result; } else { $scope.expertise = []; } }); Business.lookupservice.getUserTypeCodes().then(function(result) { if (result) { $scope.userTypeCodes = result; } else { $scope.userTypeCodes = []; } }); Business.componentservice.getComponentDetails().then(function(result) { Business.typeahead(result, 'name').then(function(value){ if (value) { $scope.typeahead = value; } else { $scope.typeahead = []; } }); }); ////////////////////////////////////////////////////////////////////////////// // Here we put our Functions ////////////////////////////////////////////////////////////////////////////// /*************************************************************** * This function selects the initial tab. 
* params: tab -- The tab that is selected ***************************************************************/ $scope.setSelectedTab = function(tab) { $scope.selectedTab = tab; }; $scope.getNumThings = function(article){ if ($scope.data && $scope.data.data && $scope.data.data.length) { var count = 0; _.each($scope.data.data, function(item){ if (item.listingType === 'Article' && article) { count++; } else if (!article) { count++; } }) return count; } else { return 0; } } /*************************************************************** * Here we set the tab class * params: tab -- The tab to check to see if it is selected * returns: class -- The classes that the tab will have ***************************************************************/ $scope.tabClass = function(tab) { if ($scope.selectedTab === tab) { return 'active'; } else { return ''; } }; /*************************************************************** * This function is looked at for auto suggestions for the tag list * if a ' ' is the user's entry, it will auto suggest the next 20 tags that * are not currently in the list of tags. Otherwise, it will look at the * string and do a substring search. 
* params: query -- The input that the user has typed so far * params: list -- The list of tags already tagged on the item * params: source -- The source of the tags options * returns: deferred.promise -- The promise that we will return a resolved tags list ***************************************************************/ $scope.checkTagsList = function(query, list, source) { var deferred = $q.defer(); var subList = null; if (query === ' ') { subList = _.reject(source, function(item) { return !!(_.where(list, {'text': item}).length); }); } else { subList = _.filter(source, function(item) { return item.toLowerCase().indexOf(query.toLowerCase()) > -1; }); } deferred.resolve(subList); return deferred.promise; }; /*************************************************************** * Description * params: param name -- param description * returns: Return name -- return description ***************************************************************/ var getBody = function(route) { var deferred = $q.defer(); $.get(route).then(function(responseData) { deferred.resolve(responseData); }); return deferred.promise; }; /*************************************************************** * This function is called once we have the search request from the business layer * The order and manner in which we do this call will most likely change once * we get the httpbackend fleshed out. 
***************************************************************/ $scope.reAdjust = function(key) { $scope.searchGroup = key; $scope.searchKey = $rootScope.searchKey; if (!isEmpty($scope.searchGroup)) { // grab all of the keys in the filters $scope.searchKey = $scope.searchGroup[0].key; $scope.searchCode = $scope.searchGroup[0].code; } else { $scope.searchKey = 'search'; $scope.searchCode = ''; } Business.getFilters().then(function(result) { if (result) { $scope.filters = result; $scope.filters = angular.copy($scope.filters); $scope.filters = _.sortBy($scope.filters, function(item){ return item.description; }); } else { $scope.filters = null; } setupResults(); var architecture = null; if ($scope.searchKey === 'attribute') { if ($scope.searchCode.type) { var filter = _.find($scope.filters, {'type': $scope.searchCode.type}); if (filter){ architecture = filter.architectureFlg; } } } Business.componentservice.doSearch($scope.searchKey, $scope.searchCode, architecture).then(function(result) { if (result && result.data && result.data.length > 0) { $scope.total = result.data || []; } else { $scope.total = []; } $scope.filteredTotal = $scope.total; /*Simulate wait for the filters*/ /*This is simulating the wait time for building the data so that we get a loader*/ $scope.data.data = $scope.total; _.each($scope.data.data, function(item){ if (item.description !== null && item.description !== undefined && item.description !== '') { var desc = item.description.match(/^(.*?)[.?!]\s/); item.shortdescription = (desc && desc[0])? 
desc[0] + '.': item.description; } else { item.shortdescription = 'This is a temporary short description'; } }); $scope.setupData(); // var end = new Date().getTime(); // var time = end - start; // console.log('Total Execution time ****: ' + time); $scope.$emit('$TRIGGERUNLOAD', 'mainLoader'); $scope.$emit('$TRIGGERUNLOAD', 'filtersLoad'); $scope.initializeData(key); adjustFilters(); }, function(result){ if (result && result.data && result.data.length > 0) { $scope.total = result.data || []; } else { $scope.total = []; } $scope.data.data = $scope.total; $scope.$emit('$TRIGGERUNLOAD', 'mainLoader'); $scope.$emit('$TRIGGERUNLOAD', 'filtersLoad'); $scope.initializeData(key); $scope.showMessage = true; $scope.setupData(); }); }); }; // /*************************************************************** * This is used to initialize the scope title, key, and code. Once we have a * database, this is most likely where we'll do the first pull for data. * * TODO:: Add query prameters capabilities for this page so that we don't have * to rely on the local/session storrage to pass us the search key * * TODO:: When we do start using actual transfered searches from the main page * we need to initialize checks on the filters that were sent to us from that * page (or we need to disable the filter all together) * * This function is called by the reAdjustment function in order * to reinitialze all of the data if the list of items changes. * which usually would hinge on the key of the search * params: key -- The search object we use to initialize data with. ***************************************************************/ $scope.initializeData = function(key) { if (!isEmpty($scope.searchGroup)) { // grab all of the keys in the filters $scope.searchKey = $scope.searchGroup[0].key; $scope.searchCode = $scope.searchGroup[0].code; var keys = _.pluck($scope.filters, 'type'); var foundFilter = null; var foundCollection = null; var type = ''; // TODO: CLEAN UP THIS IF/ELSE switch!!!!!!! 
if (_.contains(keys, $scope.searchCode.type)) { $scope.showSearch = true; foundFilter = _.find($scope.filters, {'type': $scope.searchCode.type}); foundCollection = _.find(foundFilter.codes, {'code': $scope.searchCode.key}); // console.log('found', foundFilter); // console.log('found', foundCollection); // if the search group is based on one of those filters do this if ($scope.searchCode !== 'all' && foundFilter && foundCollection) { $scope.filters = _.reject($scope.filters, function(filter) { return filter.type === foundFilter.type; }); $scope.searchColItem = foundCollection; $scope.searchTitle = foundFilter.description + ', ' + foundCollection.label; $scope.modal.modalTitle = foundFilter.description + ', ' + foundCollection.label; $scope.searchDescription = getShortDescription(foundCollection.description) || 'The results on this page are restricted by an implied filter on the attribute: ' + $scope.searchTitle; if (foundCollection.landing !== undefined && foundCollection.landing !== null) { getBody(foundCollection.landing).then(function(result) { $scope.modal.modalBody = result; $scope.modal.isLanding = true; }); } else { $scope.modal.modalBody = foundCollection.description || 'The results on this page are restricted by an implied filter on the attribute: ' + $scope.searchTitle; $scope.modal.isLanding = false; } } else { $scope.searchTitle = $scope.searchType + ', All'; $scope.modal.modalTitle = $scope.searchType + ', All'; $scope.searchDescription = 'The results on this page are restricted by an implied filter on the attribute: ' + $scope.searchType; $scope.modal.modalBody = 'This will eventually hold a description for this attribute type.'; $scope.modal.isLanding = false; } } else if ($scope.searchGroup[0].key === 'search') { // Otherwise check to see if it is a search $scope.searchKey = 'DOALLSEARCH'; $scope.showSearch = true; $scope.searchTitle = $scope.searchGroup[0].code; $scope.modal.modalTitle = $scope.searchGroup[0].code; $scope.searchDescription = 'Search 
results based on the search key: ' + $scope.searchGroup[0].code; $scope.modal.modalBody = 'The results on this page are restricted by an implied filter on words similar to the search key \'' + $scope.searchGroup[0].code + '\''; } else { // In this case, our tempData object exists, but has no useable data $scope.searchKey = 'DOALLSEARCH'; $scope.showSearch = true; $scope.searchTitle = 'All'; $scope.modal.modalTitle = 'All'; $scope.searchDescription = 'Search all results'; $scope.modal.modalBody = 'The results found on this page are not restricted by any implied filters.'; } } else { // In this case, our tempData doesn't exist $scope.searchKey = 'DOALLSEARCH'; $scope.showSearch = true; $scope.searchTitle = 'All'; $scope.modal.modalTitle = 'All'; $scope.searchDescription = 'Search all results'; $scope.modal.modalBody = 'The results found on this page are not restricted by any implied filters.'; } $scope.applyFilters(); $scope.$broadcast('dataloaded', !$scope.single); }; /*************************************************************** * This function grabs the search key and resets the page in order to update the search ***************************************************************/ var callSearch = function(key) { $scope.$emit('$TRIGGERLOAD', 'mainLoader'); var type = 'search'; var code = 'all'; var query = null; if (key === null || key === undefined) { if (!isEmpty($location.search())) { query = $location.search(); if (query.type === 'attribute') { if (query.keyType && query.keyKey) { type = 'attribute'; code = { 'type': query.keyType, 'key': query.keyKey }; } } else if (query.type && query.code) { type = query.type; code = query.code; } } } else { if (!isEmpty($location.search())) { query = $location.search(); if (query.type === 'attribute') { if (query.keyType && query.keyKey) { type = 'attribute'; code = { 'type': query.keyType, 'key': query.keyKey }; } } else if (query.type && query.code) { type = query.type; code = query.code; } } } $scope.reAdjust([{ 'key': 
type, 'code': code }]); }; $scope.$on('$CHANGESEARCHRESULTTAGS', function(event, id, tags){ $timeout(function() { var temp = _.find($scope.data.data, {'componentId': id}); temp.tags = tags; }) }); // $scope.resetSearch = function() { $scope.$emit('$TRIGGERLOAD', 'mainLoader'); // $scope.$emit('$TRIGGERLOAD', 'resultsLoad'); $scope.$emit('$TRIGGERLOAD', 'filtersLoad'); var type = 'search'; var code = 'all'; $rootScope.searchKey = 'all'; $location.search({ 'type': type, 'code': code }); $scope.reAdjust([{ 'key': type, 'code': code }]); } /*************************************************************** * This function is used by the reviews section in the details to remove * and add the ellipsis ***************************************************************/ $scope.toggleclass = function(id, className) { toggleclass(id, className); }; /*************************************************************** * This function removes the inherent filter (if you click on apps, types no longer applies etc) ***************************************************************/ var adjustFilters = function() { if ($scope.searchGroup[0].key) { $scope.filters = _.reject($scope.filters, function(item) { return item.key === $scope.searchGroup[0].key; }); } $scope.resetFilters = JSON.parse(JSON.stringify($scope.filters)); }; /*************************************************************** * This funciton calls the global buttonOpen function that handles page * flyout animations according to the state to open the details ***************************************************************/ $scope.doButtonOpen = function() { buttonOpen(); }; /*************************************************************** * This funciton calls the global buttonClose function that handles page * flyout animations according to the state to close the details ***************************************************************/ $scope.doButtonClose = function() { buttonClose(); }; 
/*************************************************************** * This function handles toggleing filter checks per filter heading click. ***************************************************************/ $scope.toggleChecks = function(collection, override){ $scope.applyFilters(); }; /*************************************************************** * This function updates the details when a component title is clicked on ***************************************************************/ $scope.updateDetails = function(id, article){ // $scope.$emit('$TRIGGERLOAD', 'fullDetailsLoader'); // console.log('article', article); if (article && article.listingType === 'Article') { $scope.isArticle = true; localCache.save('type', article.articleAttributeType); localCache.save('code', article.articleAttributeCode); // $scope.$emit('$TRIGGERUNLOAD', 'fullDetailsLoader'); $scope.$emit('$TRIGGEREVENT', '$TRIGGERLANDING', false); $scope.showDetails = true; if (!openClick) { buttonOpen(); } $timeout(function(){ $('.page1').focus(); $scope.scrollTo('componentScroll'+article.attributes[0].type.replace(/\W/g, '')+article.attributes[0].code.replace(/\W/g, '')); }, 500); } else { $scope.isArticle = false; $('.page2').scrollTop(0); if (!openClick) { buttonOpen(); } $scope.showDetails = false; // console.log('id', id); Business.componentservice.getComponentDetails(id, true).then( function (result){ if (result) { // grab the evaluation schedule. $scope.sendPageView(result.name); $scope.details.details = result; // Code here will be linted with JSHint. /* jshint ignore:start */ // Code here will be linted with ignored by JSHint. 
if ($scope.details.details.attributes[0] !== undefined) { var foundEvaluation = null; _.each($scope.details.details.attributes, function(attribute) { if (attribute.type === 'DI2ELEVEL') { foundEvaluation = attribute; } }); $scope.details.details.evaluationAttribute = foundEvaluation; } if ($scope.details.details.lastActivityDts && $scope.details.details.lastViewedDts) { var update = new Date($scope.details.details.lastActivityDts); var view = new Date($scope.details.details.lastViewedDts); if (view < update) { showUpdateNotify(); } else { resetUpdateNotify(); } } else { resetUpdateNotify(); } /* jshint ignore:end */ } // $scope.$emit('$TRIGGERUNLOAD', 'fullDetailsLoader'); $scope.showDetails = true; $timeout(function(){ $('.page1').focus(); $scope.scrollTo('componentScroll'+$scope.details.details.componentId.replace(/\W/g, '')); }, 500); }); } // }; // /*************************************************************** * This function adds a component to the watch list and toggles the buttons ***************************************************************/ $scope.goToFullPage = function(id){ var url = $location.absUrl().substring(0, $location.absUrl().length - $location.url().length); url = url + '/single?id=' + id; window.open(url, 'Component_' + id, 'toolbar=no, location=no, directories=no, status=no, menubar=no, scrollbars=yes, resizable=yes, width=840, height=840'); }; /*************************************************************** * This function adds a component to the watch list and toggles the buttons ***************************************************************/ $scope.goToCompare = function(){ var list = []; _.each($scope.data.data, function(item) { list.push(item.componentId); }); $location.search({ 'id': list }); $location.path('/compare'); }; /*************************************************************** * This function resets the filters in the results page in order to clear * the filters as quickly as possible 
***************************************************************/ $scope.clearFilters = function() { $scope.orderProp = ''; $scope.ratingsFilter = null; $scope.tagsFilter = null; $scope.query = null; if ($scope.resetFilters) { $scope.filters = JSON.parse(JSON.stringify($scope.resetFilters)); } $scope.applyFilters(); }; /*************************************************************** * This function is used to watch filters in order to show the 'applied' * message so that they won't forget one of the filters is applied. ***************************************************************/ $scope.checkFilters = function() { _.each($scope.filters, function(filter){ filter.hasChecked = _.some(filter.codes, function(item){ return item.checked; }); if (!filter.hasChecked) { filter.checked = false; } }); $scope.applyFilters(); } /*************************************************************** * This function applies the filters that have been given to us to filter the * data with ***************************************************************/ $scope.applyFilters = function() { if ($scope.filteredTotal) { var results = // We must use recursive filtering or we will get incorrect results // the order DOES matter here. 
$filter('orderBy') // ($filter('ratingFilter') ($filter('tagFilter') ($filter('componentFilter') ($filter('filter') //filter by the string ($scope.total, $scope.query), // filter the data by the filters $scope.filters), // filter the data by the tags $scope.tagsFilter), // filter the data by the ratings $scope.ratingsFilter), // Then order-by the orderProp $scope.orderProp); // make sure we reset the data and then copy over the results $scope.filteredTotal = ['']; $scope.filteredTotal = results; // Do the math required to assure that we have a valid page number and // maxPageNumber $scope.maxPageNumber = Math.ceil($scope.filteredTotal.length / $scope.rowsPerPage); if (($scope.pageNumber - 1) * $scope.rowsPerPage >= $scope.filteredTotal.length) { $scope.pageNumber = 1; } // Set the data that will be displayed to the first 'n' results of the filtered data $scope.data.data = $scope.filteredTotal.slice((($scope.pageNumber - 1) * $scope.rowsPerPage), ($scope.pageNumber * $scope.rowsPerPage)); if ($scope.data.data.length) { $scope.showMessage = false; } else { $scope.showMessage = true; } // after a slight wait, reapply the popovers for the results ratings. $timeout(function() { setupPopovers(); }, 300); } }; ////////////////////////////////////////////////////////////////////////////// // Here we put our Event Watchers ////////////////////////////////////////////////////////////////////////////// /*************************************************************** * Event for callSearch caught here. This is triggered by the nav * search bar when you are already on the results page. 
***************************************************************/ // $scope.$on('$callSearch', function(event, data) {jshint unused: false // callSearch(data); // }); /*************************************************************** * Event to trigger an update of the details that are shown ***************************************************************/ $scope.$on('$detailsUpdated', function(event, id) {/*jshint unused: false*/ if ($scope.details.details && $scope.details.details.componentId === id) { $timeout(function() { $scope.updateDetails($scope.details.details.componentId, $scope.details.details.listingType); }); } }); /*************************************************************** * Catch the enter/select event here for typeahead ***************************************************************/ $scope.$on('$typeahead.select', function(event, value, index) {/*jshint unused: false*/ $scope.applyFilters(); $scope.sendEvent('Filter Set', 'Text', $scope.query); }); /******************************************************************************* * This function watches for the view content loaded event and runs a timeout * function to handle the initial movement of the display buttons. *******************************************************************************/ $scope.$on('$viewContentLoaded', function(){ resetAnimations($('.page1'), $('.page2'), $('.filters')); $timeout(function() { if (fullClick === 0) { if ($(window).width() >= 768) { if (filtClick === 0) { openFiltersToggle(); } } } }, 1000); }); ////////////////////////////////////////////////////////////////////////////// // Here we put our Scope Watchers ////////////////////////////////////////////////////////////////////////////// /*************************************************************** * This function is used to watch the pagenumber variable. 
When it changes * we need to readjust the pagination ***************************************************************/ $scope.$watch('pageNumber',function(val, old){ /* jshint unused:false */ $scope.pageNumber = parseInt(val); if ($scope.pageNumber < 1) { $scope.pageNumber = 1; } if ($scope.pageNumber > $scope.maxPageNumber) { $scope.pageNumber = $scope.maxPageNumber; } var page = $scope.pageNumber; if (page < 1 || page === '' || isNaN(page) || page === null){ page = 1; } if ($scope.filteredTotal) { $scope.data.data = $scope.filteredTotal.slice(((page - 1) * $scope.rowsPerPage), (page * $scope.rowsPerPage)); } else { $scope.data.data = []; } $scope.applyFilters(); }); /*************************************************************** * This function is used to watch the rowsPerPage variable. When it changes * we need to adjust pagination ***************************************************************/ $scope.$watch('rowsPerPage',function(val, old){ /* jshint unused:false */ var rowPP = $scope.rowsPerPage; if (rowPP < 1 || rowPP === '' || isNaN(rowPP) || rowPP === null){ rowPP = 1; } $scope.pageNumber = 1; if ($scope.filteredTotal) { $scope.maxPageNumber = Math.ceil($scope.filteredTotal.length / rowPP); } $scope.applyFilters(); }); /*************************************************************** * This function is used to watch the orderProp variable. When it changes * re-filter the data ***************************************************************/ $scope.$watch('orderProp',function(val, old){ /* jshint unused:false */ $scope.applyFilters(); }); /*************************************************************** * This function is used to watch the query variable. 
When it changes * re-filter the data ***************************************************************/ $scope.$watch('query',function(val, old){ /* jshint unused:false */ $scope.applyFilters(); }); /*************************************************************** * This function is used to watch the query variable. When it changes * re-filter the data ***************************************************************/ $scope.$watch('ratingsFilter',function(val, old){ /* jshint unused:false */ $scope.applyFilters(); }); /*************************************************************** * This function is a deep watch on the data variable to see if * data.data changes. When it does, we need to see if the result set * for the search results is larger than the 'max' displayed ***************************************************************/ $scope.setupData = function() { if ($scope.data && $scope.data.data) { // max needs to represent the total number of results you want to load // on the initial search. var max = 2000; // also, we'll probably check the total number of possible results that // could come back from the server here instead of the length of the // data we have already. if ($scope.data.data.length > max) { $scope.moreThan200 = true; } else { $scope.moreThan200 = false; } } } callSearch(); }]);
fix for the component count on results page.
client/openstorefront/app/scripts/controllers/results.js
fix for the component count on results page.
<ide><path>lient/openstorefront/app/scripts/controllers/results.js <ide> if ($scope.data && $scope.data.data && $scope.data.data.length) { <ide> var count = 0; <ide> _.each($scope.data.data, function(item){ <del> if (item.listingType === 'Article' && article) { <del> count++; <add> if (article) { <add> if (item.listingType === 'Article') { <add> count++; <add> } <ide> } else if (!article) { <add> if (item.listingType !== 'Article') <ide> count++; <ide> } <ide> })
Java
mit
71c2f289dc65bd61448d347f87387ab737ae78a9
0
codingSteve/library
package bestcoders.library.services; import java.util.Optional; import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import bestcoders.library.BusinessDate; import bestcoders.library.InventoryService; import bestcoders.library.Library; import bestcoders.library.items.Item; import bestcoders.library.loans.LoanRecord; import bestcoders.library.loans.LoanState; import bestcoders.library.members.LibraryMember; import bestcoders.library.services.helpers.LibraryStreams; public class ReturnService implements InventoryService { private static Logger logger = LoggerFactory.getLogger(ReturnService.class); private final LibraryStreams libraryStreams; private final Library library; public ReturnService(final Library library) { this.library = library; libraryStreams = new LibraryStreams(library); } @Override public boolean apply(final LibraryMember m, final Item i) { logger.info("About to return item {} from member", i.getId(), m.getMemberNumber()); final Stream<LoanRecord> openLoans = libraryStreams.getOpenLoansStream(); final Stream<LoanRecord> memberLoans = libraryStreams.getLoansForMember(openLoans, m); final Stream<LoanRecord> memberLoansForItem = memberLoans.filter(l -> l.getItem().equals(i)); final Optional<LoanRecord> loanRecord = memberLoansForItem.findFirst(); final boolean op; if (loanRecord.isPresent()) { logger.debug("Item: {} has been loaned to member: {}", i, m); final LoanRecord lr = loanRecord.get(); final BusinessDate currentDate = library.getBusinessDate(); logger.info("About to set return date to {} for lr {} ", currentDate, lr); lr.setReturnDate(currentDate); lr.setState(LoanState.CLOSED); op = true; } else { logger.debug("Item: {} has not been loaned to member: {}", i, m); op = false; } return op; } }
src/main/java/bestcoders/library/services/ReturnService.java
package bestcoders.library.services; import java.util.Optional; import java.util.stream.Stream; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import bestcoders.library.BusinessDate; import bestcoders.library.InventoryService; import bestcoders.library.Library; import bestcoders.library.items.Item; import bestcoders.library.loans.LoanRecord; import bestcoders.library.loans.LoanState; import bestcoders.library.members.LibraryMember; import bestcoders.library.services.helpers.LibraryStreams; public class ReturnService implements InventoryService { private static Logger logger = LoggerFactory.getLogger(ReturnService.class); private final LibraryStreams libraryStreams; private final Library library; public ReturnService(final Library library) { this.library = library; libraryStreams = new LibraryStreams(library); } @Override public boolean apply(final LibraryMember m, final Item i) { logger.info("About to return item {} from member", i.getId(), m.getMemberNumber()); final Stream<LoanRecord> openLoans = libraryStreams.getOpenLoansStream(); final Stream<LoanRecord> memberLoans = libraryStreams.getLoansForMember(openLoans, m); // TODO: add test for multiple loans per member. final Optional<LoanRecord> loanRecord = memberLoans.findFirst(); final boolean op; if (loanRecord.isPresent()) { logger.debug("Item: {} has been loaned to member: {}", i, m); final LoanRecord lr = loanRecord.get(); final BusinessDate currentDate = library.getBusinessDate().addDays(0); logger.info("About to set return date to {} for lr {} ", currentDate, lr); lr.setReturnDate(currentDate); lr.setState(LoanState.CLOSED); op = true; } else { logger.debug("Item: {} has not been loaned to member: {}", i, m); op = false; } return op; } }
Add filter to ensure the correct item is returned.
src/main/java/bestcoders/library/services/ReturnService.java
Add filter to ensure the correct item is returned.
<ide><path>rc/main/java/bestcoders/library/services/ReturnService.java <ide> <ide> logger.info("About to return item {} from member", i.getId(), m.getMemberNumber()); <ide> final Stream<LoanRecord> openLoans = libraryStreams.getOpenLoansStream(); <del> final Stream<LoanRecord> memberLoans = libraryStreams.getLoansForMember(openLoans, m); // TODO: add test for multiple loans per member. <add> final Stream<LoanRecord> memberLoans = libraryStreams.getLoansForMember(openLoans, m); <add> final Stream<LoanRecord> memberLoansForItem = memberLoans.filter(l -> l.getItem().equals(i)); <ide> <del> final Optional<LoanRecord> loanRecord = memberLoans.findFirst(); <add> final Optional<LoanRecord> loanRecord = memberLoansForItem.findFirst(); <ide> <ide> final boolean op; <ide> <ide> if (loanRecord.isPresent()) { <ide> logger.debug("Item: {} has been loaned to member: {}", i, m); <ide> final LoanRecord lr = loanRecord.get(); <del> final BusinessDate currentDate = library.getBusinessDate().addDays(0); <add> final BusinessDate currentDate = library.getBusinessDate(); <ide> logger.info("About to set return date to {} for lr {} ", currentDate, lr); <ide> lr.setReturnDate(currentDate); <ide> lr.setState(LoanState.CLOSED);
JavaScript
mit
c80d1f743a25b76c6e3969b3bb7cafe6023103ff
0
andyrj/post-js
import test from "ava"; import { Store, autorun } from "../src/store"; test("creates snapshots", t => { const store = Store({ test: 1 }); t.deepEqual(store("snapshot"), { test: 1 }); }); test("creates snapshots for nested stores", t => { const store = Store({ foo: "BAR", nested: { test: 1 } }); t.deepEqual(store("snapshot"), { foo: "BAR", nested: { test: 1 } }); }); test("generates patches", t => { const store = Store({ test: 1 }); let count = 0; function patchHandler(patch) { count++; } store("register", patchHandler); store.test = 10; t.is(count, 1); store("unregister", patchHandler); store.test = 1; t.is(count, 1); t.throws(() => store("unregister", patchHandler)); }); test("apply patch and snapshots", t => { const store = Store({ test: 1 }); const patches = []; function patchHandler(patch) { patches.push(patch); } const initSnap = store("snapshot"); store("register", patchHandler); store.test = 2; const patchToApply = patches.pop(); store("unregister", patchHandler); store("apply", patchToApply); t.is(store.test, 2); store("restore", initSnap); t.is(store.test, 1); });
test/snapshot.test.js
import test from "ava"; import { Store, autorun } from "../src/store"; test("creates snapshots", t => { const store = Store({ test: 1 }); t.deepEqual(store("snapshot"), { test: 1 }); }); test("creates snapshots for nested stores", t => { const store = Store({ foo: "BAR", nested: { test: 1 } }); t.deepEqual(store("snapshot"), { foo: "BAR", nested: { test: 1 } }); }); test("generates patches", t => { const store = Store({ test: 1 }); let count = 0; function patchHandler(patch) { count++; } store("register", patchHandler); store.test = 10; t.is(count, 1); store("unregister", patchHandler); store.test = 1; t.is(count, 1); t.throws(() => store("unregister", patchHandler)); });
added test for apply/restore simple case...
test/snapshot.test.js
added test for apply/restore simple case...
<ide><path>est/snapshot.test.js <ide> t.is(count, 1); <ide> t.throws(() => store("unregister", patchHandler)); <ide> }); <add> <add>test("apply patch and snapshots", t => { <add> const store = Store({ test: 1 }); <add> const patches = []; <add> function patchHandler(patch) { <add> patches.push(patch); <add> } <add> const initSnap = store("snapshot"); <add> store("register", patchHandler); <add> store.test = 2; <add> const patchToApply = patches.pop(); <add> store("unregister", patchHandler); <add> store("apply", patchToApply); <add> t.is(store.test, 2); <add> store("restore", initSnap); <add> t.is(store.test, 1); <add>});
Java
agpl-3.0
3a69704e4214be329518913cc763377f67a67421
0
David-Development/ownCloud-Account-Importer
package com.nextcloud.android.sso.model; import java.io.Serializable; /** * Nextcloud SingleSignOn * * @author David Luhmer * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ public class ExceptionMessage implements Serializable { public String title; public String message; public ExceptionMessage(String title, String message) { this.title = title; this.message = message; } }
src/main/java/com/nextcloud/android/sso/model/ExceptionMessage.java
package com.nextcloud.android.sso.model; /** * Nextcloud SingleSignOn * * @author David Luhmer * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ public class ExceptionMessage { public String title; public String message; public ExceptionMessage(String title, String message) { this.title = title; this.message = message; } }
ExceptionMessage should implement Serializable To hopefully fix this strange error: ``` 2020-05-07 19:52:28.526 23303-23303/it.niedermann.nextcloud.deck.dev E/AndroidRuntime: FATAL EXCEPTION: main Process: it.niedermann.nextcloud.deck.dev, PID: 23303 java.lang.RuntimeException: Parcelable encountered IOException writing serializable object (name = com.nextcloud.android.sso.exceptions.NextcloudHttpRequestFailedException) at android.os.Parcel.writeSerializable(Parcel.java:1833) at android.os.Parcel.writeValue(Parcel.java:1780) at android.os.Parcel.writeArrayMapInternal(Parcel.java:928) at android.os.BaseBundle.writeToParcelInner(BaseBundle.java:1584) at android.os.Bundle.writeToParcel(Bundle.java:1253) at android.os.Parcel.writeBundle(Parcel.java:997) at androidx.fragment.app.FragmentState.writeToParcel(FragmentState.java:125) at android.os.Parcel.writeTypedObject(Parcel.java:1634) at android.os.Parcel.writeTypedList(Parcel.java:1513) at android.os.Parcel.writeTypedList(Parcel.java:1470) at androidx.fragment.app.FragmentManagerState.writeToParcel(FragmentManagerState.java:51) at android.os.Parcel.writeParcelable(Parcel.java:1801) at android.os.Parcel.writeValue(Parcel.java:1707) at android.os.Parcel.writeArrayMapInternal(Parcel.java:928) at android.os.BaseBundle.writeToParcelInner(BaseBundle.java:1584) at android.os.Bundle.writeToParcel(Bundle.java:1253) at android.app.IActivityTaskManager$Stub$Proxy.activityStopped(IActivityTaskManager.java:4505) at android.app.servertransaction.PendingTransactionActions$StopInfo.run(PendingTransactionActions.java:145) at android.os.Handler.handleCallback(Handler.java:883) at android.os.Handler.dispatchMessage(Handler.java:100) at android.os.Looper.loop(Looper.java:214) at android.app.ActivityThread.main(ActivityThread.java:7356) at java.lang.reflect.Method.invoke(Native Method) at com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:492) at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:930) Caused 
by: java.io.NotSerializableException: com.nextcloud.android.sso.model.ExceptionMessage at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1240) at java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1604) at java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1565) at java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1488) at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1234) at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:354) at android.os.Parcel.writeSerializable(Parcel.java:1828) at android.os.Parcel.writeValue(Parcel.java:1780)  at android.os.Parcel.writeArrayMapInternal(Parcel.java:928)  at android.os.BaseBundle.writeToParcelInner(BaseBundle.java:1584)  at android.os.Bundle.writeToParcel(Bundle.java:1253)  at android.os.Parcel.writeBundle(Parcel.java:997)  at androidx.fragment.app.FragmentState.writeToParcel(FragmentState.java:125)  at android.os.Parcel.writeTypedObject(Parcel.java:1634)  at android.os.Parcel.writeTypedList(Parcel.java:1513)  at android.os.Parcel.writeTypedList(Parcel.java:1470)  at androidx.fragment.app.FragmentManagerState.writeToParcel(FragmentManagerState.java:51)  at android.os.Parcel.writeParcelable(Parcel.java:1801)  at android.os.Parcel.writeValue(Parcel.java:1707)  at android.os.Parcel.writeArrayMapInternal(Parcel.java:928)  at android.os.BaseBundle.writeToParcelInner(BaseBundle.java:1584)  at android.os.Bundle.writeToParcel(Bundle.java:1253)  at android.app.IActivityTaskManager$Stub$Proxy.activityStopped(IActivityTaskManager.java:4505)  at android.app.servertransaction.PendingTransactionActions$StopInfo.run(PendingTransactionActions.java:145)  at android.os.Handler.handleCallback(Handler.java:883)  at android.os.Handler.dispatchMessage(Handler.java:100)  at android.os.Looper.loop(Looper.java:214)  at android.app.ActivityThread.main(ActivityThread.java:7356)  at java.lang.reflect.Method.invoke(Native Method)  at 
com.android.internal.os.RuntimeInit$MethodAndArgsCaller.run(RuntimeInit.java:492)  at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:930)  ``` Signed-off-by: desperateCoder <[email protected]>
src/main/java/com/nextcloud/android/sso/model/ExceptionMessage.java
ExceptionMessage should implement Serializable
<ide><path>rc/main/java/com/nextcloud/android/sso/model/ExceptionMessage.java <ide> package com.nextcloud.android.sso.model; <add> <add>import java.io.Serializable; <ide> <ide> /** <ide> * Nextcloud SingleSignOn <ide> * along with this program. If not, see <http://www.gnu.org/licenses/>. <ide> */ <ide> <del>public class ExceptionMessage { <add>public class ExceptionMessage implements Serializable { <ide> <ide> public String title; <ide> public String message;
Java
apache-2.0
bf14227f347c9386178b2a4b57820b7680d2a612
0
grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation,grahammendick/navigation
package com.navigation.reactnative; import android.content.Context; import android.database.DataSetObserver; import android.graphics.drawable.Drawable; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.ScrollView; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.coordinatorlayout.widget.CoordinatorLayout; import androidx.viewpager.widget.PagerAdapter; import androidx.viewpager.widget.ViewPager; import com.google.android.material.appbar.AppBarLayout; import com.google.android.material.bottomnavigation.BottomNavigationView; public class TabNavigationView extends BottomNavigationView implements TabView { boolean bottomTabs; int defaultTextColor; int selectedTintColor; int unselectedTintColor; private ViewPager.OnPageChangeListener pageChangeListener; private DataSetObserver dataSetObserver; private boolean layoutRequested = false; private boolean autoSelected = false; public TabNavigationView(Context context) { super(context); TabLayoutView tabLayout = new TabLayoutView(context); selectedTintColor = unselectedTintColor = defaultTextColor = tabLayout.defaultTextColor; } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); TabBarView tabBar = getTabBar(); if (bottomTabs && tabBar != null) { setupWithViewPager(tabBar); tabBar.populateTabs(); } } private TabBarView getTabBar() { for(int i = 0; getParent() != null && i < ((ViewGroup) getParent()).getChildCount(); i++) { View child = ((ViewGroup) getParent()).getChildAt(i); if (child instanceof TabBarView) return (TabBarView) child; } return null; } @Override public void setupWithViewPager(@Nullable final ViewPager viewPager) { if (viewPager != null && viewPager.getAdapter() != null) { final PagerAdapter pagerAdapter = viewPager.getAdapter(); buildMenu(pagerAdapter); setOnNavigationItemSelectedListener(new OnNavigationItemSelectedListener() { @Override public boolean 
onNavigationItemSelected(@NonNull MenuItem menuItem) { if (!autoSelected && viewPager.getCurrentItem() == menuItem.getOrder()) { View tab = ((TabBarView) viewPager).getTabAt(0); if (tab instanceof CoordinatorLayout) { CoordinatorLayout coordinatorLayout = (CoordinatorLayout) tab; for(int i = 0; i < coordinatorLayout.getChildCount(); i++) { if (coordinatorLayout.getChildAt(i) instanceof AppBarLayout) ((AppBarLayout) coordinatorLayout.getChildAt(i)).setExpanded(true); if (coordinatorLayout.getChildAt(i) instanceof ScrollView) ((ScrollView) coordinatorLayout.getChildAt(i)).smoothScrollTo(0,0); } } } viewPager.setCurrentItem(menuItem.getOrder(), false); return true; } }); if (pageChangeListener != null) viewPager.removeOnPageChangeListener(pageChangeListener); pageChangeListener = new ViewPager.OnPageChangeListener() { @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { } @Override public void onPageSelected(int position) { autoSelected = true; setSelectedItemId(position); autoSelected = false; } @Override public void onPageScrollStateChanged(int state) { } }; viewPager.addOnPageChangeListener(pageChangeListener); if (dataSetObserver != null) pagerAdapter.unregisterDataSetObserver(dataSetObserver); dataSetObserver = new DataSetObserver() { @Override public void onChanged() { buildMenu(pagerAdapter); setSelectedItemId(viewPager.getCurrentItem()); } }; pagerAdapter.registerDataSetObserver(dataSetObserver); autoSelected = true; setSelectedItemId(viewPager.getCurrentItem()); autoSelected = false; } } private void buildMenu(PagerAdapter pagerAdapter) { getMenu().clear(); for (int i = 0; i < pagerAdapter.getCount(); i++) { getMenu().add(Menu.NONE, i, i, pagerAdapter.getPageTitle(i)); } } @Override public void requestLayout() { super.requestLayout(); if (!layoutRequested) { layoutRequested = true; post(measureAndLayout); } } private final Runnable measureAndLayout = new Runnable() { @Override public void run() { layoutRequested 
= false; measure( MeasureSpec.makeMeasureSpec(getWidth(), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(getHeight(), MeasureSpec.EXACTLY)); layout(getLeft(), getTop(), getRight(), getBottom()); } }; @Override public int getTabCount() { return getMenu().size(); } @Override public void setTitle(int index, String title) { getMenu().getItem(index).setTitle(title); } public void setIcon(int index, Drawable icon) { getMenu().getItem(index).setIcon(icon); } }
NavigationReactNative/src/android/src/main/java/com/navigation/reactnative/TabNavigationView.java
package com.navigation.reactnative; import android.content.Context; import android.database.DataSetObserver; import android.graphics.drawable.Drawable; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import androidx.viewpager.widget.PagerAdapter; import androidx.viewpager.widget.ViewPager; import com.google.android.material.bottomnavigation.BottomNavigationView; public class TabNavigationView extends BottomNavigationView implements TabView { boolean bottomTabs; int defaultTextColor; int selectedTintColor; int unselectedTintColor; private ViewPager.OnPageChangeListener pageChangeListener; private DataSetObserver dataSetObserver; private boolean layoutRequested = false; public TabNavigationView(Context context) { super(context); TabLayoutView tabLayout = new TabLayoutView(context); selectedTintColor = unselectedTintColor = defaultTextColor = tabLayout.defaultTextColor; } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); TabBarView tabBar = getTabBar(); if (bottomTabs && tabBar != null) { setupWithViewPager(tabBar); tabBar.populateTabs(); } } private TabBarView getTabBar() { for(int i = 0; getParent() != null && i < ((ViewGroup) getParent()).getChildCount(); i++) { View child = ((ViewGroup) getParent()).getChildAt(i); if (child instanceof TabBarView) return (TabBarView) child; } return null; } @Override public void setupWithViewPager(@Nullable final ViewPager viewPager) { if (viewPager != null && viewPager.getAdapter() != null) { final PagerAdapter pagerAdapter = viewPager.getAdapter(); buildMenu(pagerAdapter); setOnNavigationItemSelectedListener(new OnNavigationItemSelectedListener() { @Override public boolean onNavigationItemSelected(@NonNull MenuItem menuItem) { viewPager.setCurrentItem(menuItem.getOrder(), false); return true; } }); if (pageChangeListener != null) 
viewPager.removeOnPageChangeListener(pageChangeListener); pageChangeListener = new ViewPager.OnPageChangeListener() { @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) { } @Override public void onPageSelected(int position) { setSelectedItemId(position); } @Override public void onPageScrollStateChanged(int state) { } }; viewPager.addOnPageChangeListener(pageChangeListener); if (dataSetObserver != null) pagerAdapter.unregisterDataSetObserver(dataSetObserver); dataSetObserver = new DataSetObserver() { @Override public void onChanged() { buildMenu(pagerAdapter); setSelectedItemId(viewPager.getCurrentItem()); } }; pagerAdapter.registerDataSetObserver(dataSetObserver); setSelectedItemId(viewPager.getCurrentItem()); } } private void buildMenu(PagerAdapter pagerAdapter) { getMenu().clear(); for (int i = 0; i < pagerAdapter.getCount(); i++) { getMenu().add(Menu.NONE, i, i, pagerAdapter.getPageTitle(i)); } } @Override public void requestLayout() { super.requestLayout(); if (!layoutRequested) { layoutRequested = true; post(measureAndLayout); } } private final Runnable measureAndLayout = new Runnable() { @Override public void run() { layoutRequested = false; measure( MeasureSpec.makeMeasureSpec(getWidth(), MeasureSpec.EXACTLY), MeasureSpec.makeMeasureSpec(getHeight(), MeasureSpec.EXACTLY)); layout(getLeft(), getTop(), getRight(), getBottom()); } }; @Override public int getTabCount() { return getMenu().size(); } @Override public void setTitle(int index, String title) { getMenu().getItem(index).setTitle(title); } public void setIcon(int index, Drawable icon) { getMenu().getItem(index).setIcon(icon); } }
Scrolled to top when primary tab reselected Ensured only user generated tap counts by turning off auto selected ones (fired by other events). Only done the coordinatorlayout case - have to scroll the appbarlayout otherwise the navigation bar stays collapsed
NavigationReactNative/src/android/src/main/java/com/navigation/reactnative/TabNavigationView.java
Scrolled to top when primary tab reselected
<ide><path>avigationReactNative/src/android/src/main/java/com/navigation/reactnative/TabNavigationView.java <ide> import android.view.MenuItem; <ide> import android.view.View; <ide> import android.view.ViewGroup; <add>import android.widget.ScrollView; <ide> <ide> import androidx.annotation.NonNull; <ide> import androidx.annotation.Nullable; <add>import androidx.coordinatorlayout.widget.CoordinatorLayout; <ide> import androidx.viewpager.widget.PagerAdapter; <ide> import androidx.viewpager.widget.ViewPager; <ide> <add>import com.google.android.material.appbar.AppBarLayout; <ide> import com.google.android.material.bottomnavigation.BottomNavigationView; <ide> <ide> public class TabNavigationView extends BottomNavigationView implements TabView { <ide> private ViewPager.OnPageChangeListener pageChangeListener; <ide> private DataSetObserver dataSetObserver; <ide> private boolean layoutRequested = false; <add> private boolean autoSelected = false; <ide> <ide> public TabNavigationView(Context context) { <ide> super(context); <ide> setOnNavigationItemSelectedListener(new OnNavigationItemSelectedListener() { <ide> @Override <ide> public boolean onNavigationItemSelected(@NonNull MenuItem menuItem) { <add> if (!autoSelected && viewPager.getCurrentItem() == menuItem.getOrder()) { <add> View tab = ((TabBarView) viewPager).getTabAt(0); <add> if (tab instanceof CoordinatorLayout) { <add> CoordinatorLayout coordinatorLayout = (CoordinatorLayout) tab; <add> for(int i = 0; i < coordinatorLayout.getChildCount(); i++) { <add> if (coordinatorLayout.getChildAt(i) instanceof AppBarLayout) <add> ((AppBarLayout) coordinatorLayout.getChildAt(i)).setExpanded(true); <add> if (coordinatorLayout.getChildAt(i) instanceof ScrollView) <add> ((ScrollView) coordinatorLayout.getChildAt(i)).smoothScrollTo(0,0); <add> } <add> } <add> } <ide> viewPager.setCurrentItem(menuItem.getOrder(), false); <ide> return true; <ide> } <ide> <ide> @Override <ide> public void onPageSelected(int position) { <add> 
autoSelected = true; <ide> setSelectedItemId(position); <add> autoSelected = false; <ide> } <ide> <ide> @Override <ide> } <ide> }; <ide> pagerAdapter.registerDataSetObserver(dataSetObserver); <add> autoSelected = true; <ide> setSelectedItemId(viewPager.getCurrentItem()); <add> autoSelected = false; <ide> } <ide> } <ide>
Java
agpl-3.0
f798c5c50ec9a6bdcdb02c0353160369570af13d
0
relateiq/sql-layer,ngaut/sql-layer,shunwang/sql-layer-1,shunwang/sql-layer-1,ngaut/sql-layer,wfxiang08/sql-layer-1,relateiq/sql-layer,jaytaylor/sql-layer,jaytaylor/sql-layer,qiuyesuifeng/sql-layer,relateiq/sql-layer,jaytaylor/sql-layer,wfxiang08/sql-layer-1,shunwang/sql-layer-1,qiuyesuifeng/sql-layer,wfxiang08/sql-layer-1,ngaut/sql-layer,qiuyesuifeng/sql-layer,shunwang/sql-layer-1,qiuyesuifeng/sql-layer,jaytaylor/sql-layer,wfxiang08/sql-layer-1,relateiq/sql-layer,ngaut/sql-layer
/** * END USER LICENSE AGREEMENT (“EULA”) * * READ THIS AGREEMENT CAREFULLY (date: 9/13/2011): * http://www.akiban.com/licensing/20110913 * * BY INSTALLING OR USING ALL OR ANY PORTION OF THE SOFTWARE, YOU ARE ACCEPTING * ALL OF THE TERMS AND CONDITIONS OF THIS AGREEMENT. YOU AGREE THAT THIS * AGREEMENT IS ENFORCEABLE LIKE ANY WRITTEN AGREEMENT SIGNED BY YOU. * * IF YOU HAVE PAID A LICENSE FEE FOR USE OF THE SOFTWARE AND DO NOT AGREE TO * THESE TERMS, YOU MAY RETURN THE SOFTWARE FOR A FULL REFUND PROVIDED YOU (A) DO * NOT USE THE SOFTWARE AND (B) RETURN THE SOFTWARE WITHIN THIRTY (30) DAYS OF * YOUR INITIAL PURCHASE. * * IF YOU WISH TO USE THE SOFTWARE AS AN EMPLOYEE, CONTRACTOR, OR AGENT OF A * CORPORATION, PARTNERSHIP OR SIMILAR ENTITY, THEN YOU MUST BE AUTHORIZED TO SIGN * FOR AND BIND THE ENTITY IN ORDER TO ACCEPT THE TERMS OF THIS AGREEMENT. THE * LICENSES GRANTED UNDER THIS AGREEMENT ARE EXPRESSLY CONDITIONED UPON ACCEPTANCE * BY SUCH AUTHORIZED PERSONNEL. * * IF YOU HAVE ENTERED INTO A SEPARATE WRITTEN LICENSE AGREEMENT WITH AKIBAN FOR * USE OF THE SOFTWARE, THE TERMS AND CONDITIONS OF SUCH OTHER AGREEMENT SHALL * PREVAIL OVER ANY CONFLICTING TERMS OR CONDITIONS IN THIS AGREEMENT. 
*/ package com.akiban.server.test.it.qp; import org.junit.Ignore; import com.akiban.util.ShareHolder; import com.akiban.server.types.ValueSource; import com.akiban.qp.expression.IndexBound; import com.akiban.qp.operator.Operator; import org.junit.Test; import com.akiban.server.expression.std.FieldExpression; import com.akiban.qp.operator.API; import com.akiban.qp.expression.IndexKeyRange; import com.akiban.qp.operator.Cursor; import com.akiban.qp.row.Row; import com.akiban.qp.rowtype.IndexRowType; import com.akiban.qp.rowtype.RowType; import com.akiban.server.api.dml.SetColumnSelector; import com.akiban.server.api.dml.scan.NewRow; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Before; import com.akiban.qp.rowtype.Schema; import static com.akiban.qp.operator.API.cursor; import static com.akiban.qp.operator.API.indexScan_Default; import static org.junit.Assert.*; public class UniqueIndexScanJumpBoundedWithNullsIT extends OperatorITBase { // Positions of fields within the index row private static final int A = 0; private static final int B = 1; private static final int C = 2; private static final int COLUMN_COUNT = 3; private static final boolean ASC = true; private static final boolean DESC = false; private static final SetColumnSelector INDEX_ROW_SELECTOR = new SetColumnSelector(0, 1, 2); private int t; private RowType tRowType; private IndexRowType idxRowType; private Map<Long, TestRow> indexRowMap = new HashMap<Long, TestRow>(); @Before @Override public void before() { t = createTable( "schema", "t", "id int not null primary key", "a int", "b int", "c int"); createUniqueIndex("schema", "t", "idx", "a", "b", "c"); schema = new Schema(rowDefCache().ais()); tRowType = schema.userTableRowType(userTable(t)); idxRowType = indexType(t, "a", "b", "c"); db = new NewRow[] { createNewRow(t, 1010L, 1L, 11L, 110L), createNewRow(t, 1011L, 1L, 11L, 111L), createNewRow(t, 1012L, 1L, (Long)null, 122L), 
createNewRow(t, 1013L, 1L, (Long)null, 122L), createNewRow(t, 1014L, 1L, 13L, 132L), createNewRow(t, 1015L, 1L, 13L, 133L), createNewRow(t, 1016L, 1L, null, 122L), createNewRow(t, 1017L, 1L, 14L, 142L), createNewRow(t, 1018L, 1L, 30L, 201L), createNewRow(t, 1019L, 1L, 30L, null), createNewRow(t, 1020L, 1L, 30L, null), createNewRow(t, 1021L, 1L, 30L, null), createNewRow(t, 1022L, 1L, 30L, 300L), createNewRow(t, 1023L, 1L, 40L, 401L) }; adapter = persistitAdapter(schema); queryContext = queryContext(adapter); use(db); for (NewRow row : db) { indexRowMap.put((Long) row.get(0), new TestRow(tRowType, new Object[] {row.get(1), // a row.get(2), // b row.get(3), // c })); } } /** * * @param id * @return the b column of this id (used to make the lower and upper bound. * This is to avoid confusion as to what 'b' values correspond to what id */ private int b_of(long id) { return (int)indexRow(id).eval(1).getLong(); } @Test public void testAAA() { testSkipNulls(1010, b_of(1010), true, b_of(1015), true, getAAA(), new long[]{1010, 1011, 1014, 1015}); // skip 1012 and 1013 } @Test public void testAAAToMinNull() { testSkipNulls(1012, // jump to one of the nulls b_of(1010), true, b_of(1015), true, getAAA(), new long[] {1012, 1013, 1016, 1010, 1011, 1014, 1015}); // should see everything } // with nulls appearing first @Test public void testDDD() { testSkipNulls(1015, b_of(1010), true, b_of(1015), true, getDDD(), new long[] {1015, 1014, 1011, 1010}); // skip 1012 and 1013 } @Test public void testDDDToFirstNull() { testSkipNulls(1019, // jump to the first null b_of(1018), true, b_of(1021), true, getDDD(), new long[] {1021, 1020, 1019}); // 3 rows of [1L, 30L, null] } // (The use of (1021, 1020, 1019) is just for demonstrative purpose. 
// They could be anything as long as their mapping @Test // index row is [1L, 30L, null] ) public void testDDDToMiddleNull() { testSkipNulls(1020, // jump to the middle null b_of(1018), true, b_of(1021), true, getDDD(), new long[] {1021, 1020, 1019}); } @Test public void testDDDToLastNull() { testSkipNulls(1021, // jump to the first null b_of(1018), true, b_of(1021), true, getDDD(), new long[] {1021, 1020, 1019}); } @Test public void testAAAToFirstNull() { testSkipNulls(1019, // jump to the first null b_of(1018), true, b_of(1021), true, getAAA(), new long[] {1019, 1020, 1021, 1018, 1022}); } @Test public void testAAAToMiddleNull() { testSkipNulls(1020, // jump to the middle null b_of(1018), true, b_of(1021), true, getAAA(), new long[] {1021, 1020, 1019, 1018, 1022}); } @Test public void testAAAToLastNull() { testSkipNulls(1021, // jump to the first null b_of(1018), true, b_of(1021), true, getAAA(), new long[] {1021, 1020, 1019, 1018, 1022}); } @Test public void testDDDToMaxNull() { testSkipNulls(1016, b_of(1015), false, b_of(1017), true, getDDD(), new long[] {}); } @Test public void testAAD() { // currently failing // throw IndexOutOfBoundException testSkipNulls(1014, b_of(1010), true, b_of(1017), true, getAAD(), new long[] {1014, 1017}); } //TODO: add more test****() private void testSkipNulls(long targetId, // location to jump to int bLo, boolean lowInclusive, // lower bound int bHi, boolean hiInclusive, // upper bound API.Ordering ordering, long expected[]) { Operator plan = indexScan_Default(idxRowType, bounded(1, bLo, lowInclusive, bHi, hiInclusive), ordering); Cursor cursor = cursor(plan, queryContext); cursor.open(); cursor.jump(indexRow(targetId), INDEX_ROW_SELECTOR); Row row; List<Row> actualRows = new ArrayList<Row>(); List<ShareHolder<Row>> rowHolders = new ArrayList<ShareHolder<Row>>(); while ((row = cursor.next()) != null) { // Prevent sharing of rows since verification accumulates them actualRows.add(row); rowHolders.add(new ShareHolder<Row>(row)); } 
cursor.close(); // find the row with given id List<Row> expectedRows = new ArrayList<Row>(expected.length); for (long val : expected) expectedRows.add(indexRow(val)); // check the list of rows checkRows(expectedRows, actualRows); } private void checkRows(List<Row> expected, List<Row> actual) { List<List<Long>> expectedRows = toListOfLong(expected); List<List<Long>> actualRows = toListOfLong(actual); assertEquals(expectedRows, actualRows); } private List<List<Long>> toListOfLong(List<Row> rows) { List<List<Long>> ret = new ArrayList<List<Long>>(); for (Row row : rows) { // nulls are allowed ArrayList<Long> toLong = new ArrayList<Long>(); for (int n = 0; n < COLUMN_COUNT; ++n) addColumn(toLong, row.eval(n)); ret.add(toLong); } return ret; } private static void addColumn(List<Long> row, ValueSource v) { if (v.isNull()) { row.add(null); return; } switch(v.getConversionType()) { case LONG: row.add(v.getLong()); break; case INT: row.add(v.getInt()); break; default: throw new IllegalArgumentException("Unexpected type: " + v.getConversionType()); } } private API.Ordering getAAA() { return ordering(A, ASC, B, ASC, C, ASC); } private API.Ordering getAAD() { return ordering(A, ASC, B, ASC, C, DESC); } private API.Ordering getADA() { return ordering(A, ASC, B, DESC, C, ASC); } private API.Ordering getDAA() { return ordering(A, DESC, B, ASC, C, ASC); } private API.Ordering getDAD() { return ordering(A, DESC, B, ASC, C, DESC); } private API.Ordering getDDA() { return ordering(A, DESC, B, DESC, C, ASC); } private API.Ordering getADD() { return ordering(A, ASC, B, ASC, C, DESC); } private API.Ordering getDDD() { return ordering(A, DESC, B, DESC, C, DESC); } private TestRow indexRow(long id) { return indexRowMap.get(id); } private long[] longs(long... 
longs) { return longs; } private IndexKeyRange bounded(long a, long bLo, boolean loInclusive, long bHi, boolean hiInclusive) { IndexBound lo = new IndexBound(new TestRow(tRowType, new Object[] {a, bLo}), new SetColumnSelector(0, 1)); IndexBound hi = new IndexBound(new TestRow(tRowType, new Object[] {a, bHi}), new SetColumnSelector(0, 1)); return IndexKeyRange.bounded(idxRowType, lo, loInclusive, hi, hiInclusive); } private API.Ordering ordering(Object... ord) // alternating column positions and asc/desc { API.Ordering ordering = API.ordering(); int i = 0; while (i < ord.length) { int column = (Integer) ord[i++]; boolean asc = (Boolean) ord[i++]; ordering.append(new FieldExpression(idxRowType, column), asc); } return ordering; } }
src/test/java/com/akiban/server/test/it/qp/UniqueIndexScanJumpBoundedWithNullsIT.java
/** * END USER LICENSE AGREEMENT (“EULA”) * * READ THIS AGREEMENT CAREFULLY (date: 9/13/2011): * http://www.akiban.com/licensing/20110913 * * BY INSTALLING OR USING ALL OR ANY PORTION OF THE SOFTWARE, YOU ARE ACCEPTING * ALL OF THE TERMS AND CONDITIONS OF THIS AGREEMENT. YOU AGREE THAT THIS * AGREEMENT IS ENFORCEABLE LIKE ANY WRITTEN AGREEMENT SIGNED BY YOU. * * IF YOU HAVE PAID A LICENSE FEE FOR USE OF THE SOFTWARE AND DO NOT AGREE TO * THESE TERMS, YOU MAY RETURN THE SOFTWARE FOR A FULL REFUND PROVIDED YOU (A) DO * NOT USE THE SOFTWARE AND (B) RETURN THE SOFTWARE WITHIN THIRTY (30) DAYS OF * YOUR INITIAL PURCHASE. * * IF YOU WISH TO USE THE SOFTWARE AS AN EMPLOYEE, CONTRACTOR, OR AGENT OF A * CORPORATION, PARTNERSHIP OR SIMILAR ENTITY, THEN YOU MUST BE AUTHORIZED TO SIGN * FOR AND BIND THE ENTITY IN ORDER TO ACCEPT THE TERMS OF THIS AGREEMENT. THE * LICENSES GRANTED UNDER THIS AGREEMENT ARE EXPRESSLY CONDITIONED UPON ACCEPTANCE * BY SUCH AUTHORIZED PERSONNEL. * * IF YOU HAVE ENTERED INTO A SEPARATE WRITTEN LICENSE AGREEMENT WITH AKIBAN FOR * USE OF THE SOFTWARE, THE TERMS AND CONDITIONS OF SUCH OTHER AGREEMENT SHALL * PREVAIL OVER ANY CONFLICTING TERMS OR CONDITIONS IN THIS AGREEMENT. 
*/ package com.akiban.server.test.it.qp; import org.junit.Ignore; import com.akiban.util.ShareHolder; import com.akiban.server.types.ValueSource; import com.akiban.qp.expression.IndexBound; import com.akiban.qp.operator.Operator; import org.junit.Test; import com.akiban.server.expression.std.FieldExpression; import com.akiban.qp.operator.API; import com.akiban.qp.expression.IndexKeyRange; import com.akiban.qp.operator.Cursor; import com.akiban.qp.row.Row; import com.akiban.qp.rowtype.IndexRowType; import com.akiban.qp.rowtype.RowType; import com.akiban.server.api.dml.SetColumnSelector; import com.akiban.server.api.dml.scan.NewRow; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.junit.Before; import com.akiban.qp.rowtype.Schema; import static com.akiban.qp.operator.API.cursor; import static com.akiban.qp.operator.API.indexScan_Default; import static org.junit.Assert.*; public class UniqueIndexScanJumpBoundedWithNullsIT extends OperatorITBase { // Positions of fields within the index row private static final int A = 0; private static final int B = 1; private static final int C = 2; private static final int COLUMN_COUNT = 3; private static final boolean ASC = true; private static final boolean DESC = false; private static final SetColumnSelector INDEX_ROW_SELECTOR = new SetColumnSelector(0, 1, 2); private int t; private RowType tRowType; private IndexRowType idxRowType; private Map<Long, TestRow> indexRowMap = new HashMap<Long, TestRow>(); @Before @Override public void before() { t = createTable( "schema", "t", "id int not null primary key", "a int", "b int", "c int"); createUniqueIndex("schema", "t", "idx", "a", "b", "c"); schema = new Schema(rowDefCache().ais()); tRowType = schema.userTableRowType(userTable(t)); idxRowType = indexType(t, "a", "b", "c"); db = new NewRow[] { createNewRow(t, 1010L, 1L, 11L, 110L), createNewRow(t, 1011L, 1L, 11L, 111L), createNewRow(t, 1012L, 1L, (Long)null, 122L), 
createNewRow(t, 1013L, 1L, (Long)null, 122L), createNewRow(t, 1014L, 1L, 13L, 132L), createNewRow(t, 1015L, 1L, 13L, 133L), createNewRow(t, 1016L, 1L, null, 122L), createNewRow(t, 1017L, 1L, 14L, 142L), createNewRow(t, 1018L, 1L, 30L, 201L), createNewRow(t, 1019L, 1L, 30L, null), createNewRow(t, 1020L, 1L, 30L, null), createNewRow(t, 1021L, 1L, 30L, null), createNewRow(t, 1022L, 1L, 30L, 300L), createNewRow(t, 1023L, 1L, 40L, 401L) }; adapter = persistitAdapter(schema); queryContext = queryContext(adapter); use(db); for (NewRow row : db) { indexRowMap.put((Long) row.get(0), new TestRow(tRowType, new Object[] {row.get(1), // a row.get(2), // b row.get(3), // c })); } } /** * * @param id * @return the b column of this id (used to make the lower and upper bound. * This is to avoid confusion as to what 'b' values correspond to what id */ private int b_of(long id) { return (int)indexRow(id).eval(1).getLong(); } @Test public void testAAA() { testSkipNulls(1010, b_of(1010), true, b_of(1015), true, getAAA(), new long[]{1010, 1011, 1014, 1015}); // skip 1012 and 1013 } @Test public void testAAAToMinNull() { testSkipNulls(1012, // jump to one of the nulls b_of(1010), true, b_of(1015), true, getAAA(), new long[] {1012, 1013, 1016, 1010, 1011, 1014, 1015}); // should see everything } // with nulls appearing first @Test public void testDDD() { testSkipNulls(1015, b_of(1010), true, b_of(1015), true, getDDD(), new long[] {1015, 1014, 1011, 1010}); // skip 1012 and 1013 } @Test public void testDDDToFirstNull() { testSkipNulls(1019, // jump to the first null b_of(1018), true, b_of(1021), true, getDDD(), new long[] {1021, 1020, 1019}); // 3 rows of [1L, 30L, null] } // (The use of (1021, 1020, 1019) is just for demonstrative purpose. 
// They could be anything as long as their mapping @Test // index row is [1L, 30L, null] ) public void testDDDToMiddleNull() { testSkipNulls(1020, // jump to the middle null b_of(1018), true, b_of(1021), true, getDDD(), new long[] {1021, 1020, 1019}); } @Test public void testDDDToLastNull() { testSkipNulls(1021, // jump to the first null b_of(1018), true, b_of(1021), true, getDDD(), new long[] {1021, 1020, 1019}); } @Test public void testAAAToFirstNull() { testSkipNulls(1019, // jump to the first null b_of(1018), true, b_of(1021), true, getAAA(), new long[] {1019, 1020, 1021, 1018, 1022}); } @Test public void testAAAToMiddleNull() { testSkipNulls(1020, // jump to the middle null b_of(1018), true, b_of(1021), true, getAAA(), new long[] {1021, 1020, 1019, 1018, 1022}); } @Test public void testAAAToLastNull() { testSkipNulls(1021, // jump to the first null b_of(1018), true, b_of(1021), true, getAAA(), new long[] {1021, 1020, 1019, 1018, 1022}); } @Test public void testDDDToMaxNull() { testSkipNulls(1016, b_of(1015), false, b_of(1017), true, getDDD(), new long[] {}); } @Ignore @Test public void testAAD() { // currently failing // throw IndexOutOfBoundException testSkipNulls(1014, b_of(1010), true, b_of(1017), true, getAAD(), new long[] {1014, 1017}); } //TODO: add more test****() private void testSkipNulls(long targetId, // location to jump to int bLo, boolean lowInclusive, // lower bound int bHi, boolean hiInclusive, // upper bound API.Ordering ordering, long expected[]) { Operator plan = indexScan_Default(idxRowType, bounded(1, bLo, lowInclusive, bHi, hiInclusive), ordering); Cursor cursor = cursor(plan, queryContext); cursor.open(); cursor.jump(indexRow(targetId), INDEX_ROW_SELECTOR); Row row; List<Row> actualRows = new ArrayList<Row>(); List<ShareHolder<Row>> rowHolders = new ArrayList<ShareHolder<Row>>(); while ((row = cursor.next()) != null) { // Prevent sharing of rows since verification accumulates them actualRows.add(row); rowHolders.add(new 
ShareHolder<Row>(row)); } cursor.close(); // find the row with given id List<Row> expectedRows = new ArrayList<Row>(expected.length); for (long val : expected) expectedRows.add(indexRow(val)); // check the list of rows checkRows(expectedRows, actualRows); } private void checkRows(List<Row> expected, List<Row> actual) { List<List<Long>> expectedRows = toListOfLong(expected); List<List<Long>> actualRows = toListOfLong(actual); assertEquals(expectedRows, actualRows); } private List<List<Long>> toListOfLong(List<Row> rows) { List<List<Long>> ret = new ArrayList<List<Long>>(); for (Row row : rows) { // nulls are allowed ArrayList<Long> toLong = new ArrayList<Long>(); for (int n = 0; n < COLUMN_COUNT; ++n) addColumn(toLong, row.eval(n)); ret.add(toLong); } return ret; } private static void addColumn(List<Long> row, ValueSource v) { if (v.isNull()) { row.add(null); return; } switch(v.getConversionType()) { case LONG: row.add(v.getLong()); break; case INT: row.add(v.getInt()); break; default: throw new IllegalArgumentException("Unexpected type: " + v.getConversionType()); } } private API.Ordering getAAA() { return ordering(A, ASC, B, ASC, C, ASC); } private API.Ordering getAAD() { return ordering(A, ASC, B, ASC, C, DESC); } private API.Ordering getADA() { return ordering(A, ASC, B, DESC, C, ASC); } private API.Ordering getDAA() { return ordering(A, DESC, B, ASC, C, ASC); } private API.Ordering getDAD() { return ordering(A, DESC, B, ASC, C, DESC); } private API.Ordering getDDA() { return ordering(A, DESC, B, DESC, C, ASC); } private API.Ordering getADD() { return ordering(A, ASC, B, ASC, C, DESC); } private API.Ordering getDDD() { return ordering(A, DESC, B, DESC, C, DESC); } private TestRow indexRow(long id) { return indexRowMap.get(id); } private long[] longs(long... 
longs) { return longs; } private IndexKeyRange bounded(long a, long bLo, boolean loInclusive, long bHi, boolean hiInclusive) { IndexBound lo = new IndexBound(new TestRow(tRowType, new Object[] {a, bLo}), new SetColumnSelector(0, 1)); IndexBound hi = new IndexBound(new TestRow(tRowType, new Object[] {a, bHi}), new SetColumnSelector(0, 1)); return IndexKeyRange.bounded(idxRowType, lo, loInclusive, hi, hiInclusive); } private API.Ordering ordering(Object... ord) // alternating column positions and asc/desc { API.Ordering ordering = API.ordering(); int i = 0; while (i < ord.length) { int column = (Integer) ord[i++]; boolean asc = (Boolean) ord[i++]; ordering.append(new FieldExpression(idxRowType, column), asc); } return ordering; } }
reove @Ignore
src/test/java/com/akiban/server/test/it/qp/UniqueIndexScanJumpBoundedWithNullsIT.java
reove @Ignore
<ide><path>rc/test/java/com/akiban/server/test/it/qp/UniqueIndexScanJumpBoundedWithNullsIT.java <ide> new long[] {}); <ide> } <ide> <del> @Ignore <ide> @Test <ide> public void testAAD() <ide> {
JavaScript
bsd-3-clause
5357c9ae37c4a7a9ea033639a9d1fc8ca4b8c4bc
0
GetStream/Winds,GetStream/Winds,GetStream/Winds
import '../loadenv'; import Queue from 'bull'; import stream from 'getstream'; import moment from 'moment'; import normalize from 'normalize-url'; import RSS from '../models/rss'; import Article from '../models/article'; import async from 'async'; import '../utils/db'; import config from '../config'; import logger from '../utils/logger'; import search from '../utils/search'; import events from '../utils/events'; import { ParseFeed } from './parsers'; const client = stream.connect(config.stream.apiKey, config.stream.apiSecret); const rssQueue = new Queue('rss', config.cache.uri); const ogQueue = new Queue('og', config.cache.uri); // connect the handler to the queue logger.info('Starting the RSS worker'); rssQueue.process((job, done) => { logger.info(`Processing RSS feed ${job.data.url}...`); // start by looking up the RSS object RSS.findOne({ _id: job.data.rss }).then(doc => { if (!doc) { return done(new Error('RSS feed does not exist.')); } // update the feed ParseFeed(job.data.url, function(feedContents, err) { // log the error if (err) { logger.error(err); done(err); return; } // mark it done (even if we have a failure) // set last scraped date on rss object in DB RSS.findByIdAndUpdate( job.data.rss, { $set: { isParsing: false, lastScraped: moment().toISOString(), }, }, { new: true, upsert: false, }, ).catch(err => { logger.error(err); }); // process all the feedContents we found async.mapLimit( feedContents.articles, 10, (post, cb) => { // lookup by url Article.findOne({ url: normalize(post.url),rss: job.data.rss }).then(article => { if (article) { // article already exists cb(null, article); return; } else { Article.create({ description: post.description, publicationDate: post.publicationDate, commentUrl: post.commentUrl, content: post.content, rss: job.data.rss, title: post.title, url: post.url, }).then(article => { // after article is created, add to algolia, stream, and opengraph scraper queue return Promise.all([ search({ _id: article._id, description: 
article.description, publicationDate: article.publicationDate, rss: article.rss, title: article.title, type: 'article', }), client.feed('rss', article.rss).addActivity({ actor: article.rss, foreign_id: `articles:${article._id}`, object: article._id, time: article.publicationDate, verb: 'rss_article', }), ogQueue.add( { url: normalize(article.url), }, { removeOnComplete: true, removeOnFail: true, }, ), Article.find({ rss: job.data.rss }).then(articles => { return events({ meta: { data: { [`rss:${job.data.rss}`]: { articleCount: articles.length, }, }, }, }); }), ]) .then(function() { // this is just returning the article created from the MongoDB `create` call cb(null, article); }) .catch(err => { // error: either adding to algolia, adding to Stream, or adding to OGqueue - continuing on for the time being. logger.error(err); cb(null, article); }); }); } }); }, err => { if (err) { logger.warn( `Scraping failed for ${job.data.url} with error ${err}`, ); done(err); } else { logger.info(`Completed scraping for ${job.data.url}`); done(); } }, ); }); }); });
api/src/workers/rss.js
import '../loadenv'; import Queue from 'bull'; import stream from 'getstream'; import moment from 'moment'; import normalize from 'normalize-url'; import RSS from '../models/rss'; import Article from '../models/article'; import async from 'async'; import '../utils/db'; import config from '../config'; import logger from '../utils/logger'; import search from '../utils/search'; import events from '../utils/events'; import { ParseFeed } from './parsers'; const client = stream.connect(config.stream.apiKey, config.stream.apiSecret); const rssQueue = new Queue('rss', config.cache.uri); const ogQueue = new Queue('og', config.cache.uri); // connect the handler to the queue logger.info('Starting the RSS worker'); rssQueue.process((job, done) => { logger.info(`Processing RSS feed ${job.data.url}...`); // start by looking up the RSS object RSS.findOne({ _id: job.data.rss }).then(doc => { if (!doc) { return done(new Error('RSS feed does not exist.')); } // update the feed ParseFeed(job.data.url, function(feedContents, err) { // log the error if (err) { logger.error(err); done(err); return; } // mark it done (even if we have a failure) // set last scraped date on rss object in DB RSS.findByIdAndUpdate( job.data.rss, { $set: { isParsing: false, lastScraped: moment().toISOString(), }, }, { new: true, upsert: false, }, ).catch(err => { logger.error(err); }); // process all the feedContents we found async.mapLimit( feedContents.articles, 10, (post, cb) => { // lookup by url Article.findOne({ url: normalize(post.url) }).then(article => { if (article) { // article already exists cb(null, article); return; } else { Article.create({ description: post.description, publicationDate: post.publicationDate, commentUrl: post.commentUrl, content: post.content, rss: job.data.rss, title: post.title, url: post.url, }).then(article => { // after article is created, add to algolia, stream, and opengraph scraper queue return Promise.all([ search({ _id: article._id, description: article.description, 
publicationDate: article.publicationDate, rss: article.rss, title: article.title, type: 'article', }), client.feed('rss', article.rss).addActivity({ actor: article.rss, foreign_id: `articles:${article._id}`, object: article._id, time: article.publicationDate, verb: 'rss_article', }), ogQueue.add( { url: normalize(article.url), }, { removeOnComplete: true, removeOnFail: true, }, ), Article.find({ rss: job.data.rss }).then(articles => { return events({ meta: { data: { [`rss:${job.data.rss}`]: { articleCount: articles.length, }, }, }, }); }), ]) .then(function() { // this is just returning the article created from the MongoDB `create` call cb(null, article); }) .catch(err => { // error: either adding to algolia, adding to Stream, or adding to OGqueue - continuing on for the time being. logger.error(err); cb(null, article); }); }); } }); }, err => { if (err) { logger.warn( `Scraping failed for ${job.data.url} with error ${err}`, ); done(err); } else { logger.info(`Completed scraping for ${job.data.url}`); done(); } }, ); }); }); });
uniqueness based on link and feed
api/src/workers/rss.js
uniqueness based on link and feed
<ide><path>pi/src/workers/rss.js <ide> 10, <ide> (post, cb) => { <ide> // lookup by url <del> Article.findOne({ url: normalize(post.url) }).then(article => { <add> Article.findOne({ url: normalize(post.url),rss: job.data.rss }).then(article => { <ide> if (article) { <ide> // article already exists <ide> cb(null, article);
Java
lgpl-2.1
115197e1e7a918cc5b72298d88544ed65d895dc0
0
levants/lightmare
package org.lightmare.scannotation; import java.io.BufferedInputStream; import java.io.DataInputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.WeakHashMap; import javassist.bytecode.AnnotationsAttribute; import javassist.bytecode.ClassFile; import javassist.bytecode.annotation.Annotation; import org.apache.log4j.Logger; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.IOUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.StringUtils; import org.lightmare.utils.fs.codecs.ArchiveUtils; import org.scannotation.archiveiterator.Filter; import org.scannotation.archiveiterator.IteratorFactory; import org.scannotation.archiveiterator.StreamIterator; /** * Extension of {@link org.scannotation.AnnotationDB} for saving Map< * {@link String}, {@link URL}> of class name and {@link URL} for its archive * * @author levan * @since 0.0.18-SNAPSHOT */ public class AnnotationDB extends org.scannotation.AnnotationDB { /** * */ private static final long serialVersionUID = 1L; // To store which class in which URL is found protected Map<String, URL> classOwnersURLs = new WeakHashMap<String, URL>(); // To store which class in which File is found protected Map<String, String> classOwnersFiles = new WeakHashMap<String, String>(); // File separator and extension characters private static final char FILE_EXTEWNTION_SELIM = '.'; private static final char FILE_SEPARATOR_CHAR = '/'; // Log messages private static String SCANNING_STARTED_MESSAGE = "Started scanning for archives on @Stateless annotation"; private static String SCANNING_FINISHED_MESSAGE = "Finished scanning for archives on @Stateless annotation"; private static final String SCANNING_URL_MESSAGE = "Scanning URL "; private static final String FINISHED_URL_MESSAGE = "Finished URL scanning "; private static final Logger LOG = Logger.getLogger(AnnotationDB.class); /** * 
Filters java archive files * * @author levan * @since 0.0.84-SNAPSHOT */ protected class ArchiveFilter implements Filter { @Override public boolean accepts(String subFileName) { boolean valid; if (subFileName.endsWith(ArchiveUtils.CLASS_FILE_EXT)) { if (subFileName.startsWith(ArchiveUtils.FILE_SEPARATOR)) { subFileName = subFileName .substring(CollectionUtils.SECOND_INDEX); } String fileNameForCheck = subFileName.replace( FILE_SEPARATOR_CHAR, FILE_EXTEWNTION_SELIM); valid = !ignoreScan(fileNameForCheck); } else { valid = Boolean.FALSE; } return valid; } } /** * Gets file name from passed {@link URL} instance * @param url * @return {@link String} */ private String getFileName(URL url) { String fileName = url.getFile(); int lastIndex = fileName.lastIndexOf(ArchiveUtils.FILE_SEPARATOR); if (lastIndex > StringUtils.NOT_EXISTING_INDEX) { ++lastIndex; fileName = fileName.substring(lastIndex); } return fileName; } private boolean ignoreScan(String intf) { boolean valid = Boolean.FALSE; String value; String ignored; int length = ignoredPackages.length; for (int i = CollectionUtils.FIRST_INDEX; ObjectUtils.notTrue(valid) && i < length; i++) { ignored = ignoredPackages[i]; value = StringUtils.concat(ignored, FILE_EXTEWNTION_SELIM); if (intf.startsWith(value)) { valid = Boolean.TRUE; } } return valid; } protected void populate(Annotation[] annotations, String className, URL url) { if (ObjectUtils.notNull(annotations)) { Set<String> classAnnotations = classIndex.get(className); String fileName; for (Annotation ann : annotations) { Set<String> classes = annotationIndex.get(ann.getTypeName()); if (classes == null) { classes = new HashSet<String>(); annotationIndex.put(ann.getTypeName(), classes); } classes.add(className); if (!classOwnersURLs.containsKey(className)) { classOwnersURLs.put(className, url); } if (!classOwnersFiles.containsKey(className)) { fileName = getFileName(url); classOwnersFiles.put(className, fileName); } classAnnotations.add(ann.getTypeName()); } } } 
protected void scanClass(ClassFile cf, URL url) { String className = cf.getName(); AnnotationsAttribute visible = (AnnotationsAttribute) cf .getAttribute(AnnotationsAttribute.visibleTag); AnnotationsAttribute invisible = (AnnotationsAttribute) cf .getAttribute(AnnotationsAttribute.invisibleTag); if (ObjectUtils.notNull(visible)) { populate(visible.getAnnotations(), className, url); } if (ObjectUtils.notNull(invisible)) { populate(invisible.getAnnotations(), className, url); } } public void scanClass(InputStream bits, URL url) throws IOException { DataInputStream dstream = new DataInputStream(new BufferedInputStream( bits)); ClassFile cf = null; try { cf = new ClassFile(dstream); String classFileName = cf.getName(); classIndex.put(classFileName, new HashSet<String>()); if (scanClassAnnotations) { scanClass(cf, url); } if (scanMethodAnnotations || scanParameterAnnotations) { scanMethods(cf); } if (scanFieldAnnotations) { scanFields(cf); } // create an index of interfaces the class implements String[] interfaces = cf.getInterfaces(); if (ObjectUtils.notNull(interfaces)) { Set<String> intfs = new HashSet<String>(); for (String intf : interfaces) { intfs.add(intf); } implementsIndex.put(classFileName, intfs); } } finally { IOUtils.closeAll(dstream, bits); } } @Override public void scanArchives(URL... urls) throws IOException { LOG.info(SCANNING_STARTED_MESSAGE); for (URL url : urls) { Filter filter = new ArchiveFilter(); LOG.info(StringUtils.concat(SCANNING_URL_MESSAGE, url)); StreamIterator it = IteratorFactory.create(url, filter); InputStream stream = it.next(); while (ObjectUtils.notNull(stream)) { scanClass(stream, url); stream = it.next(); } LOG.info(StringUtils.concat(FINISHED_URL_MESSAGE, url)); } LOG.info(SCANNING_FINISHED_MESSAGE); } public Map<String, URL> getClassOwnersURLs() { return classOwnersURLs; } public Map<String, String> getClassOwnersFiles() { return classOwnersFiles; } }
src/main/java/org/lightmare/scannotation/AnnotationDB.java
package org.lightmare.scannotation; import java.io.BufferedInputStream; import java.io.DataInputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; import java.util.HashSet; import java.util.Map; import java.util.Set; import java.util.WeakHashMap; import javassist.bytecode.AnnotationsAttribute; import javassist.bytecode.ClassFile; import javassist.bytecode.annotation.Annotation; import org.apache.log4j.Logger; import org.lightmare.utils.CollectionUtils; import org.lightmare.utils.IOUtils; import org.lightmare.utils.ObjectUtils; import org.lightmare.utils.StringUtils; import org.lightmare.utils.fs.codecs.ArchiveUtils; import org.scannotation.archiveiterator.Filter; import org.scannotation.archiveiterator.IteratorFactory; import org.scannotation.archiveiterator.StreamIterator; /** * Extension of {@link org.scannotation.AnnotationDB} for saving Map< * {@link String}, {@link URL}> of class name and {@link URL} for its archive * * @author levan * @since 0.0.18-SNAPSHOT */ public class AnnotationDB extends org.scannotation.AnnotationDB { /** * */ private static final long serialVersionUID = 1L; // To store which class in which URL is found protected Map<String, URL> classOwnersURLs = new WeakHashMap<String, URL>(); // To store which class in which File is found protected Map<String, String> classOwnersFiles = new WeakHashMap<String, String>(); // File separator and extension characters private static final char FILE_EXTEWNTION_SELIM = '.'; private static final char FILE_SEPARATOR_CHAR = '/'; // Log messages private static String SCANNING_STARTED_MESSAGE = "Started scanning for archives on @Stateless annotation"; private static String SCANNING_FINISHED_MESSAGE = "Finished scanning for archives on @Stateless annotation"; private static final String SCANNING_URL_MESSAGE = "Scanning URL "; private static final String FINISHED_URL_MESSAGE = "Finished URL scanning "; private static final Logger LOG = Logger.getLogger(AnnotationDB.class); /** * 
Filters java archive files * * @author levan * @since 0.0.84-SNAPSHOT */ protected class ArchiveFilter implements Filter { @Override public boolean accepts(String subFileName) { boolean valid; if (subFileName.endsWith(ArchiveUtils.CLASS_FILE_EXT)) { if (subFileName.startsWith(ArchiveUtils.FILE_SEPARATOR)) { subFileName = subFileName .substring(CollectionUtils.SECOND_INDEX); } String fileNameForCheck = subFileName.replace( FILE_SEPARATOR_CHAR, FILE_EXTEWNTION_SELIM); valid = !ignoreScan(fileNameForCheck); } else { valid = Boolean.FALSE; } return valid; } } private String getFileName(URL url) { String fileName = url.getFile(); int lastIndex = fileName.lastIndexOf(ArchiveUtils.FILE_SEPARATOR); if (lastIndex > StringUtils.NOT_EXISTING_INDEX) { ++lastIndex; fileName = fileName.substring(lastIndex); } return fileName; } private boolean ignoreScan(String intf) { boolean valid = Boolean.FALSE; String value; String ignored; int length = ignoredPackages.length; for (int i = CollectionUtils.FIRST_INDEX; ObjectUtils.notTrue(valid) && i < length; i++) { ignored = ignoredPackages[i]; value = StringUtils.concat(ignored, FILE_EXTEWNTION_SELIM); if (intf.startsWith(value)) { valid = Boolean.TRUE; } } return valid; } protected void populate(Annotation[] annotations, String className, URL url) { if (ObjectUtils.notNull(annotations)) { Set<String> classAnnotations = classIndex.get(className); String fileName; for (Annotation ann : annotations) { Set<String> classes = annotationIndex.get(ann.getTypeName()); if (classes == null) { classes = new HashSet<String>(); annotationIndex.put(ann.getTypeName(), classes); } classes.add(className); if (!classOwnersURLs.containsKey(className)) { classOwnersURLs.put(className, url); } if (!classOwnersFiles.containsKey(className)) { fileName = getFileName(url); classOwnersFiles.put(className, fileName); } classAnnotations.add(ann.getTypeName()); } } } protected void scanClass(ClassFile cf, URL url) { String className = cf.getName(); 
AnnotationsAttribute visible = (AnnotationsAttribute) cf .getAttribute(AnnotationsAttribute.visibleTag); AnnotationsAttribute invisible = (AnnotationsAttribute) cf .getAttribute(AnnotationsAttribute.invisibleTag); if (ObjectUtils.notNull(visible)) { populate(visible.getAnnotations(), className, url); } if (ObjectUtils.notNull(invisible)) { populate(invisible.getAnnotations(), className, url); } } public void scanClass(InputStream bits, URL url) throws IOException { DataInputStream dstream = new DataInputStream(new BufferedInputStream( bits)); ClassFile cf = null; try { cf = new ClassFile(dstream); String classFileName = cf.getName(); classIndex.put(classFileName, new HashSet<String>()); if (scanClassAnnotations) { scanClass(cf, url); } if (scanMethodAnnotations || scanParameterAnnotations) { scanMethods(cf); } if (scanFieldAnnotations) { scanFields(cf); } // create an index of interfaces the class implements String[] interfaces = cf.getInterfaces(); if (ObjectUtils.notNull(interfaces)) { Set<String> intfs = new HashSet<String>(); for (String intf : interfaces) { intfs.add(intf); } implementsIndex.put(classFileName, intfs); } } finally { IOUtils.closeAll(dstream, bits); } } @Override public void scanArchives(URL... urls) throws IOException { LOG.info(SCANNING_STARTED_MESSAGE); for (URL url : urls) { Filter filter = new ArchiveFilter(); LOG.info(StringUtils.concat(SCANNING_URL_MESSAGE, url)); StreamIterator it = IteratorFactory.create(url, filter); InputStream stream = it.next(); while (ObjectUtils.notNull(stream)) { scanClass(stream, url); stream = it.next(); } LOG.info(StringUtils.concat(FINISHED_URL_MESSAGE, url)); } LOG.info(SCANNING_FINISHED_MESSAGE); } public Map<String, URL> getClassOwnersURLs() { return classOwnersURLs; } public Map<String, String> getClassOwnersFiles() { return classOwnersFiles; } }
improved code / comments at utility classes
src/main/java/org/lightmare/scannotation/AnnotationDB.java
improved code / comments at utility classes
<ide><path>rc/main/java/org/lightmare/scannotation/AnnotationDB.java <ide> } <ide> } <ide> <add> /** <add> * Gets file name from passed {@link URL} instance <add> * @param url <add> * @return {@link String} <add> */ <ide> private String getFileName(URL url) { <ide> <ide> String fileName = url.getFile();
Java
bsd-2-clause
7efb8ab48697420ec19bd2f5295e8ec0eb2c80d6
0
devinfrench/runelite,runelite/runelite,Sethtroll/runelite,Sethtroll/runelite,abelbriggs1/runelite,l2-/runelite,devinfrench/runelite,abelbriggs1/runelite,l2-/runelite,runelite/runelite,KronosDesign/runelite,abelbriggs1/runelite,Noremac201/runelite,Noremac201/runelite,runelite/runelite,KronosDesign/runelite
/* * Copyright (c) 2016-2017, Abel Briggs * Copyright (c) 2017, Kronos <https://github.com/KronosDesign> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package net.runelite.client.plugins.idlenotifier; import static net.runelite.api.AnimationID.COOKING_FIRE; import static net.runelite.api.AnimationID.COOKING_RANGE; import static net.runelite.api.AnimationID.CRAFTING_GLASSBLOWING; import static net.runelite.api.AnimationID.CRAFTING_SPINNING; import static net.runelite.api.AnimationID.FISHING_CAGE; import static net.runelite.api.AnimationID.FISHING_HARPOON; import static net.runelite.api.AnimationID.FISHING_KARAMBWAN; import static net.runelite.api.AnimationID.FISHING_NET; import static net.runelite.api.AnimationID.FISHING_POLE_CAST; import static net.runelite.api.AnimationID.FLETCHING_BOW_CUTTING; import static net.runelite.api.AnimationID.FLETCHING_STRING_MAGIC_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_MAGIC_SHORTBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_MAPLE_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_MAPLE_SHORTBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_NORMAL_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_NORMAL_SHORTBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_OAK_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_OAK_SHORTBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_WILLOW_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_WILLOW_SHORTBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_YEW_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_YEW_SHORTBOW; import static net.runelite.api.AnimationID.GEM_CUTTING_DIAMOND; import static net.runelite.api.AnimationID.GEM_CUTTING_EMERALD; import static net.runelite.api.AnimationID.GEM_CUTTING_JADE; import static net.runelite.api.AnimationID.GEM_CUTTING_OPAL; import static net.runelite.api.AnimationID.GEM_CUTTING_REDTOPAZ; import static net.runelite.api.AnimationID.GEM_CUTTING_RUBY; import static 
net.runelite.api.AnimationID.GEM_CUTTING_SAPPHIRE; import static net.runelite.api.AnimationID.HERBLORE_MAKE_TAR; import static net.runelite.api.AnimationID.HERBLORE_POTIONMAKING; import static net.runelite.api.AnimationID.IDLE; import static net.runelite.api.AnimationID.MAGIC_CHARGING_ORBS; import static net.runelite.api.AnimationID.MINING_ADAMANT_PICKAXE; import static net.runelite.api.AnimationID.MINING_BLACK_PICKAXE; import static net.runelite.api.AnimationID.MINING_BRONZE_PICKAXE; import static net.runelite.api.AnimationID.MINING_DRAGON_PICKAXE; import static net.runelite.api.AnimationID.MINING_DRAGON_PICKAXE_ORN; import static net.runelite.api.AnimationID.MINING_INFERNAL_PICKAXE; import static net.runelite.api.AnimationID.MINING_IRON_PICKAXE; import static net.runelite.api.AnimationID.MINING_MITHRIL_PICKAXE; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_ADAMANT; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_BLACK; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_BRONZE; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_DRAGON; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_DRAGON_ORN; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_INFERNAL; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_IRON; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_MITHRIL; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_RUNE; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_STEEL; import static net.runelite.api.AnimationID.MINING_RUNE_PICKAXE; import static net.runelite.api.AnimationID.MINING_STEEL_PICKAXE; import static net.runelite.api.AnimationID.SMITHING_ANVIL; import static net.runelite.api.AnimationID.SMITHING_CANNONBALL; import static net.runelite.api.AnimationID.SMITHING_SMELTING; import static net.runelite.api.AnimationID.WOODCUTTING_ADAMANT; import static net.runelite.api.AnimationID.WOODCUTTING_BLACK; import static 
net.runelite.api.AnimationID.WOODCUTTING_BRONZE; import static net.runelite.api.AnimationID.WOODCUTTING_DRAGON; import static net.runelite.api.AnimationID.WOODCUTTING_INFERNAL; import static net.runelite.api.AnimationID.WOODCUTTING_IRON; import static net.runelite.api.AnimationID.WOODCUTTING_MITHRIL; import static net.runelite.api.AnimationID.WOODCUTTING_RUNE; import static net.runelite.api.AnimationID.WOODCUTTING_STEEL; import com.google.common.eventbus.Subscribe; import com.google.inject.Provides; import java.time.Duration; import java.time.Instant; import javax.inject.Inject; import net.runelite.api.Actor; import net.runelite.api.Client; import net.runelite.api.GameState; import net.runelite.api.Player; import net.runelite.api.Skill; import net.runelite.api.events.AnimationChanged; import net.runelite.api.events.GameStateChanged; import net.runelite.api.events.GameTick; import net.runelite.client.Notifier; import net.runelite.client.config.ConfigManager; import net.runelite.client.plugins.Plugin; import net.runelite.client.plugins.PluginDescriptor; import net.runelite.client.ui.ClientUI; @PluginDescriptor( name = "Idle notifier plugin" ) public class IdleNotifierPlugin extends Plugin { private static final int LOGOUT_WARNING_AFTER_TICKS = 14000; // 4 minutes and 40 seconds private static final Duration SIX_HOUR_LOGOUT_WARNING_AFTER_DURATION = Duration.ofMinutes(340); @Inject Notifier notifier; @Inject ClientUI gui; @Inject Client client; @Inject IdleNotifierConfig config; private Actor lastOpponent; private Instant lastAnimating; private Instant lastInteracting; private Instant lastHitpoints; private Instant lastPrayer; private boolean notifyIdle = false; private boolean notifyHitpoints = true; private boolean notifyPrayer = true; private boolean notifyIdleLogout = true; private boolean notify6HourLogout = true; private Instant sixHourWarningTime; private boolean ready; @Provides IdleNotifierConfig provideConfig(ConfigManager configManager) { return 
configManager.getConfig(IdleNotifierConfig.class); } @Subscribe public void onAnimationChanged(AnimationChanged event) { if (!config.isEnabled() || client.getGameState() != GameState.LOGGED_IN) { return; } Player localPlayer = client.getLocalPlayer(); if (localPlayer != event.getActor()) { return; } int animation = localPlayer.getAnimation(); switch (animation) { /* Woodcutting */ case WOODCUTTING_BRONZE: case WOODCUTTING_IRON: case WOODCUTTING_STEEL: case WOODCUTTING_BLACK: case WOODCUTTING_MITHRIL: case WOODCUTTING_ADAMANT: case WOODCUTTING_RUNE: case WOODCUTTING_DRAGON: case WOODCUTTING_INFERNAL: /* Cooking(Fire, Range) */ case COOKING_FIRE: case COOKING_RANGE: /* Crafting(Gem Cutting, Glassblowing, Spinning) */ case GEM_CUTTING_OPAL: case GEM_CUTTING_JADE: case GEM_CUTTING_REDTOPAZ: case GEM_CUTTING_SAPPHIRE: case GEM_CUTTING_EMERALD: case GEM_CUTTING_RUBY: case GEM_CUTTING_DIAMOND: case CRAFTING_GLASSBLOWING: case CRAFTING_SPINNING: /* Fletching(Cutting, Stringing) */ case FLETCHING_BOW_CUTTING: case FLETCHING_STRING_NORMAL_SHORTBOW: case FLETCHING_STRING_OAK_SHORTBOW: case FLETCHING_STRING_WILLOW_SHORTBOW: case FLETCHING_STRING_MAPLE_SHORTBOW: case FLETCHING_STRING_YEW_SHORTBOW: case FLETCHING_STRING_MAGIC_SHORTBOW: case FLETCHING_STRING_NORMAL_LONGBOW: case FLETCHING_STRING_OAK_LONGBOW: case FLETCHING_STRING_WILLOW_LONGBOW: case FLETCHING_STRING_MAPLE_LONGBOW: case FLETCHING_STRING_YEW_LONGBOW: case FLETCHING_STRING_MAGIC_LONGBOW: /* Smithing(Anvil, Furnace, Cannonballs */ case SMITHING_ANVIL: case SMITHING_SMELTING: case SMITHING_CANNONBALL: /* Fishing */ case FISHING_NET: case FISHING_HARPOON: case FISHING_CAGE: case FISHING_POLE_CAST: case FISHING_KARAMBWAN: /* Mining(Normal) */ case MINING_BRONZE_PICKAXE: case MINING_IRON_PICKAXE: case MINING_STEEL_PICKAXE: case MINING_BLACK_PICKAXE: case MINING_MITHRIL_PICKAXE: case MINING_ADAMANT_PICKAXE: case MINING_RUNE_PICKAXE: case MINING_DRAGON_PICKAXE: case MINING_DRAGON_PICKAXE_ORN: case MINING_INFERNAL_PICKAXE: 
/* Mining(Motherlode) */ case MINING_MOTHERLODE_BRONZE: case MINING_MOTHERLODE_IRON: case MINING_MOTHERLODE_STEEL: case MINING_MOTHERLODE_BLACK: case MINING_MOTHERLODE_MITHRIL: case MINING_MOTHERLODE_ADAMANT: case MINING_MOTHERLODE_RUNE: case MINING_MOTHERLODE_DRAGON: case MINING_MOTHERLODE_DRAGON_ORN: case MINING_MOTHERLODE_INFERNAL: /* Herblore */ case HERBLORE_POTIONMAKING: case HERBLORE_MAKE_TAR: /* Magic */ case MAGIC_CHARGING_ORBS: resetTimers(); notifyIdle = true; break; } } @Subscribe public void onGameStateChanged(GameStateChanged gameStateChanged) { lastInteracting = null; GameState state = gameStateChanged.getGameState(); switch (state) { case LOGGING_IN: case HOPPING: case CONNECTION_LOST: ready = true; break; case LOGGED_IN: if (ready) { sixHourWarningTime = Instant.now().plus(SIX_HOUR_LOGOUT_WARNING_AFTER_DURATION); ready = false; } break; } } @Subscribe public void onGameTick(GameTick event) { final Player local = client.getLocalPlayer(); final Duration waitDuration = Duration.ofMillis(config.getTimeout()); if (!config.isEnabled() || client.getGameState() != GameState.LOGGED_IN || local == null) { return; } if (checkIdleLogout()) { sendNotification("[" + local.getName() + "] is about to log out from idling too long!"); } if (check6hrLogout()) { sendNotification("[" + local.getName() + "] is about to log out from being online for 6 hours!"); } if (checkAnimationIdle(waitDuration, local)) { sendNotification("[" + local.getName() + "] is now idle!"); } if (checkOutOfCombat(waitDuration, local)) { sendNotification("[" + local.getName() + "] is now out of combat!"); } if (checkLowHitpoints(waitDuration)) { sendNotification("[" + local.getName() + "] has low hitpoints!"); } if (checkLowPrayer(waitDuration)) { sendNotification("[" + local.getName() + "] has low prayer!"); } } private boolean checkLowHitpoints(Duration waitDuration) { if (client.getRealSkillLevel(Skill.HITPOINTS) > config.getHitpointsThreshold()) { if 
(client.getBoostedSkillLevel(Skill.HITPOINTS) <= config.getHitpointsThreshold()) { if (!notifyHitpoints && Instant.now().compareTo(lastHitpoints.plus(waitDuration)) >= 0) { notifyHitpoints = true; return true; } } else { lastHitpoints = Instant.now(); notifyHitpoints = false; } } return false; } private boolean checkLowPrayer(Duration waitDuration) { if (client.getRealSkillLevel(Skill.PRAYER) > config.getPrayerThreshold()) { if (client.getBoostedSkillLevel(Skill.PRAYER) <= config.getPrayerThreshold()) { if (!notifyPrayer && Instant.now().compareTo(lastPrayer.plus(waitDuration)) >= 0) { notifyPrayer = true; return true; } } else { lastPrayer = Instant.now(); notifyPrayer = false; } } return false; } private boolean checkOutOfCombat(Duration waitDuration, Player local) { Actor opponent = local.getInteracting(); boolean isPlayer = opponent instanceof Player; if (opponent != null && !isPlayer && opponent.getCombatLevel() > 0 && opponent.getHealth() != -1) { resetTimers(); lastOpponent = opponent; } else if (opponent == null) { lastOpponent = null; } if (lastOpponent != null && opponent == lastOpponent) { lastInteracting = Instant.now(); } if (lastInteracting != null && Instant.now().compareTo(lastInteracting.plus(waitDuration)) >= 0) { lastInteracting = null; return true; } return false; } private boolean checkIdleLogout() { if (client.getMouseIdleTicks() > LOGOUT_WARNING_AFTER_TICKS && client.getKeyboardIdleTicks() > LOGOUT_WARNING_AFTER_TICKS) { if (notifyIdleLogout) { notifyIdleLogout = false; return true; } } else { notifyIdleLogout = true; } return false; } private boolean check6hrLogout() { if (Instant.now().compareTo(sixHourWarningTime) >= 0) { if (notify6HourLogout) { notify6HourLogout = false; return true; } } else { notify6HourLogout = true; } return false; } private boolean checkAnimationIdle(Duration waitDuration, Player local) { if (notifyIdle) { if (lastAnimating != null) { if (Instant.now().compareTo(lastAnimating.plus(waitDuration)) >= 0) { notifyIdle = 
false; lastAnimating = null; return true; } } else if (local.getAnimation() == IDLE) { lastAnimating = Instant.now(); } } return false; } private void sendNotification(String message) { if (!config.alertWhenFocused() && gui.isFocused()) { return; } if (config.requestFocus()) { gui.requestFocus(); } if (config.sendTrayNotification()) { notifier.notify(message); } } private void resetTimers() { // Reset animation idle timer notifyIdle = false; lastAnimating = null; // Reset combat idle timer lastOpponent = null; lastInteracting = null; } }
runelite-client/src/main/java/net/runelite/client/plugins/idlenotifier/IdleNotifierPlugin.java
/* * Copyright (c) 2016-2017, Abel Briggs * Copyright (c) 2017, Kronos <https://github.com/KronosDesign> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package net.runelite.client.plugins.idlenotifier; import static net.runelite.api.AnimationID.COOKING_FIRE; import static net.runelite.api.AnimationID.COOKING_RANGE; import static net.runelite.api.AnimationID.CRAFTING_GLASSBLOWING; import static net.runelite.api.AnimationID.CRAFTING_SPINNING; import static net.runelite.api.AnimationID.FISHING_CAGE; import static net.runelite.api.AnimationID.FISHING_HARPOON; import static net.runelite.api.AnimationID.FISHING_KARAMBWAN; import static net.runelite.api.AnimationID.FISHING_NET; import static net.runelite.api.AnimationID.FISHING_POLE_CAST; import static net.runelite.api.AnimationID.FLETCHING_BOW_CUTTING; import static net.runelite.api.AnimationID.FLETCHING_STRING_MAGIC_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_MAGIC_SHORTBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_MAPLE_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_MAPLE_SHORTBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_NORMAL_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_NORMAL_SHORTBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_OAK_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_OAK_SHORTBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_WILLOW_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_WILLOW_SHORTBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_YEW_LONGBOW; import static net.runelite.api.AnimationID.FLETCHING_STRING_YEW_SHORTBOW; import static net.runelite.api.AnimationID.GEM_CUTTING_DIAMOND; import static net.runelite.api.AnimationID.GEM_CUTTING_EMERALD; import static net.runelite.api.AnimationID.GEM_CUTTING_JADE; import static net.runelite.api.AnimationID.GEM_CUTTING_OPAL; import static net.runelite.api.AnimationID.GEM_CUTTING_REDTOPAZ; import static net.runelite.api.AnimationID.GEM_CUTTING_RUBY; import static 
net.runelite.api.AnimationID.GEM_CUTTING_SAPPHIRE; import static net.runelite.api.AnimationID.HERBLORE_MAKE_TAR; import static net.runelite.api.AnimationID.HERBLORE_POTIONMAKING; import static net.runelite.api.AnimationID.IDLE; import static net.runelite.api.AnimationID.MAGIC_CHARGING_ORBS; import static net.runelite.api.AnimationID.MINING_ADAMANT_PICKAXE; import static net.runelite.api.AnimationID.MINING_BLACK_PICKAXE; import static net.runelite.api.AnimationID.MINING_BRONZE_PICKAXE; import static net.runelite.api.AnimationID.MINING_DRAGON_PICKAXE; import static net.runelite.api.AnimationID.MINING_DRAGON_PICKAXE_ORN; import static net.runelite.api.AnimationID.MINING_INFERNAL_PICKAXE; import static net.runelite.api.AnimationID.MINING_IRON_PICKAXE; import static net.runelite.api.AnimationID.MINING_MITHRIL_PICKAXE; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_ADAMANT; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_BLACK; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_BRONZE; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_DRAGON; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_DRAGON_ORN; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_INFERNAL; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_IRON; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_MITHRIL; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_RUNE; import static net.runelite.api.AnimationID.MINING_MOTHERLODE_STEEL; import static net.runelite.api.AnimationID.MINING_RUNE_PICKAXE; import static net.runelite.api.AnimationID.MINING_STEEL_PICKAXE; import static net.runelite.api.AnimationID.SMITHING_ANVIL; import static net.runelite.api.AnimationID.SMITHING_CANNONBALL; import static net.runelite.api.AnimationID.SMITHING_SMELTING; import static net.runelite.api.AnimationID.WOODCUTTING_ADAMANT; import static net.runelite.api.AnimationID.WOODCUTTING_BLACK; import static 
net.runelite.api.AnimationID.WOODCUTTING_BRONZE; import static net.runelite.api.AnimationID.WOODCUTTING_DRAGON; import static net.runelite.api.AnimationID.WOODCUTTING_INFERNAL; import static net.runelite.api.AnimationID.WOODCUTTING_IRON; import static net.runelite.api.AnimationID.WOODCUTTING_MITHRIL; import static net.runelite.api.AnimationID.WOODCUTTING_RUNE; import static net.runelite.api.AnimationID.WOODCUTTING_STEEL; import com.google.common.eventbus.Subscribe; import com.google.inject.Provides; import java.time.Duration; import java.time.Instant; import javax.inject.Inject; import net.runelite.api.Actor; import net.runelite.api.Client; import net.runelite.api.GameState; import net.runelite.api.Player; import net.runelite.api.Skill; import net.runelite.api.events.AnimationChanged; import net.runelite.api.events.GameStateChanged; import net.runelite.api.events.GameTick; import net.runelite.client.Notifier; import net.runelite.client.config.ConfigManager; import net.runelite.client.plugins.Plugin; import net.runelite.client.plugins.PluginDescriptor; import net.runelite.client.ui.ClientUI; @PluginDescriptor( name = "Idle notifier plugin" ) public class IdleNotifierPlugin extends Plugin { private static final int LOGOUT_WARNING_AFTER_TICKS = 14000; // 4 minutes and 40 seconds private static final Duration SIX_HOUR_LOGOUT_WARNING_AFTER_DURATION = Duration.ofMinutes(340); @Inject Notifier notifier; @Inject ClientUI gui; @Inject Client client; @Inject IdleNotifierConfig config; private Actor lastOpponent; private Instant lastAnimating; private Instant lastInteracting; private Instant lastHitpoints; private Instant lastPrayer; private boolean notifyIdle = false; private boolean notifyHitpoints = true; private boolean notifyPrayer = true; private boolean notifyIdleLogout = true; private boolean notify6HourLogout = true; private Instant sixHourWarningTime; private boolean ready; @Provides IdleNotifierConfig provideConfig(ConfigManager configManager) { return 
configManager.getConfig(IdleNotifierConfig.class); } @Subscribe public void onAnimationChanged(AnimationChanged event) { if (!config.isEnabled() || client.getGameState() != GameState.LOGGED_IN) { return; } Player localPlayer = client.getLocalPlayer(); if (localPlayer != event.getActor()) { return; } int animation = localPlayer.getAnimation(); switch (animation) { /* Woodcutting */ case WOODCUTTING_BRONZE: case WOODCUTTING_IRON: case WOODCUTTING_STEEL: case WOODCUTTING_BLACK: case WOODCUTTING_MITHRIL: case WOODCUTTING_ADAMANT: case WOODCUTTING_RUNE: case WOODCUTTING_DRAGON: case WOODCUTTING_INFERNAL: /* Cooking(Fire, Range) */ case COOKING_FIRE: case COOKING_RANGE: /* Crafting(Gem Cutting, Glassblowing, Spinning) */ case GEM_CUTTING_OPAL: case GEM_CUTTING_JADE: case GEM_CUTTING_REDTOPAZ: case GEM_CUTTING_SAPPHIRE: case GEM_CUTTING_EMERALD: case GEM_CUTTING_RUBY: case GEM_CUTTING_DIAMOND: case CRAFTING_GLASSBLOWING: case CRAFTING_SPINNING: /* Fletching(Cutting, Stringing) */ case FLETCHING_BOW_CUTTING: case FLETCHING_STRING_NORMAL_SHORTBOW: case FLETCHING_STRING_OAK_SHORTBOW: case FLETCHING_STRING_WILLOW_SHORTBOW: case FLETCHING_STRING_MAPLE_SHORTBOW: case FLETCHING_STRING_YEW_SHORTBOW: case FLETCHING_STRING_MAGIC_SHORTBOW: case FLETCHING_STRING_NORMAL_LONGBOW: case FLETCHING_STRING_OAK_LONGBOW: case FLETCHING_STRING_WILLOW_LONGBOW: case FLETCHING_STRING_MAPLE_LONGBOW: case FLETCHING_STRING_YEW_LONGBOW: case FLETCHING_STRING_MAGIC_LONGBOW: /* Smithing(Anvil, Furnace, Cannonballs */ case SMITHING_ANVIL: case SMITHING_SMELTING: case SMITHING_CANNONBALL: /* Fishing */ case FISHING_NET: case FISHING_HARPOON: case FISHING_CAGE: case FISHING_POLE_CAST: case FISHING_KARAMBWAN: /* Mining(Normal) */ case MINING_BRONZE_PICKAXE: case MINING_IRON_PICKAXE: case MINING_STEEL_PICKAXE: case MINING_BLACK_PICKAXE: case MINING_MITHRIL_PICKAXE: case MINING_ADAMANT_PICKAXE: case MINING_RUNE_PICKAXE: case MINING_DRAGON_PICKAXE: case MINING_DRAGON_PICKAXE_ORN: case MINING_INFERNAL_PICKAXE: 
/* Mining(Motherlode) */ case MINING_MOTHERLODE_BRONZE: case MINING_MOTHERLODE_IRON: case MINING_MOTHERLODE_STEEL: case MINING_MOTHERLODE_BLACK: case MINING_MOTHERLODE_MITHRIL: case MINING_MOTHERLODE_ADAMANT: case MINING_MOTHERLODE_RUNE: case MINING_MOTHERLODE_DRAGON: case MINING_MOTHERLODE_DRAGON_ORN: case MINING_MOTHERLODE_INFERNAL: /* Herblore */ case HERBLORE_POTIONMAKING: case HERBLORE_MAKE_TAR: /* Magic */ case MAGIC_CHARGING_ORBS: notifyIdle = true; lastAnimating = null; break; } } @Subscribe public void onGameStateChanged(GameStateChanged gameStateChanged) { lastInteracting = null; GameState state = gameStateChanged.getGameState(); switch (state) { case LOGGING_IN: case HOPPING: case CONNECTION_LOST: ready = true; break; case LOGGED_IN: if (ready) { sixHourWarningTime = Instant.now().plus(SIX_HOUR_LOGOUT_WARNING_AFTER_DURATION); ready = false; } break; } } @Subscribe public void onGameTick(GameTick event) { final Player local = client.getLocalPlayer(); final Duration waitDuration = Duration.ofMillis(config.getTimeout()); if (!config.isEnabled() || client.getGameState() != GameState.LOGGED_IN || local == null) { return; } if (checkIdleLogout()) { sendNotification("[" + local.getName() + "] is about to log out from idling too long!"); } if (check6hrLogout()) { sendNotification("[" + local.getName() + "] is about to log out from being online for 6 hours!"); } if (checkAnimationIdle(waitDuration, local)) { sendNotification("[" + local.getName() + "] is now idle!"); } if (checkOutOfCombat(waitDuration, local)) { sendNotification("[" + local.getName() + "] is now out of combat!"); } if (checkLowHitpoints(waitDuration)) { sendNotification("[" + local.getName() + "] has low hitpoints!"); } if (checkLowPrayer(waitDuration)) { sendNotification("[" + local.getName() + "] has low prayer!"); } } private boolean checkLowHitpoints(Duration waitDuration) { if (client.getRealSkillLevel(Skill.HITPOINTS) > config.getHitpointsThreshold()) { if 
(client.getBoostedSkillLevel(Skill.HITPOINTS) <= config.getHitpointsThreshold()) { if (!notifyHitpoints && Instant.now().compareTo(lastHitpoints.plus(waitDuration)) >= 0) { notifyHitpoints = true; return true; } } else { lastHitpoints = Instant.now(); notifyHitpoints = false; } } return false; } private boolean checkLowPrayer(Duration waitDuration) { if (client.getRealSkillLevel(Skill.PRAYER) > config.getPrayerThreshold()) { if (client.getBoostedSkillLevel(Skill.PRAYER) <= config.getPrayerThreshold()) { if (!notifyPrayer && Instant.now().compareTo(lastPrayer.plus(waitDuration)) >= 0) { notifyPrayer = true; return true; } } else { lastPrayer = Instant.now(); notifyPrayer = false; } } return false; } private boolean checkOutOfCombat(Duration waitDuration, Player local) { Actor opponent = local.getInteracting(); boolean isPlayer = opponent instanceof Player; if (opponent != null && !isPlayer && opponent.getCombatLevel() > 0 && opponent.getHealth() != -1) { lastOpponent = opponent; } else if (opponent == null) { lastOpponent = null; } if (lastOpponent != null && opponent == lastOpponent) { lastInteracting = Instant.now(); } if (lastInteracting != null && Instant.now().compareTo(lastInteracting.plus(waitDuration)) >= 0) { lastInteracting = null; return true; } return false; } private boolean checkIdleLogout() { if (client.getMouseIdleTicks() > LOGOUT_WARNING_AFTER_TICKS && client.getKeyboardIdleTicks() > LOGOUT_WARNING_AFTER_TICKS) { if (notifyIdleLogout) { notifyIdleLogout = false; return true; } } else { notifyIdleLogout = true; } return false; } private boolean check6hrLogout() { if (Instant.now().compareTo(sixHourWarningTime) >= 0) { if (notify6HourLogout) { notify6HourLogout = false; return true; } } else { notify6HourLogout = true; } return false; } private boolean checkAnimationIdle(Duration waitDuration, Player local) { if (notifyIdle) { if (lastAnimating != null) { if (Instant.now().compareTo(lastAnimating.plus(waitDuration)) >= 0) { notifyIdle = false; 
lastAnimating = null; return true; } } else if (local.getAnimation() == IDLE) { lastAnimating = Instant.now(); } } return false; } private void sendNotification(String message) { if (!config.alertWhenFocused() && gui.isFocused()) { return; } if (config.requestFocus()) { gui.requestFocus(); } if (config.sendTrayNotification()) { notifier.notify(message); } } }
Fix idle animation plugin timer cancelling - Cancel animation idle timer when combat begins - Cancel combat idle timer when animation begins Signed-off-by: Tomas Slusny <[email protected]>
runelite-client/src/main/java/net/runelite/client/plugins/idlenotifier/IdleNotifierPlugin.java
Fix idle animation plugin timer cancelling
<ide><path>unelite-client/src/main/java/net/runelite/client/plugins/idlenotifier/IdleNotifierPlugin.java <ide> case HERBLORE_MAKE_TAR: <ide> /* Magic */ <ide> case MAGIC_CHARGING_ORBS: <add> resetTimers(); <ide> notifyIdle = true; <del> lastAnimating = null; <ide> break; <ide> } <ide> } <ide> && opponent.getCombatLevel() > 0 <ide> && opponent.getHealth() != -1) <ide> { <add> resetTimers(); <ide> lastOpponent = opponent; <ide> } <ide> else if (opponent == null) <ide> notifier.notify(message); <ide> } <ide> } <add> <add> private void resetTimers() <add> { <add> // Reset animation idle timer <add> notifyIdle = false; <add> lastAnimating = null; <add> <add> // Reset combat idle timer <add> lastOpponent = null; <add> lastInteracting = null; <add> } <ide> }
Java
apache-2.0
8ddc6d913a24180d604b798e68493716db45240f
0
google/data-transfer-project,google/data-transfer-project,google/data-transfer-project,google/data-transfer-project,google/data-transfer-project
/* * Copyright 2018 The Data Transfer Project Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.datatransferproject.datatransfer.flickr.photos; import com.flickr4java.flickr.Flickr; import com.flickr4java.flickr.FlickrException; import com.flickr4java.flickr.REST; import com.flickr4java.flickr.RequestContext; import com.flickr4java.flickr.auth.Auth; import com.flickr4java.flickr.photos.Photo; import com.flickr4java.flickr.photos.PhotoList; import com.flickr4java.flickr.photos.PhotosInterface; import com.flickr4java.flickr.photosets.Photoset; import com.flickr4java.flickr.photosets.Photosets; import com.flickr4java.flickr.photosets.PhotosetsInterface; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.util.concurrent.RateLimiter; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.UUID; import java.util.stream.Collectors; import org.datatransferproject.spi.transfer.provider.ExportResult; import org.datatransferproject.spi.transfer.provider.ExportResult.ResultType; import org.datatransferproject.spi.transfer.provider.Exporter; import org.datatransferproject.spi.transfer.types.ContinuationData; import org.datatransferproject.types.common.ExportInformation; import 
org.datatransferproject.types.common.IntPaginationToken; import org.datatransferproject.types.common.PaginationData; import org.datatransferproject.types.common.models.IdOnlyContainerResource; import org.datatransferproject.types.common.models.photos.PhotoAlbum; import org.datatransferproject.types.common.models.photos.PhotoModel; import org.datatransferproject.types.common.models.photos.PhotosContainerResource; import org.datatransferproject.types.transfer.auth.AppCredentials; import org.datatransferproject.types.transfer.auth.AuthData; import org.datatransferproject.types.transfer.serviceconfig.TransferServiceConfig; public class FlickrPhotosExporter implements Exporter<AuthData, PhotosContainerResource> { private static final int PHOTO_PER_PAGE = 50; private static final List<String> EXTRAS = ImmutableList.of("url_o", "o_dims", "original_format"); private static final int PHOTO_SETS_PER_PAGE = 500; private static final String PHOTOSET_EXTRAS = ""; private final PhotosetsInterface photosetsInterface; private final PhotosInterface photosInterface; private final Flickr flickr; private final RateLimiter perUserRateLimiter; public FlickrPhotosExporter(AppCredentials appCredentials, TransferServiceConfig serviceConfig) { this.flickr = new Flickr(appCredentials.getKey(), appCredentials.getSecret(), new REST()); this.photosetsInterface = flickr.getPhotosetsInterface(); this.photosInterface = flickr.getPhotosInterface(); this.perUserRateLimiter = serviceConfig.getPerUserRateLimiter(); } @VisibleForTesting FlickrPhotosExporter(Flickr flickr, TransferServiceConfig serviceConfig) { this.flickr = flickr; this.photosInterface = flickr.getPhotosInterface(); this.photosetsInterface = flickr.getPhotosetsInterface(); this.perUserRateLimiter = serviceConfig.getPerUserRateLimiter(); } @VisibleForTesting static PhotoModel toCommonPhoto(Photo p, String albumId) { Preconditions.checkArgument( !Strings.isNullOrEmpty(p.getOriginalSize().getSource()), "Photo [" + p.getId() + "] has a 
null authUrl"); return new PhotoModel( p.getTitle(), p.getOriginalSize().getSource(), p.getDescription(), toMimeType(p.getOriginalFormat()), p.getId(), albumId, false); } @VisibleForTesting static String toMimeType(String flickrFormat) { switch (flickrFormat) { case "jpg": case "jpeg": return "image/jpeg"; case "png": return "image/png"; case "gif": return "image/gif"; default: throw new IllegalArgumentException("Don't know how to map: " + flickrFormat); } } @Override public ExportResult<PhotosContainerResource> export( UUID jobId, AuthData authData, Optional<ExportInformation> exportInformation) { Auth auth; try { auth = FlickrUtils.getAuth(authData, flickr); } catch (FlickrException e) { return new ExportResult<>(e); } RequestContext.getRequestContext().setAuth(auth); PaginationData paginationData = exportInformation.isPresent() ? exportInformation.get().getPaginationData() : null; IdOnlyContainerResource resource = exportInformation.isPresent() ? (IdOnlyContainerResource) exportInformation.get().getContainerResource() : null; if (resource != null) { return getPhotos(resource, paginationData); } else { return getAlbums(paginationData, auth); } } private ExportResult<PhotosContainerResource> getPhotos( IdOnlyContainerResource resource, PaginationData paginationData) { String photoSetId = resource.getId(); int page = paginationData == null ? 
1 : ((IntPaginationToken) paginationData).getStart(); PhotoList<Photo> photoSetList; try { if (Strings.isNullOrEmpty(photoSetId)) { RequestContext.getRequestContext().setExtras(EXTRAS); perUserRateLimiter.acquire(); photoSetList = photosInterface.getNotInSet(PHOTO_PER_PAGE, page); RequestContext.getRequestContext().setExtras(ImmutableList.of()); } else { perUserRateLimiter.acquire(); photoSetList = photosetsInterface.getPhotos( photoSetId, ImmutableSet.copyOf(EXTRAS), 0, PHOTO_PER_PAGE, page); } } catch (FlickrException e) { return new ExportResult<>(e); } boolean hasMore = photoSetList.getPage() != photoSetList.getPages() && !photoSetList.isEmpty(); Collection<PhotoModel> photos = photoSetList.stream().map(p -> toCommonPhoto(p, photoSetId)).collect(Collectors.toList()); PaginationData newPage = null; if (hasMore) { newPage = new IntPaginationToken(page + 1); } // Get result type ResultType resultType = ResultType.CONTINUE; if (newPage == null) { resultType = ResultType.END; } PhotosContainerResource photosContainerResource = new PhotosContainerResource(null, photos); return new ExportResult<>(resultType, photosContainerResource, new ContinuationData(newPage)); } private ExportResult<PhotosContainerResource> getAlbums( PaginationData paginationData, Auth auth) { ImmutableList.Builder<PhotoAlbum> albumBuilder = ImmutableList.builder(); List<IdOnlyContainerResource> subResources = new ArrayList<>(); int page = paginationData == null ? 
1 : ((IntPaginationToken) paginationData).getStart(); Photosets photoSetList; try { perUserRateLimiter.acquire(); photoSetList = photosetsInterface.getList( auth.getUser().getId(), PHOTO_SETS_PER_PAGE, page, PHOTOSET_EXTRAS); } catch (FlickrException e) { return new ExportResult<>(e); } for (Photoset photoSet : photoSetList.getPhotosets()) { // Saving data to the album allows the target service to recreate the album structure albumBuilder.add( new PhotoAlbum(photoSet.getId(), photoSet.getTitle(), photoSet.getDescription())); // Adding subresources tells the framework to recall export to get all the photos subResources.add(new IdOnlyContainerResource(photoSet.getId())); } PaginationData newPage = null; boolean hasMore = photoSetList.getPage() != photoSetList.getPages() && !photoSetList.getPhotosets().isEmpty(); if (hasMore) { newPage = new IntPaginationToken(page + 1); } else { // No more albums to get, add a resource for albumless items subResources.add(new IdOnlyContainerResource("")); } PhotosContainerResource photosContainerResource = new PhotosContainerResource(albumBuilder.build(), null); ContinuationData continuationData = new ContinuationData(newPage); subResources.forEach(resource -> continuationData.addContainerResource(resource)); // Get result type ResultType resultType = ResultType.CONTINUE; if (newPage == null) { resultType = ResultType.END; } return new ExportResult<>(resultType, photosContainerResource, continuationData); } }
extensions/data-transfer/portability-data-transfer-flickr/src/main/java/org/datatransferproject/datatransfer/flickr/photos/FlickrPhotosExporter.java
/* * Copyright 2018 The Data Transfer Project Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.datatransferproject.datatransfer.flickr.photos; import com.flickr4java.flickr.Flickr; import com.flickr4java.flickr.FlickrException; import com.flickr4java.flickr.REST; import com.flickr4java.flickr.RequestContext; import com.flickr4java.flickr.auth.Auth; import com.flickr4java.flickr.photos.Photo; import com.flickr4java.flickr.photos.PhotoList; import com.flickr4java.flickr.photos.PhotosInterface; import com.flickr4java.flickr.photosets.Photoset; import com.flickr4java.flickr.photosets.Photosets; import com.flickr4java.flickr.photosets.PhotosetsInterface; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.util.concurrent.RateLimiter; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Optional; import java.util.UUID; import java.util.stream.Collectors; import org.datatransferproject.spi.transfer.provider.ExportResult; import org.datatransferproject.spi.transfer.provider.ExportResult.ResultType; import org.datatransferproject.spi.transfer.provider.Exporter; import org.datatransferproject.spi.transfer.types.ContinuationData; import org.datatransferproject.types.common.ExportInformation; import 
org.datatransferproject.types.common.IntPaginationToken; import org.datatransferproject.types.common.PaginationData; import org.datatransferproject.types.common.models.IdOnlyContainerResource; import org.datatransferproject.types.common.models.photos.PhotoAlbum; import org.datatransferproject.types.common.models.photos.PhotoModel; import org.datatransferproject.types.common.models.photos.PhotosContainerResource; import org.datatransferproject.types.transfer.auth.AppCredentials; import org.datatransferproject.types.transfer.auth.AuthData; import org.datatransferproject.types.transfer.serviceconfig.TransferServiceConfig; public class FlickrPhotosExporter implements Exporter<AuthData, PhotosContainerResource> { private static final int PHOTO_PER_PAGE = 50; private static final List<String> EXTRAS = ImmutableList.of("url_o", "o_dims", "original_format"); private static final int PHOTO_SETS_PER_PAGE = 500; private static final String PHOTOSET_EXTRAS = ""; private final PhotosetsInterface photosetsInterface; private final PhotosInterface photosInterface; private final Flickr flickr; private final RateLimiter perUserRateLimiter; public FlickrPhotosExporter(AppCredentials appCredentials, TransferServiceConfig serviceConfig) { this.flickr = new Flickr(appCredentials.getKey(), appCredentials.getSecret(), new REST()); this.photosetsInterface = flickr.getPhotosetsInterface(); this.photosInterface = flickr.getPhotosInterface(); this.perUserRateLimiter = serviceConfig.getPerUserRateLimiter(); } @VisibleForTesting FlickrPhotosExporter(Flickr flickr, TransferServiceConfig serviceConfig) { this.flickr = flickr; this.photosInterface = flickr.getPhotosInterface(); this.photosetsInterface = flickr.getPhotosetsInterface(); this.perUserRateLimiter = serviceConfig.getPerUserRateLimiter(); } @VisibleForTesting static PhotoModel toCommonPhoto(Photo p, String albumId) { Preconditions.checkArgument( !Strings.isNullOrEmpty(p.getOriginalSize().getSource()), "Photo [" + p.getId() + "] has a 
null authUrl"); return new PhotoModel( p.getTitle(), p.getOriginalSize().getSource(), p.getDescription(), toMimeType(p.getOriginalFormat()), p.getId(), albumId, false); } @VisibleForTesting static String toMimeType(String flickrFormat) { switch (flickrFormat) { case "jpg": case "jpeg": return "image/jpeg"; case "png": return "image/png"; case "gif": return "image/gif"; default: throw new IllegalArgumentException("Don't know how to map: " + flickrFormat); } } @Override public ExportResult<PhotosContainerResource> export( UUID jobId, AuthData authData, Optional<ExportInformation> exportInformation) { Auth auth; try { auth = FlickrUtils.getAuth(authData, flickr); } catch (FlickrException e) { return new ExportResult<>(e); } RequestContext.getRequestContext().setAuth(auth); PaginationData paginationData = exportInformation.isPresent() ? exportInformation.get().getPaginationData() : null; IdOnlyContainerResource resource = exportInformation.isPresent() ? (IdOnlyContainerResource) exportInformation.get().getContainerResource() : null; if (resource != null) { return getPhotos(resource, paginationData); } else { return getAlbums(paginationData, auth); } } private ExportResult<PhotosContainerResource> getPhotos( IdOnlyContainerResource resource, PaginationData paginationData) { String photoSetId = resource.getId(); int page = paginationData == null ? 
1 : ((IntPaginationToken) paginationData).getStart(); PhotoList<Photo> photoSetList; try { if (photoSetId == null) { RequestContext.getRequestContext().setExtras(EXTRAS); perUserRateLimiter.acquire(); photoSetList = photosInterface.getNotInSet(PHOTO_PER_PAGE, page); RequestContext.getRequestContext().setExtras(ImmutableList.of()); } else { perUserRateLimiter.acquire(); photoSetList = photosetsInterface.getPhotos( photoSetId, ImmutableSet.copyOf(EXTRAS), 0, PHOTO_PER_PAGE, page); } } catch (FlickrException e) { return new ExportResult<>(e); } boolean hasMore = photoSetList.getPage() != photoSetList.getPages() && !photoSetList.isEmpty(); Collection<PhotoModel> photos = photoSetList.stream().map(p -> toCommonPhoto(p, photoSetId)).collect(Collectors.toList()); PaginationData newPage = null; if (hasMore) { newPage = new IntPaginationToken(page + 1); } // Get result type ResultType resultType = ResultType.CONTINUE; if (newPage == null) { resultType = ResultType.END; } PhotosContainerResource photosContainerResource = new PhotosContainerResource(null, photos); return new ExportResult<>(resultType, photosContainerResource, new ContinuationData(newPage)); } private ExportResult<PhotosContainerResource> getAlbums( PaginationData paginationData, Auth auth) { ImmutableList.Builder<PhotoAlbum> albumBuilder = ImmutableList.builder(); List<IdOnlyContainerResource> subResources = new ArrayList<>(); int page = paginationData == null ? 
1 : ((IntPaginationToken) paginationData).getStart(); Photosets photoSetList; try { perUserRateLimiter.acquire(); photoSetList = photosetsInterface.getList( auth.getUser().getId(), PHOTO_SETS_PER_PAGE, page, PHOTOSET_EXTRAS); } catch (FlickrException e) { return new ExportResult<>(e); } for (Photoset photoSet : photoSetList.getPhotosets()) { // Saving data to the album allows the target service to recreate the album structure albumBuilder.add( new PhotoAlbum(photoSet.getId(), photoSet.getTitle(), photoSet.getDescription())); // Adding subresources tells the framework to recall export to get all the photos subResources.add(new IdOnlyContainerResource(photoSet.getId())); } PaginationData newPage = null; boolean hasMore = photoSetList.getPage() != photoSetList.getPages() && !photoSetList.getPhotosets().isEmpty(); if (hasMore) { newPage = new IntPaginationToken(page + 1); } PhotosContainerResource photosContainerResource = new PhotosContainerResource(albumBuilder.build(), null); ContinuationData continuationData = new ContinuationData(newPage); subResources.forEach(resource -> continuationData.addContainerResource(resource)); // Get result type ResultType resultType = ResultType.CONTINUE; if (newPage == null) { resultType = ResultType.END; } return new ExportResult<>(resultType, photosContainerResource, continuationData); } }
Export albumless photos in the flickr exporter (#908)
extensions/data-transfer/portability-data-transfer-flickr/src/main/java/org/datatransferproject/datatransfer/flickr/photos/FlickrPhotosExporter.java
Export albumless photos in the flickr exporter (#908)
<ide><path>xtensions/data-transfer/portability-data-transfer-flickr/src/main/java/org/datatransferproject/datatransfer/flickr/photos/FlickrPhotosExporter.java <ide> PhotoList<Photo> photoSetList; <ide> <ide> try { <del> if (photoSetId == null) { <add> if (Strings.isNullOrEmpty(photoSetId)) { <ide> RequestContext.getRequestContext().setExtras(EXTRAS); <ide> perUserRateLimiter.acquire(); <ide> photoSetList = photosInterface.getNotInSet(PHOTO_PER_PAGE, page); <ide> photoSetList.getPage() != photoSetList.getPages() && !photoSetList.getPhotosets().isEmpty(); <ide> if (hasMore) { <ide> newPage = new IntPaginationToken(page + 1); <add> } else { <add> // No more albums to get, add a resource for albumless items <add> subResources.add(new IdOnlyContainerResource("")); <ide> } <ide> <ide> PhotosContainerResource photosContainerResource =
Java
agpl-3.0
323a71d12e12810376d99740cdc15078c7cd248e
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
966ae476-2e61-11e5-9284-b827eb9e62be
hello.java
9664ffde-2e61-11e5-9284-b827eb9e62be
966ae476-2e61-11e5-9284-b827eb9e62be
hello.java
966ae476-2e61-11e5-9284-b827eb9e62be
<ide><path>ello.java <del>9664ffde-2e61-11e5-9284-b827eb9e62be <add>966ae476-2e61-11e5-9284-b827eb9e62be
Java
mit
39156dc46a742483fb191fa8e0343c323348f68f
0
JOML-CI/JOML,FortressBuilder/JOML,roquendm/JOML,JOML-CI/JOML,roquendm/JOML,JOML-CI/JOML,FortressBuilder/JOML
/* * (C) Copyright 2015 Richard Greenlees * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software * and associated documentation files (the "Software"), to deal in the Software without restriction, * including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * 1) The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * */ package com.joml; import java.io.Externalizable; import java.io.IOException; import java.io.ObjectInput; import java.io.ObjectOutput; import java.io.Serializable; import java.nio.FloatBuffer; import java.text.DecimalFormat; /** * Contains the definition of a 4x4 Matrix of floats, and associated functions to transform * it. The matrix is column-major to match OpenGL's interpretation, and it looks like this: * <p> * m00 m10 m20 m30</br> * m01 m11 m21 m31</br> * m02 m12 m22 m32</br> * m03 m13 m23 m33</br> * * @author Richard Greenlees * @author Kai Burjack */ public class Matrix4f implements Serializable, Externalizable { public float m00; public float m01; public float m02; public float m03; public float m10; public float m11; public float m12; public float m13; public float m20; public float m21; public float m22; public float m23; public float m30; public float m31; public float m32; public float m33; public Matrix4f() { super(); identity(); } /** * Create a new {@link Matrix4f} by setting its uppper left 3x3 submatrix to the values of the given {@link Matrix3f} * and the rest to identity. 
* * @param mat * the {@link Matrix3f} */ public Matrix4f(Matrix3f mat) { this.m00 = mat.m00; this.m01 = mat.m01; this.m02 = mat.m02; this.m10 = mat.m10; this.m11 = mat.m11; this.m12 = mat.m12; this.m20 = mat.m20; this.m21 = mat.m21; this.m22 = mat.m22; this.m33 = 1.0f; } /** Clones this matrix from the supplied matrix */ public Matrix4f(Matrix4f mat) { this.m00 = mat.m00; this.m01 = mat.m01; this.m02 = mat.m02; this.m03 = mat.m03; this.m10 = mat.m10; this.m11 = mat.m11; this.m12 = mat.m12; this.m13 = mat.m13; this.m20 = mat.m20; this.m21 = mat.m21; this.m22 = mat.m22; this.m23 = mat.m23; this.m30 = mat.m30; this.m31 = mat.m31; this.m32 = mat.m32; this.m33 = mat.m33; } /** Create a new 4x4 matrix using the supplied float values */ public Matrix4f(float m00, float m01, float m02, float m03, float m10, float m11, float m12, float m13, float m20, float m21, float m22, float m23, float m30, float m31, float m32, float m33) { this.m00 = m00; this.m01 = m01; this.m02 = m02; this.m03 = m03; this.m10 = m10; this.m11 = m11; this.m12 = m12; this.m13 = m13; this.m20 = m20; this.m21 = m21; this.m22 = m22; this.m23 = m23; this.m30 = m30; this.m31 = m31; this.m32 = m32; this.m33 = m33; } public Matrix4f(FloatBuffer buffer) { m00 = buffer.get(); m01 = buffer.get(); m02 = buffer.get(); m03 = buffer.get(); m10 = buffer.get(); m11 = buffer.get(); m12 = buffer.get(); m13 = buffer.get(); m20 = buffer.get(); m21 = buffer.get(); m22 = buffer.get(); m23 = buffer.get(); m30 = buffer.get(); m31 = buffer.get(); m32 = buffer.get(); m33 = buffer.get(); } /** * Reset this matrix to the identity. 
* * @return this */ public Matrix4f identity() { this.m00 = 1.0f; this.m01 = 0.0f; this.m02 = 0.0f; this.m03 = 0.0f; this.m10 = 0.0f; this.m11 = 1.0f; this.m12 = 0.0f; this.m13 = 0.0f; this.m20 = 0.0f; this.m21 = 0.0f; this.m22 = 1.0f; this.m23 = 0.0f; this.m30 = 0.0f; this.m31 = 0.0f; this.m32 = 0.0f; this.m33 = 1.0f; return this; } /** * Set the values within this matrix to be the same as the supplied matrix. * * @return this */ public Matrix4f set(Matrix4f m1) { this.m00 = m1.m00; this.m01 = m1.m01; this.m02 = m1.m02; this.m03 = m1.m03; this.m10 = m1.m10; this.m11 = m1.m11; this.m12 = m1.m12; this.m13 = m1.m13; this.m20 = m1.m20; this.m21 = m1.m21; this.m22 = m1.m22; this.m23 = m1.m23; this.m30 = m1.m30; this.m31 = m1.m31; this.m32 = m1.m32; this.m33 = m1.m33; return this; } /** * Set the upper left 3x3 submatrix of this {@link Matrix4f} to the given {@link Matrix3f} and the rest to identity. * * @param mat * the {@link Matrix3f} * @return this */ public Matrix4f set(Matrix3f mat) { this.m00 = mat.m00; this.m01 = mat.m01; this.m02 = mat.m02; this.m03 = 0.0f; this.m10 = mat.m10; this.m11 = mat.m11; this.m12 = mat.m12; this.m13 = 0.0f; this.m20 = mat.m20; this.m21 = mat.m21; this.m22 = mat.m22; this.m23 = 0.0f; this.m30 = 0.0f; this.m31 = 0.0f; this.m32 = 0.0f; this.m33 = 1.0f; return this; } /** * Set the values of this matrix to the ones of the given javax.vecmath matrix. 
* * @param javaxVecmathMatrix * @return this */ public Matrix4f fromJavaxMatrix(javax.vecmath.Matrix4f javaxVecmathMatrix) { m00 = javaxVecmathMatrix.m00; m01 = javaxVecmathMatrix.m10; m02 = javaxVecmathMatrix.m20; m03 = javaxVecmathMatrix.m30; m10 = javaxVecmathMatrix.m01; m11 = javaxVecmathMatrix.m11; m12 = javaxVecmathMatrix.m21; m13 = javaxVecmathMatrix.m31; m20 = javaxVecmathMatrix.m02; m21 = javaxVecmathMatrix.m12; m22 = javaxVecmathMatrix.m22; m23 = javaxVecmathMatrix.m32; m30 = javaxVecmathMatrix.m03; m31 = javaxVecmathMatrix.m13; m32 = javaxVecmathMatrix.m23; m33 = javaxVecmathMatrix.m33; return this; } /** * Set the values of this matrix to the ones of the given org.lwjgl.util.vector.Matrix4f matrix. * * @param lwjglMatrix * @return this */ public Matrix4f fromLwjglMatrix(org.lwjgl.util.vector.Matrix4f lwjglMatrix) { m00 = lwjglMatrix.m00; m01 = lwjglMatrix.m01; m02 = lwjglMatrix.m02; m03 = lwjglMatrix.m03; m10 = lwjglMatrix.m10; m11 = lwjglMatrix.m11; m12 = lwjglMatrix.m12; m13 = lwjglMatrix.m13; m20 = lwjglMatrix.m20; m21 = lwjglMatrix.m21; m22 = lwjglMatrix.m22; m23 = lwjglMatrix.m23; m30 = lwjglMatrix.m30; m31 = lwjglMatrix.m31; m32 = lwjglMatrix.m32; m33 = lwjglMatrix.m33; return this; } /** * Set the values of this matrix to the ones of the given com.badlogic.gdx.math.Matrix4 matrix. * * @param gdxMatrix * @return this */ public Matrix4f fromGdxMatrix(com.badlogic.gdx.math.Matrix4 gdxMatrix) { m00 = gdxMatrix.val[0]; m01 = gdxMatrix.val[1]; m02 = gdxMatrix.val[2]; m03 = gdxMatrix.val[3]; m10 = gdxMatrix.val[4]; m11 = gdxMatrix.val[5]; m12 = gdxMatrix.val[6]; m13 = gdxMatrix.val[7]; m20 = gdxMatrix.val[8]; m21 = gdxMatrix.val[9]; m22 = gdxMatrix.val[10]; m23 = gdxMatrix.val[11]; m30 = gdxMatrix.val[12]; m31 = gdxMatrix.val[13]; m32 = gdxMatrix.val[14]; m33 = gdxMatrix.val[15]; return this; } /** * Multiply this matrix by the supplied <code>right</code> matrix and store the result in <code>this</code>. 
* * @param right the right matrix * @return this */ public Matrix4f mul(Matrix4f right) { mul(this, right, this); return this; } /** * Multiply this matrix by the supplied <code>right</code> matrix and store the result in <code>dest</code>. * * @param right the right matrix * @param dest the destination matrix * @return this */ public Matrix4f mul(Matrix4f right, Matrix4f dest) { mul(this, right, dest); return this; } /** * Multiply the supplied left matrix by the right and store the result into dest. */ public static void mul(Matrix4f left, Matrix4f right, Matrix4f dest) { if (left != dest && right != dest) { dest.m00 = left.m00 * right.m00 + left.m10 * right.m01 + left.m20 * right.m02 + left.m30 * right.m03; dest.m01 = left.m01 * right.m00 + left.m11 * right.m01 + left.m21 * right.m02 + left.m31 * right.m03; dest.m02 = left.m02 * right.m00 + left.m12 * right.m01 + left.m22 * right.m02 + left.m32 * right.m03; dest.m03 = left.m03 * right.m00 + left.m13 * right.m01 + left.m23 * right.m02 + left.m33 * right.m03; dest.m10 = left.m00 * right.m10 + left.m10 * right.m11 + left.m20 * right.m12 + left.m30 * right.m13; dest.m11 = left.m01 * right.m10 + left.m11 * right.m11 + left.m21 * right.m12 + left.m31 * right.m13; dest.m12 = left.m02 * right.m10 + left.m12 * right.m11 + left.m22 * right.m12 + left.m32 * right.m13; dest.m13 = left.m03 * right.m10 + left.m13 * right.m11 + left.m23 * right.m12 + left.m33 * right.m13; dest.m20 = left.m00 * right.m20 + left.m10 * right.m21 + left.m20 * right.m22 + left.m30 * right.m23; dest.m21 = left.m01 * right.m20 + left.m11 * right.m21 + left.m21 * right.m22 + left.m31 * right.m23; dest.m22 = left.m02 * right.m20 + left.m12 * right.m21 + left.m22 * right.m22 + left.m32 * right.m23; dest.m23 = left.m03 * right.m20 + left.m13 * right.m21 + left.m23 * right.m22 + left.m33 * right.m23; dest.m30 = left.m00 * right.m30 + left.m10 * right.m31 + left.m20 * right.m32 + left.m30 * right.m33; dest.m31 = left.m01 * right.m30 + left.m11 * right.m31 
+ left.m21 * right.m32 + left.m31 * right.m33; dest.m32 = left.m02 * right.m30 + left.m12 * right.m31 + left.m22 * right.m32 + left.m32 * right.m33; dest.m33 = left.m03 * right.m30 + left.m13 * right.m31 + left.m23 * right.m32 + left.m33 * right.m33; } else { dest.set(left.m00 * right.m00 + left.m10 * right.m01 + left.m20 * right.m02 + left.m30 * right.m03, left.m01 * right.m00 + left.m11 * right.m01 + left.m21 * right.m02 + left.m31 * right.m03, left.m02 * right.m00 + left.m12 * right.m01 + left.m22 * right.m02 + left.m32 * right.m03, left.m03 * right.m00 + left.m13 * right.m01 + left.m23 * right.m02 + left.m33 * right.m03, left.m00 * right.m10 + left.m10 * right.m11 + left.m20 * right.m12 + left.m30 * right.m13, left.m01 * right.m10 + left.m11 * right.m11 + left.m21 * right.m12 + left.m31 * right.m13, left.m02 * right.m10 + left.m12 * right.m11 + left.m22 * right.m12 + left.m32 * right.m13, left.m03 * right.m10 + left.m13 * right.m11 + left.m23 * right.m12 + left.m33 * right.m13, left.m00 * right.m20 + left.m10 * right.m21 + left.m20 * right.m22 + left.m30 * right.m23, left.m01 * right.m20 + left.m11 * right.m21 + left.m21 * right.m22 + left.m31 * right.m23, left.m02 * right.m20 + left.m12 * right.m21 + left.m22 * right.m22 + left.m32 * right.m23, left.m03 * right.m20 + left.m13 * right.m21 + left.m23 * right.m22 + left.m33 * right.m23, left.m00 * right.m30 + left.m10 * right.m31 + left.m20 * right.m32 + left.m30 * right.m33, left.m01 * right.m30 + left.m11 * right.m31 + left.m21 * right.m32 + left.m31 * right.m33, left.m02 * right.m30 + left.m12 * right.m31 + left.m22 * right.m32 + left.m32 * right.m33, left.m03 * right.m30 + left.m13 * right.m31 + left.m23 * right.m32 + left.m33 * right.m33); } } /** * Set the values within this matrix to the supplied float values. 
The matrix will look like this:<br><br> * * m00, m10, m20, m30<br> * m01, m11, m21, m31<br> * m02, m12, m22, m32<br> * m03, m13, m23, m33 * * @return this */ public Matrix4f set(float m00, float m01, float m02, float m03, float m10, float m11, float m12, float m13, float m20, float m21, float m22, float m23, float m30, float m31, float m32, float m33) { this.m00 = m00; this.m01 = m01; this.m02 = m02; this.m03 = m03; this.m10 = m10; this.m11 = m11; this.m12 = m12; this.m13 = m13; this.m20 = m20; this.m21 = m21; this.m22 = m22; this.m23 = m23; this.m30 = m30; this.m31 = m31; this.m32 = m32; this.m33 = m33; return this; } /** * Set the values in the matrix using a float array. The results will look like this:<br><br> * * 0, 4, 8, 12<br> * 1, 5, 9, 13<br> * 2, 6, 10, 14<br> * 3, 7, 11, 15<br> * * @return this */ public Matrix4f set(float m[], int off) { m00 = m[off+0]; m01 = m[off+1]; m02 = m[off+2]; m03 = m[off+3]; m10 = m[off+4]; m11 = m[off+5]; m12 = m[off+6]; m13 = m[off+7]; m20 = m[off+8]; m21 = m[off+9]; m22 = m[off+10]; m23 = m[off+11]; m30 = m[off+12]; m31 = m[off+13]; m32 = m[off+14]; m33 = m[off+15]; return this; } /** * Set the values in the matrix using a float array. The results will look like this:<br><br> * * 0, 4, 8, 12<br> * 1, 5, 9, 13<br> * 2, 6, 10, 14<br> * 3, 7, 11, 15<br> * * @see #set(float[], int) * * @return this */ public Matrix4f set(float m[]) { return set(m, 0); } /** * Set the values in the matrix using a FloatBuffer. 
The results will look like this:<br><br> * * 0, 4, 8, 12<br> * 1, 5, 9, 13<br> * 2, 6, 10, 14<br> * 3, 7, 11, 15<br> * * @return this */ public Matrix4f set(FloatBuffer buffer) { m00 = buffer.get(); m01 = buffer.get(); m02 = buffer.get(); m03 = buffer.get(); m10 = buffer.get(); m11 = buffer.get(); m12 = buffer.get(); m13 = buffer.get(); m20 = buffer.get(); m21 = buffer.get(); m22 = buffer.get(); m23 = buffer.get(); m30 = buffer.get(); m31 = buffer.get(); m32 = buffer.get(); m33 = buffer.get(); return this; } /** Returns the determinant of this matrix */ public float determinant() { return (m00 * m11 - m01 * m10) * (m22 * m33 - m23 * m32) - (m00 * m12 - m02 * m10) * (m21 * m33 - m23 * m31) + (m00 * m13 - m03 * m10) * (m21 * m32 - m22 * m31) + (m01 * m12 - m02 * m11) * (m20 * m33 - m23 * m30) - (m01 * m13 - m03 * m11) * (m20 * m32 - m22 * m30) + (m02 * m13 - m03 * m12) * (m20 * m31 - m21 * m30); } /** Returns the determinant of the supplied matrix */ public static float determinant(Matrix4f source) { return (source.m00 * source.m11 - source.m01 * source.m10) * (source.m22 * source.m33 - source.m23 * source.m32) - (source.m00 * source.m12 - source.m02 * source.m10) * (source.m21 * source.m33 - source.m23 * source.m31) + (source.m00 * source.m13 - source.m03 * source.m10) * (source.m21 * source.m32 - source.m22 * source.m31) + (source.m01 * source.m12 - source.m02 * source.m11) * (source.m20 * source.m33 - source.m23 * source.m30) - (source.m01 * source.m13 - source.m03 * source.m11) * (source.m20 * source.m32 - source.m22 * source.m30) + (source.m02 * source.m13 - source.m03 * source.m12) * (source.m20 * source.m31 - source.m21 * source.m30); } /** * Invert this matrix and write the result into <code>dest</code>. * * @param dest * @return this */ public Matrix4f invert(Matrix4f dest) { invert(this, dest); return this; } /** * Invert this matrix. 
* * @return this */ public Matrix4f invert() { return invert(this); } /** * Invert the supplied matrix and store the result in <code>dest</code>. * * @param source * the matrix to invert * @param dest * the matrix to hold the result */ public static void invert(Matrix4f source, Matrix4f dest) { float s = source.determinant(); if (s == 0.0f) { dest.set(source); return; } s = 1.0f / s; if (source != dest) { dest.m00 = (source.m11 * (source.m22 * source.m33 - source.m23 * source.m32) + source.m12 * (source.m23 * source.m31 - source.m21 * source.m33) + source.m13 * (source.m21 * source.m32 - source.m22 * source.m31)) * s; dest.m01 = (source.m21 * (source.m02 * source.m33 - source.m03 * source.m32) + source.m22 * (source.m03 * source.m31 - source.m01 * source.m33) + source.m23 * (source.m01 * source.m32 - source.m02 * source.m31)) * s; dest.m02 = (source.m31 * (source.m02 * source.m13 - source.m03 * source.m12) + source.m32 * (source.m03 * source.m11 - source.m01 * source.m13) + source.m33 * (source.m01 * source.m12 - source.m02 * source.m11)) * s; dest.m03 = (source.m01 * (source.m13 * source.m22 - source.m12 * source.m23) + source.m02 * (source.m11 * source.m23 - source.m13 * source.m21) + source.m03 * (source.m12 * source.m21 - source.m11 * source.m22)) * s; dest.m10 = (source.m12 * (source.m20 * source.m33 - source.m23 * source.m30) + source.m13 * (source.m22 * source.m30 - source.m20 * source.m32) + source.m10 * (source.m23 * source.m32 - source.m22 * source.m33)) * s; dest.m11 = (source.m22 * (source.m00 * source.m33 - source.m03 * source.m30) + source.m23 * (source.m02 * source.m30 - source.m00 * source.m32) + source.m20 * (source.m03 * source.m32 - source.m02 * source.m33)) * s; dest.m12 = (source.m32 * (source.m00 * source.m13 - source.m03 * source.m10) + source.m33 * (source.m02 * source.m10 - source.m00 * source.m12) + source.m30 * (source.m03 * source.m12 - source.m02 * source.m13)) * s; dest.m13 = (source.m02 * (source.m13 * source.m20 - source.m10 * 
source.m23) + source.m03 * (source.m10 * source.m22 - source.m12 * source.m20) + source.m00 * (source.m12 * source.m23 - source.m13 * source.m22)) * s; dest.m20 = (source.m13 * (source.m20 * source.m31 - source.m21 * source.m30) + source.m10 * (source.m21 * source.m33 - source.m23 * source.m31) + source.m11 * (source.m23 * source.m30 - source.m20 * source.m33)) * s; dest.m21 = (source.m23 * (source.m00 * source.m31 - source.m01 * source.m30) + source.m20 * (source.m01 * source.m33 - source.m03 * source.m31) + source.m21 * (source.m03 * source.m30 - source.m00 * source.m33)) * s; dest.m22 = (source.m33 * (source.m00 * source.m11 - source.m01 * source.m10) + source.m30 * (source.m01 * source.m13 - source.m03 * source.m11) + source.m31 * (source.m03 * source.m10 - source.m00 * source.m13)) * s; dest.m23 = (source.m03 * (source.m11 * source.m20 - source.m10 * source.m21) + source.m00 * (source.m13 * source.m21 - source.m11 * source.m23) + source.m01 * (source.m10 * source.m23 - source.m13 * source.m20)) * s; dest.m30 = (source.m10 * (source.m22 * source.m31 - source.m21 * source.m32) + source.m11 * (source.m20 * source.m32 - source.m22 * source.m30) + source.m12 * (source.m21 * source.m30 - source.m20 * source.m31)) * s; dest.m31 = (source.m20 * (source.m02 * source.m31 - source.m01 * source.m32) + source.m21 * (source.m00 * source.m32 - source.m02 * source.m30) + source.m22 * (source.m01 * source.m30 - source.m00 * source.m31)) * s; dest.m32 = (source.m30 * (source.m02 * source.m11 - source.m01 * source.m12) + source.m31 * (source.m00 * source.m12 - source.m02 * source.m10) + source.m32 * (source.m01 * source.m10 - source.m00 * source.m11)) * s; dest.m33 = (source.m00 * (source.m11 * source.m22 - source.m12 * source.m21) + source.m01 * (source.m12 * source.m20 - source.m10 * source.m22) + source.m02 * (source.m10 * source.m21 - source.m11 * source.m20)) * s; } else { dest.set((source.m11 * (source.m22 * source.m33 - source.m23 * source.m32) + source.m12 * (source.m23 
* source.m31 - source.m21 * source.m33) + source.m13 * (source.m21 * source.m32 - source.m22 * source.m31)) * s, (source.m21 * (source.m02 * source.m33 - source.m03 * source.m32) + source.m22 * (source.m03 * source.m31 - source.m01 * source.m33) + source.m23 * (source.m01 * source.m32 - source.m02 * source.m31)) * s, (source.m31 * (source.m02 * source.m13 - source.m03 * source.m12) + source.m32 * (source.m03 * source.m11 - source.m01 * source.m13) + source.m33 * (source.m01 * source.m12 - source.m02 * source.m11)) * s, (source.m01 * (source.m13 * source.m22 - source.m12 * source.m23) + source.m02 * (source.m11 * source.m23 - source.m13 * source.m21) + source.m03 * (source.m12 * source.m21 - source.m11 * source.m22)) * s, (source.m12 * (source.m20 * source.m33 - source.m23 * source.m30) + source.m13 * (source.m22 * source.m30 - source.m20 * source.m32) + source.m10 * (source.m23 * source.m32 - source.m22 * source.m33)) * s, (source.m22 * (source.m00 * source.m33 - source.m03 * source.m30) + source.m23 * (source.m02 * source.m30 - source.m00 * source.m32) + source.m20 * (source.m03 * source.m32 - source.m02 * source.m33)) * s, (source.m32 * (source.m00 * source.m13 - source.m03 * source.m10) + source.m33 * (source.m02 * source.m10 - source.m00 * source.m12) + source.m30 * (source.m03 * source.m12 - source.m02 * source.m13)) * s, (source.m02 * (source.m13 * source.m20 - source.m10 * source.m23) + source.m03 * (source.m10 * source.m22 - source.m12 * source.m20) + source.m00 * (source.m12 * source.m23 - source.m13 * source.m22)) * s, (source.m13 * (source.m20 * source.m31 - source.m21 * source.m30) + source.m10 * (source.m21 * source.m33 - source.m23 * source.m31) + source.m11 * (source.m23 * source.m30 - source.m20 * source.m33)) * s, (source.m23 * (source.m00 * source.m31 - source.m01 * source.m30) + source.m20 * (source.m01 * source.m33 - source.m03 * source.m31) + source.m21 * (source.m03 * source.m30 - source.m00 * source.m33)) * s, (source.m33 * (source.m00 * 
source.m11 - source.m01 * source.m10) + source.m30 * (source.m01 * source.m13 - source.m03 * source.m11) + source.m31 * (source.m03 * source.m10 - source.m00 * source.m13)) * s, (source.m03 * (source.m11 * source.m20 - source.m10 * source.m21) + source.m00 * (source.m13 * source.m21 - source.m11 * source.m23) + source.m01 * (source.m10 * source.m23 - source.m13 * source.m20)) * s, (source.m10 * (source.m22 * source.m31 - source.m21 * source.m32) + source.m11 * (source.m20 * source.m32 - source.m22 * source.m30) + source.m12 * (source.m21 * source.m30 - source.m20 * source.m31)) * s, (source.m20 * (source.m02 * source.m31 - source.m01 * source.m32) + source.m21 * (source.m00 * source.m32 - source.m02 * source.m30) + source.m22 * (source.m01 * source.m30 - source.m00 * source.m31)) * s, (source.m30 * (source.m02 * source.m11 - source.m01 * source.m12) + source.m31 * (source.m00 * source.m12 - source.m02 * source.m10) + source.m32 * (source.m01 * source.m10 - source.m00 * source.m11)) * s, (source.m00 * (source.m11 * source.m22 - source.m12 * source.m21) + source.m01 * (source.m12 * source.m20 - source.m10 * source.m22) + source.m02 * (source.m10 * source.m21 - source.m11 * source.m20)) * s ); } } /** * Multiply this matrix by the supplied scalar value. * * @param scalar * the scalar value to multiply each matrix element by * @return this */ public Matrix4f mul(float scalar) { m00 *= scalar; m01 *= scalar; m02 *= scalar; m03 *= scalar; m10 *= scalar; m11 *= scalar; m12 *= scalar; m13 *= scalar; m20 *= scalar; m21 *= scalar; m22 *= scalar; m23 *= scalar; m30 *= scalar; m31 *= scalar; m32 *= scalar; m33 *= scalar; return this; } /** * Multiply the supplied <code>source</code> matrix by the supplied * <code>scalar</code> and store the result in <code>dest</code>. 
* * @param source * the source matrix * @param scalar * the scalar * @param dest * willd hold the result */ public static void mul(Matrix4f source, float scalar, Matrix4f dest) { dest.m00 = source.m00 * scalar; dest.m01 = source.m01 * scalar; dest.m02 = source.m02 * scalar; dest.m03 = source.m03 * scalar; dest.m10 = source.m10 * scalar; dest.m11 = source.m11 * scalar; dest.m12 = source.m12 * scalar; dest.m13 = source.m13 * scalar; dest.m20 = source.m20 * scalar; dest.m21 = source.m21 * scalar; dest.m22 = source.m22 * scalar; dest.m23 = source.m23 * scalar; dest.m30 = source.m30 * scalar; dest.m31 = source.m31 * scalar; dest.m32 = source.m32 * scalar; dest.m33 = source.m33 * scalar; } /** * Transpose this matrix and store the result in <code>dest</code>. * * @param dest * @return this */ public Matrix4f transpose(Matrix4f dest) { transpose(this, dest); return this; } /** * Transpose this matrix. Modifies the matrix directly. * * @return this */ public Matrix4f transpose() { return transpose(this); } /** * Transpose the original matrix and store the results into the destination Matrix4f. * * @param original * the matrix to transpose * @param dest * will contain the result */ public static void transpose(Matrix4f original, Matrix4f dest) { if (original != dest) { dest.m00 = original.m00; dest.m01 = original.m10; dest.m02 = original.m20; dest.m03 = original.m30; dest.m10 = original.m01; dest.m11 = original.m11; dest.m12 = original.m21; dest.m13 = original.m31; dest.m20 = original.m02; dest.m21 = original.m12; dest.m22 = original.m22; dest.m23 = original.m32; dest.m30 = original.m03; dest.m31 = original.m13; dest.m32 = original.m23; dest.m33 = original.m33; } else { dest.set(original.m00, original.m10, original.m20, original.m30, original.m01, original.m11, original.m21, original.m31, original.m02, original.m12, original.m22, original.m32, original.m03, original.m13, original.m23, original.m33); } } /** * Set this matrix to be a simple translation matrix. 
* <p> * The resulting matrix can be multiplied against another transformation * matrix to obtain an additional translation. * * @return this */ public Matrix4f translation(float x, float y, float z) { identity(); this.m30 = x; this.m31 = y; this.m32 = z; return this; } /** * Set this matrix to be a simple translation matrix. * <p> * The resulting matrix can be multiplied against another transformation * matrix to obtain an additional translation. * * @return this */ public Matrix4f translation(Vector3f position) { return translation(position.x, position.y, position.z); } public String toString() { DecimalFormat formatter = new DecimalFormat("0.000E0"); return "Matrix4f { " + formatter.format(this.m00) + ", " + formatter.format(this.m10) + ", " + formatter.format(this.m20) + ", " + formatter.format(this.m30) + ",\n" + " " + formatter.format(this.m01) + ", " + formatter.format(this.m11) + ", " + formatter.format(this.m21) + ", " + formatter.format(this.m31) + ",\n" + " " + formatter.format(this.m02) + ", " + formatter.format(this.m12) + ", " + formatter.format(this.m22) + ", " + formatter.format(this.m32) + ",\n" + " " + formatter.format(this.m03) + ", " + formatter.format(this.m13) + ", " + formatter.format(this.m23) + ", " + formatter.format(this.m33) + " }\n"; } /** * Get the current values of <code>this</code> matrix and store them into * <code>dest</code>. * <p> * This is the reverse method of {@link #set(Matrix4f)} and allows to obtain * intermediate calculation results when chaining multiple transformations. * * @param dest * the destination matrix * @return this */ public Matrix4f get(Matrix4f dest) { dest.set(this); return this; } /** * Store this matrix into the supplied {@link FloatBuffer}. * <p> * This method will increment the position of the given FloatBuffer by 16, if it returns normally. 
* * @return this */ public Matrix4f get(FloatBuffer buffer) { buffer.put(this.m00); buffer.put(this.m01); buffer.put(this.m02); buffer.put(this.m03); buffer.put(this.m10); buffer.put(this.m11); buffer.put(this.m12); buffer.put(this.m13); buffer.put(this.m20); buffer.put(this.m21); buffer.put(this.m22); buffer.put(this.m23); buffer.put(this.m30); buffer.put(this.m31); buffer.put(this.m32); buffer.put(this.m33); return this; } /** * Store the values of this matrix into the given javax.vecmath.Matrix4f. * * @param javaxVecmathMatrix * @return this */ public Matrix4f toJavaxMatrix(javax.vecmath.Matrix4f javaxVecmathMatrix) { javaxVecmathMatrix.m00 = m00; javaxVecmathMatrix.m10 = m01; javaxVecmathMatrix.m20 = m02; javaxVecmathMatrix.m30 = m03; javaxVecmathMatrix.m01 = m10; javaxVecmathMatrix.m11 = m11; javaxVecmathMatrix.m21 = m12; javaxVecmathMatrix.m31 = m13; javaxVecmathMatrix.m02 = m20; javaxVecmathMatrix.m12 = m21; javaxVecmathMatrix.m22 = m22; javaxVecmathMatrix.m32 = m23; javaxVecmathMatrix.m03 = m30; javaxVecmathMatrix.m13 = m31; javaxVecmathMatrix.m23 = m32; javaxVecmathMatrix.m33 = m33; return this; } /** * Store the values of this matrix into the given org.lwjgl.util.vector.Matrix4f. * * @param lwjglMatrix * @return this */ public Matrix4f toLwjglMatrix(org.lwjgl.util.vector.Matrix4f lwjglMatrix) { lwjglMatrix.m00 = m00; lwjglMatrix.m01 = m01; lwjglMatrix.m02 = m02; lwjglMatrix.m03 = m03; lwjglMatrix.m10 = m10; lwjglMatrix.m11 = m11; lwjglMatrix.m12 = m12; lwjglMatrix.m13 = m13; lwjglMatrix.m20 = m20; lwjglMatrix.m21 = m21; lwjglMatrix.m22 = m22; lwjglMatrix.m23 = m23; lwjglMatrix.m30 = m30; lwjglMatrix.m31 = m31; lwjglMatrix.m32 = m32; lwjglMatrix.m33 = m33; return this; } /** * Store the values of this matrix into the given com.badlogic.gdx.math.Matrix4. 
* * @param gdxMatrix * @return this */ public Matrix4f toGdxMatrix(com.badlogic.gdx.math.Matrix4 gdxMatrix) { gdxMatrix.val[0] = m00; gdxMatrix.val[1] = m01; gdxMatrix.val[2] = m02; gdxMatrix.val[3] = m03; gdxMatrix.val[4] = m10; gdxMatrix.val[5] = m11; gdxMatrix.val[6] = m12; gdxMatrix.val[7] = m13; gdxMatrix.val[8] = m20; gdxMatrix.val[9] = m21; gdxMatrix.val[10] = m22; gdxMatrix.val[11] = m23; gdxMatrix.val[12] = m30; gdxMatrix.val[13] = m31; gdxMatrix.val[14] = m32; gdxMatrix.val[15] = m33; return this; } /** * Store this matrix into the supplied float array. * * @return this */ public Matrix4f get(float[] arr, int offset) { arr[offset+0] = this.m00; arr[offset+1] = this.m01; arr[offset+2] = this.m02; arr[offset+3] = this.m03; arr[offset+4] = this.m10; arr[offset+5] = this.m11; arr[offset+6] = this.m12; arr[offset+7] = this.m13; arr[offset+8] = this.m20; arr[offset+9] = this.m21; arr[offset+10] = this.m22; arr[offset+11] = this.m23; arr[offset+12] = this.m30; arr[offset+13] = this.m31; arr[offset+14] = this.m32; arr[offset+15] = this.m33; return this; } /** * Set all the values within this matrix to <code>0</code>. * * @return this */ public Matrix4f zero() { identity(); this.m00 = 0.0f; this.m11 = 0.0f; this.m22 = 0.0f; this.m33 = 0.0f; return this; } /** * Set this matrix to be a simple scale matrix. * * @param x * the scale in x * @param y * the scale in y * @param z * the scale in z * @return this */ public Matrix4f scaling(float x, float y, float z) { identity(); m00 = x; m11 = y; m22 = z; return this; } /** * Set this matrix to be a simple scale matrix. * * @param scale * the scale applied to each dimension * @return this */ public Matrix4f scaling(Vector3f scale) { identity(); m00 = scale.x; m11 = scale.y; m22 = scale.z; return this; } /** * Set the given matrix <code>dest</code> to be a simple scale matrix. 
* * @param scale * the scale applied to each dimension * @param dest * will hold the result */ public static void scaling(Vector3f scale, Matrix4f dest) { dest.identity(); dest.m00 = scale.x; dest.m11 = scale.y; dest.m22 = scale.z; } /** * Set this matrix to be a simple scale matrix. * * @param x * the scale in x * @param y * the scale in y * @param z * the scale in z * @return this */ public Matrix4f scaling(float x, float y, float z, Matrix4f dest) { dest.identity(); dest.m00 = x; dest.m11 = y; dest.m22 = z; return this; } /** * Set this matrix to a rotation matrix which rotates the given radians about a given axis. * * @param angle * the angle in degrees * @param axis * the axis to rotate about * @return this */ public Matrix4f rotation(float angle, Vector3f axis) { return rotation(angle, axis.x, axis.y, axis.z); } /** * Set this matrix to a rotation matrix which rotates the given radians about a given axis. * <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Rotation_matrix_from_axis_and_angle">http://en.wikipedia.org</a> * * @return this */ public Matrix4f rotation(float angle, float x, float y, float z) { float cos = (float) Math.cos(angle); float sin = (float) Math.sin(angle); float C = 1.0f - cos; m00 = cos + x * x * C; m10 = x * y * C - z * sin; m20 = x * z * C + y * sin; m30 = 0.0f; m01 = y * x * C + z * sin; m11 = cos + y * y * C; m21 = y * z * C - x * sin; m31 = 0.0f; m02 = z * x * C - y * sin; m12 = z * y * C + x * sin; m22 = cos + z * z * C; m32 = 0.0f; m03 = 0.0f; m13 = 0.0f; m23 = 0.0f; m33 = 1.0f; return this; } /** * Set the destination matrix to a rotation matrix which rotates the given radians about a given axis. * The result will be stored in <code>dest</code>. 
* * @param angle * the angle in degrees * @param axis * the axis to rotate about * @param dest * will hold the result */ public static void rotation(float angle, Vector3f axis, Matrix4f dest) { dest.rotation(angle, axis); } /** * Set the upper 3x3 matrix of this {@link Matrix4f} to the given {@link Matrix3f} and the rest to the identity. * * @param mat * the 3x3 matrix * @param dest * the destination matrix whose upper left 3x3 submatrix will be set to <code>mat</code> * @return this */ public Matrix4f fromMatrix3(Matrix3f mat) { fromMatrix3(mat, this); return this; } /** * Set the upper 3x3 matrix of the given <code>dest</code> {@link Matrix4f} * to the given {@link Matrix3f} and the rest to the identity. * * @param mat * the 3x3 matrix * @param dest * the destination matrix whose upper left 3x3 submatrix will be set to <code>mat</code> */ public static void fromMatrix3(Matrix3f mat, Matrix4f dest) { dest.m00 = mat.m00; dest.m01 = mat.m01; dest.m02 = mat.m02; dest.m03 = 0.0f; dest.m10 = mat.m10; dest.m11 = mat.m11; dest.m12 = mat.m12; dest.m13 = 0.0f; dest.m20 = mat.m20; dest.m21 = mat.m21; dest.m22 = mat.m22; dest.m23 = 0.0f; dest.m30 = 0.0f; dest.m31 = 0.0f; dest.m32 = 0.0f; dest.m33 = 1.0f; } /** * Transform/multiply the given vector by this matrix and store the result in that vector. * * @see Vector4f#mul(Matrix4f) * * @param v * the vector to transform and to hold the final result * @return this */ public Matrix4f transform(Vector4f v) { v.mul(this); return this; } /** * Transform/multiply the given vector by this matrix and store the result in <code>dest</code>. * * @see Vector4f#mul(Matrix4f, Vector4f) * * @param v * the vector to transform * @param dest * will contain the result * @return this */ public Matrix4f transform(Vector4f v, Vector4f dest) { v.mul(this, dest); return this; } /** * Transform/multiply the given vector by the given matrix and store the result in that vector. 
* * @see Vector4f#mul(Matrix4f) * * @param mat * the matrix * @param v * the vector to transform and to hold the final result */ public static void transform(Matrix4f mat, Vector4f v) { v.mul(mat); } /** * Apply scaling to this matrix by scaling the unit axes by the given x, * y and z factors. * <p> * If <code>M</code> is <code>this</code> matrix and <code>S</code> the scaling matrix, * then the new matrix will be <code>M * S</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * S * v</code> * , the scaling will be applied first! * * @param x * the factor of the x component * @param y * the factor of the y component * @param z * the factor of the z component * @return this */ public Matrix4f scale(float x, float y, float z) { // scale matrix elements: // m00 = x, m11 = y, m22 = z // m33 = 1 // all others = 0 m00 = m00 * x; m01 = m01 * x; m02 = m02 * x; m03 = m03 * x; m10 = m10 * y; m11 = m11 * y; m12 = m12 * y; m13 = m13 * y; m20 = m20 * z; m21 = m21 * z; m22 = m22 * z; m23 = m23 * z; return this; } /** * Apply scaling to this matrix by uniformly scaling all unit axes by the given <code>xyz</code> factor. * <p> * If <code>M</code> is <code>this</code> matrix and <code>S</code> the scaling matrix, * then the new matrix will be <code>M * S</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * S * v</code> * , the scaling will be applied first! * * @see #scale(float, float, float) * * @param xyz * the factor for all components * @return this */ public Matrix4f scale(float xyz) { return scale(xyz, xyz, xyz); } /** * Apply rotation about the X axis to this matrix by rotating the given amount of degrees. * <p> * If <code>M</code> is <code>this</code> matrix and <code>R</code> the rotation matrix, * then the new matrix will be <code>M * R</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * R * v</code> * , the rotation will be applied first! 
* <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations">http://en.wikipedia.org</a> * * @param ang * the angle in degrees * @return this */ public Matrix4f rotateX(float ang) { float cos = (float) Math.cos(Math.toRadians(ang)); float sin = (float) Math.sin(Math.toRadians(ang)); float rm11 = cos; float rm21 = -sin; float rm12 = sin; float rm22 = cos; // add temporaries for dependent values float nm10 = m10 * rm11 + m20 * rm12; float nm11 = m11 * rm11 + m21 * rm12; float nm12 = m12 * rm11 + m22 * rm12; float nm13 = m13 * rm11 + m23 * rm12; // set non-dependent values directly m20 = m10 * rm21 + m20 * rm22; m21 = m11 * rm21 + m21 * rm22; m22 = m12 * rm21 + m22 * rm22; m23 = m13 * rm21 + m23 * rm22; // set other values m10 = nm10; m11 = nm11; m12 = nm12; m13 = nm13; return this; } /** * Apply rotation about the Y axis to this matrix by rotating the given amount of degrees. * <p> * If <code>M</code> is <code>this</code> matrix and <code>R</code> the rotation matrix, * then the new matrix will be <code>M * R</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * R * v</code> * , the rotation will be applied first! 
* <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations">http://en.wikipedia.org</a> * * @param ang * the angle in degrees * @return this */ public Matrix4f rotateY(float ang) { float cos = (float) Math.cos(Math.toRadians(ang)); float sin = (float) Math.sin(Math.toRadians(ang)); float rm00 = cos; float rm20 = sin; float rm02 = -sin; float rm22 = cos; // add temporaries for dependent values float nm00 = m00 * rm00 + m20 * rm02; float nm01 = m01 * rm00 + m21 * rm02; float nm02 = m02 * rm00 + m22 * rm02; float nm03 = m03 * rm00 + m23 * rm02; // set non-dependent values directly m20 = m00 * rm20 + m20 * rm22; m21 = m01 * rm20 + m21 * rm22; m22 = m02 * rm20 + m22 * rm22; m23 = m03 * rm20 + m23 * rm22; // set other values m00 = nm00; m01 = nm01; m02 = nm02; m03 = nm03; return this; } /** * Apply rotation about the Z axis to this matrix by rotating the given amount of degrees. * <p> * If <code>M</code> is <code>this</code> matrix and <code>R</code> the rotation matrix, * then the new matrix will be <code>M * R</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * R * v</code> * , the rotation will be applied first! 
* <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations">http://en.wikipedia.org</a> * * @param ang * the angle in degrees * @return this */ public Matrix4f rotateZ(float ang) { float cos = (float) Math.cos(Math.toRadians(ang)); float sin = (float) Math.sin(Math.toRadians(ang)); float rm00 = cos; float rm10 = -sin; float rm01 = sin; float rm11 = cos; // add temporaries for dependent values float nm00 = m00 * rm00 + m10 * rm01; float nm01 = m01 * rm00 + m11 * rm01; float nm02 = m02 * rm00 + m12 * rm01; float nm03 = m03 * rm00 + m13 * rm01; float nm10 = m00 * rm10 + m10 * rm11; float nm11 = m01 * rm10 + m11 * rm11; float nm12 = m02 * rm10 + m12 * rm11; float nm13 = m03 * rm10 + m13 * rm11; // set other values m00 = nm00; m01 = nm01; m02 = nm02; m03 = nm03; m10 = nm10; m11 = nm11; m12 = nm12; m13 = nm13; return this; } /** * Apply rotation to this matrix by rotating the given amount of degrees * about the given axis specified as x, y and z components. * <p> * If <code>M</code> is <code>this</code> matrix and <code>R</code> the rotation matrix, * then the new matrix will be <code>M * R</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * R * v</code> * , the rotation will be applied first! 
* <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Rotation_matrix_from_axis_and_angle">http://en.wikipedia.org</a> * * @param ang * the angle in degrees * @param x * the x component of the axis * @param y * the y component of the axis * @param z * the z component of the axis * @return this */ public Matrix4f rotate(float ang, float x, float y, float z) { float s = (float) Math.sin(Math.toRadians(ang)); float c = (float) Math.cos(Math.toRadians(ang)); float C = 1.0f - c; // rotation matrix elements: // m30, m31, m32, m03, m13, m23 = 0 // m33 = 1 float rm00 = x * x * C + c; float rm01 = y * x * C + z * s; float rm02 = z * x * C - y * s; float rm10 = x * y * C - z * s; float rm11 = y * y * C + c; float rm12 = z * y * C + x * s; float rm20 = x * z * C + y * s; float rm21 = y * z * C - x * s; float rm22 = z * z * C + c; // add temporaries for dependent values float nm00 = m00 * rm00 + m10 * rm01 + m20 * rm02; float nm01 = m01 * rm00 + m11 * rm01 + m21 * rm02; float nm02 = m02 * rm00 + m12 * rm01 + m22 * rm02; float nm03 = m03 * rm00 + m13 * rm01 + m23 * rm02; float nm10 = m00 * rm10 + m10 * rm11 + m20 * rm12; float nm11 = m01 * rm10 + m11 * rm11 + m21 * rm12; float nm12 = m02 * rm10 + m12 * rm11 + m22 * rm12; float nm13 = m03 * rm10 + m13 * rm11 + m23 * rm12; // set non-dependent values directly m20 = m00 * rm20 + m10 * rm21 + m20 * rm22; m21 = m01 * rm20 + m11 * rm21 + m21 * rm22; m22 = m02 * rm20 + m12 * rm21 + m22 * rm22; m23 = m03 * rm20 + m13 * rm21 + m23 * rm22; // set other values m00 = nm00; m01 = nm01; m02 = nm02; m03 = nm03; m10 = nm10; m11 = nm11; m12 = nm12; m13 = nm13; return this; } /** * Apply a translation to this matrix by translating by the given number of * units in x, y and z. * <p> * If <code>M</code> is <code>this</code> matrix and <code>T</code> the translation * matrix, then the new matrix will be <code>M * T</code>. 
So when * transforming a vector <code>v</code> with the new matrix by using * <code>M * T * v</code>, the translation will be applied first! * * @param point * @return this */ public Matrix4f translate(Vector3f point) { return translate(point.x, point.y, point.z); } /** * Apply a translation to this matrix by translating by the given number of * units in x, y and z. * <p> * If <code>M</code> is <code>this</code> matrix and <code>T</code> the translation * matrix, then the new matrix will be <code>M * T</code>. So when * transforming a vector <code>v</code> with the new matrix by using * <code>M * T * v</code>, the translation will be applied first! * * @param x * @param y * @param z * @return this */ public Matrix4f translate(float x, float y, float z) { Matrix4f c = this; // translation matrix elements: // m00, m11, m22, m33 = 1 // m30 = x, m31 = y, m32 = z // all others = 0 c.m30 = c.m00 * x + c.m10 * y + c.m20 * z + c.m30; c.m31 = c.m01 * x + c.m11 * y + c.m21 * z + c.m31; c.m32 = c.m02 * x + c.m12 * y + c.m22 * z + c.m32; c.m33 = c.m03 * x + c.m13 * y + c.m23 * z + c.m33; return this; } public void writeExternal(ObjectOutput out) throws IOException { out.writeFloat(m00); out.writeFloat(m01); out.writeFloat(m02); out.writeFloat(m03); out.writeFloat(m10); out.writeFloat(m11); out.writeFloat(m12); out.writeFloat(m13); out.writeFloat(m20); out.writeFloat(m21); out.writeFloat(m22); out.writeFloat(m23); out.writeFloat(m30); out.writeFloat(m31); out.writeFloat(m32); out.writeFloat(m33); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { m00 = in.readFloat(); m01 = in.readFloat(); m02 = in.readFloat(); m03 = in.readFloat(); m10 = in.readFloat(); m11 = in.readFloat(); m12 = in.readFloat(); m13 = in.readFloat(); m20 = in.readFloat(); m21 = in.readFloat(); m22 = in.readFloat(); m23 = in.readFloat(); m30 = in.readFloat(); m31 = in.readFloat(); m32 = in.readFloat(); m33 = in.readFloat(); } /** * Apply an orthographic projection 
transformation to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>O</code> the orthographic projection matrix, * then the new matrix will be <code>M * O</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * O * v</code> * , the orthographic projection transformation will be applied first! * <p> * Reference: <a href="http://www.songho.ca/opengl/gl_projectionmatrix.html">http://www.songho.ca</a> * * @param left * the distance from the center to the left frustum edge * @param right * the distance from the center to the right frustum edge * @param bottom * the distance from the center to the bottom frustum edge * @param top * the distance from the center to the top frustum edge * @param zNear * near clipping plane distance * @param zFar * far clipping plane distance * @return this */ public Matrix4f ortho(float left, float right, float bottom, float top, float zNear, float zFar) { // calculate right matrix elements float rm00 = 2.0f / (right - left); float rm11 = 2.0f / (top - bottom); float rm22 = -2.0f / (zFar - zNear); float rm30 = -(right + left) / (right - left); float rm31 = -(top + bottom) / (top - bottom); float rm32 = -(zFar + zNear) / (zFar - zNear); // perform optimized multiplication // compute the last column first, because other rows do not depend on it m30 = m00 * rm30 + m10 * rm31 + m20 * rm32 + m30; m31 = m01 * rm30 + m11 * rm31 + m21 * rm32 + m31; m32 = m02 * rm30 + m12 * rm31 + m22 * rm32 + m32; m33 = m03 * rm30 + m13 * rm31 + m23 * rm32 + m33; m00 = m00 * rm00; m01 = m01 * rm00; m02 = m02 * rm00; m03 = m03 * rm00; m10 = m10 * rm11; m11 = m11 * rm11; m12 = m12 * rm11; m13 = m13 * rm11; m20 = m20 * rm22; m21 = m21 * rm22; m22 = m22 * rm22; m23 = m23 * rm22; return this; } /** * Set this matrix to be an orthographic projection transformation. 
* <p> * Reference: <a href="http://www.songho.ca/opengl/gl_projectionmatrix.html">http://www.songho.ca</a> * * @param left * the distance from the center to the left frustum edge * @param right * the distance from the center to the right frustum edge * @param bottom * the distance from the center to the bottom frustum edge * @param top * the distance from the center to the top frustum edge * @param zNear * near clipping plane distance * @param zFar * far clipping plane distance * @return this */ public Matrix4f setOrtho(float left, float right, float bottom, float top, float zNear, float zFar) { m00 = 2.0f / (right - left); m01 = 0.0f; m02 = 0.0f; m03 = 0.0f; m10 = 0.0f; m11 = 2.0f / (top - bottom); m12 = 0.0f; m13 = 0.0f; m20 = 0.0f; m21 = 0.0f; m22 = -2.0f / (zFar - zNear); m23 = 0.0f; m30 = -(right + left) / (right - left); m31 = -(top + bottom) / (top - bottom); m32 = -(zFar + zNear) / (zFar - zNear); m33 = 1.0f; return this; } /** * Apply a rotation transformation to this matrix to make <code>-z</code> point along <code>dir</code>. * <p> * If <code>M</code> is <code>this</code> matrix and <code>L</code> the lookalong rotation matrix, * then the new matrix will be <code>M * L</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * L * v</code> * , the lookalong rotation transformation will be applied first! * * @param dir * the direction in space to look along * @param up * the direction of 'up' * @return this */ public Matrix4f lookAlong(Vector3f dir, Vector3f up) { return lookAlong(dir.x, dir.y, dir.z, up.x, up.y, up.z); } /** * Apply a rotation transformation to this matrix to make <code>-z</code> point along <code>dir</code>. * <p> * If <code>M</code> is <code>this</code> matrix and <code>L</code> the lookalong rotation matrix, * then the new matrix will be <code>M * L</code>. 
So when transforming a * vector <code>v</code> with the new matrix by using <code>M * L * v</code> * , the lookalong rotation transformation will be applied first! * <p> * @return this */ public Matrix4f lookAlong(float dirX, float dirY, float dirZ, float upX, float upY, float upZ) { // Normalize direction float dirLength = (float) Math.sqrt(dirX * dirX + dirY * dirY + dirZ * dirZ); dirX /= dirLength; dirY /= dirLength; dirZ /= dirLength; float upLength = (float) Math.sqrt(upX * upX + upY * upY + upZ * upZ); upX /= upLength; upY /= upLength; upZ /= upLength; // right = direction x up float rightX, rightY, rightZ; rightX = dirY * upZ - dirZ * upY; rightY = dirZ * upX - dirX * upZ; rightZ = dirX * upY - dirY * upX; // up = right x direction upX = rightY * dirZ - rightZ * dirY; upY = rightZ * dirX - rightX * dirZ; upZ = rightX * dirY - rightY * dirX; // calculate right matrix elements float rm00 = rightX; float rm01 = upX; float rm02 = -dirX; float rm10 = rightY; float rm11 = upY; float rm12 = -dirY; float rm20 = rightZ; float rm21 = upZ; float rm22 = -dirZ; // perform optimized matrix multiplication // introduce temporaries for dependent results float m00 = this.m00 * rm00 + m10 * rm01 + m20 * rm02; float m01 = this.m01 * rm00 + m11 * rm01 + m21 * rm02; float m02 = this.m02 * rm00 + m12 * rm01 + m22 * rm02; float m03 = this.m03 * rm00 + m13 * rm01 + m23 * rm02; float m10 = this.m00 * rm10 + this.m10 * rm11 + m20 * rm12; float m11 = this.m01 * rm10 + this.m11 * rm11 + m21 * rm12; float m12 = this.m02 * rm10 + this.m12 * rm11 + m22 * rm12; float m13 = this.m03 * rm10 + this.m13 * rm11 + m23 * rm12; m20 = this.m00 * rm20 + this.m10 * rm21 + this.m20 * rm22; m21 = this.m01 * rm20 + this.m11 * rm21 + this.m21 * rm22; m22 = this.m02 * rm20 + this.m12 * rm21 + this.m22 * rm22; m23 = this.m03 * rm20 + this.m13 * rm21 + this.m23 * rm22; // set the rest of the matrix elements this.m00 = m00; this.m01 = m01; this.m02 = m02; this.m03 = m03; this.m10 = m10; this.m11 = m11; 
this.m12 = m12; this.m13 = m13; return this; } /** * Set this matrix to be a "lookat" transformation. * * @param eye * the position of the camera * @param center * the point in space to look at * @param up * the direction of 'up' * @return this */ public Matrix4f setLookAt(Vector3f eye, Vector3f center, Vector3f up) { return setLookAt(eye.x, eye.y, eye.z, center.x, center.y, center.z, up.x, up.y, up.z); } /** * Set this matrix to be a "lookat" transformation. * <p> * @return this */ public Matrix4f setLookAt(float eyeX, float eyeY, float eyeZ, float centerX, float centerY, float centerZ, float upX, float upY, float upZ) { // Compute direction from position to lookAt float dirX, dirY, dirZ; dirX = centerX - eyeX; dirY = centerY - eyeY; dirZ = centerZ - eyeZ; // Normalize direction float dirLength = (float) Math.sqrt( (eyeX - centerX) * (eyeX - centerX) + (eyeY - centerY) * (eyeY - centerY) + (eyeZ - centerZ) * (eyeZ - centerZ)); dirX /= dirLength; dirY /= dirLength; dirZ /= dirLength; // Normalize up float upLength = (float) Math.sqrt(upX * upX + upY * upY + upZ * upZ); upX /= upLength; upY /= upLength; upZ /= upLength; // right = direction x up float rightX, rightY, rightZ; rightX = dirY * upZ - dirZ * upY; rightY = dirZ * upX - dirX * upZ; rightZ = dirX * upY - dirY * upX; // up = right x direction upX = rightY * dirZ - rightZ * dirY; upY = rightZ * dirX - rightX * dirZ; upZ = rightX * dirY - rightY * dirX; m00 = rightX; m01 = upX; m02 = -dirX; m03 = 0.0f; m10 = rightY; m11 = upY; m12 = -dirY; m13 = 0.0f; m20 = rightZ; m21 = upZ; m22 = -dirZ; m23 = 0.0f; m30 = -rightX * eyeX - rightY * eyeY - rightZ * eyeZ; m31 = -upX * eyeX - upY * eyeY - upZ * eyeZ; m32 = dirX * eyeX + dirY * eyeY + dirZ * eyeZ; m33 = 1.0f; return this; } /** * Apply a "lookat" transformation to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>L</code> the lookat matrix, * then the new matrix will be <code>M * L</code>. 
So when transforming a * vector <code>v</code> with the new matrix by using <code>M * L * v</code> * , the lookat transformation will be applied first! * * @param eye * the position of the camera * @param center * the point in space to look at * @param up * the direction of 'up' * @return this */ public Matrix4f lookAt(Vector3f eye, Vector3f center, Vector3f up) { return lookAt(eye.x, eye.y, eye.z, center.x, center.y, center.z, up.x, up.y, up.z); } /** * Apply a "lookat" transformation to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>L</code> the lookat matrix, * then the new matrix will be <code>M * L</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * L * v</code> * , the lookat transformation will be applied first! * <p> * @return this */ public Matrix4f lookAt(float eyeX, float eyeY, float eyeZ, float centerX, float centerY, float centerZ, float upX, float upY, float upZ) { // Compute direction from position to lookAt float dirX, dirY, dirZ; dirX = centerX - eyeX; dirY = centerY - eyeY; dirZ = centerZ - eyeZ; // Normalize direction float dirLength = (float) Math.sqrt( (eyeX - centerX) * (eyeX - centerX) + (eyeY - centerY) * (eyeY - centerY) + (eyeZ - centerZ) * (eyeZ - centerZ)); dirX /= dirLength; dirY /= dirLength; dirZ /= dirLength; // Normalize up float upLength = (float) Math.sqrt(upX * upX + upY * upY + upZ * upZ); upX /= upLength; upY /= upLength; upZ /= upLength; // right = direction x up float rightX, rightY, rightZ; rightX = dirY * upZ - dirZ * upY; rightY = dirZ * upX - dirX * upZ; rightZ = dirX * upY - dirY * upX; // up = right x direction upX = rightY * dirZ - rightZ * dirY; upY = rightZ * dirX - rightX * dirZ; upZ = rightX * dirY - rightY * dirX; // calculate right matrix elements float rm00 = rightX; float rm01 = upX; float rm02 = -dirX; float rm10 = rightY; float rm11 = upY; float rm12 = -dirY; float rm20 = rightZ; float rm21 = upZ; float rm22 = -dirZ; float rm30 = -rightX 
* eyeX - rightY * eyeY - rightZ * eyeZ; float rm31 = -upX * eyeX - upY * eyeY - upZ * eyeZ; float rm32 = dirX * eyeX + dirY * eyeY + dirZ * eyeZ; // perform optimized matrix multiplication // compute last column first, because others do not depend on it m30 = m00 * rm30 + m10 * rm31 + m20 * rm32 + m30; m31 = m01 * rm30 + m11 * rm31 + m21 * rm32 + m31; m32 = m02 * rm30 + m12 * rm31 + m22 * rm32 + m32; m33 = m03 * rm30 + m13 * rm31 + m23 * rm32 + m33; // introduce temporaries for dependent results float m00 = this.m00 * rm00 + m10 * rm01 + m20 * rm02; float m01 = this.m01 * rm00 + m11 * rm01 + m21 * rm02; float m02 = this.m02 * rm00 + m12 * rm01 + m22 * rm02; float m03 = this.m03 * rm00 + m13 * rm01 + m23 * rm02; float m10 = this.m00 * rm10 + this.m10 * rm11 + m20 * rm12; float m11 = this.m01 * rm10 + this.m11 * rm11 + m21 * rm12; float m12 = this.m02 * rm10 + this.m12 * rm11 + m22 * rm12; float m13 = this.m03 * rm10 + this.m13 * rm11 + m23 * rm12; m20 = this.m00 * rm20 + this.m10 * rm21 + this.m20 * rm22; m21 = this.m01 * rm20 + this.m11 * rm21 + this.m21 * rm22; m22 = this.m02 * rm20 + this.m12 * rm21 + this.m22 * rm22; m23 = this.m03 * rm20 + this.m13 * rm21 + this.m23 * rm22; // set the rest of the matrix elements this.m00 = m00; this.m01 = m01; this.m02 = m02; this.m03 = m03; this.m10 = m10; this.m11 = m11; this.m12 = m12; this.m13 = m13; return this; } /** * Apply a symmetric perspective projection frustum transformation to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>P</code> the perspective projection matrix, * then the new matrix will be <code>M * P</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * P * v</code> * , the perspective projection will be applied first! * <p> * @param fovy * the vertical field of view in degrees * @param aspect * the aspect ratio (i.e. 
width / height) * @param zNear * near clipping plane distance * @param zFar * far clipping plane distance * @return this */ public Matrix4f perspective(float fovy, float aspect, float zNear, float zFar) { float h = (float) Math.tan(Math.toRadians(fovy) * 0.5f) * zNear; float w = h * aspect; float fl = -w; float fr = +w; float fb = -h; float ft = +h; return frustum(fl, fr, fb, ft, zNear, zFar); } /** * Set this matrix to be a symmetric perspective projection frustum transformation. * <p> * @param fovy * the vertical field of view in degrees * @param aspect * the aspect ratio (i.e. width / height) * @param zNear * near clipping plane distance * @param zFar * far clipping plane distance * @return this */ public Matrix4f setPerspective(float fovy, float aspect, float zNear, float zFar) { float h = (float) Math.tan(Math.toRadians(fovy) * 0.5f) * zNear; float w = h * aspect; float fl = -w; float fr = +w; float fb = -h; float ft = +h; return setFrustum(fl, fr, fb, ft, zNear, zFar); } /** * Apply an arbitrary perspective projection frustum transformation to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>F</code> the frustum matrix, * then the new matrix will be <code>M * F</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * F * v</code> * , the frustum transformation will be applied first! 
* <p> * Reference: <a href="http://www.songho.ca/opengl/gl_projectionmatrix.html">http://www.songho.ca</a> * * @param left * the distance along the x-axis to the left frustum edge * @param right * the distance along the x-axis to the right frustum edge * @param bottom * the distance along the y-axis to the bottom frustum edge * @param top * the distance along the y-axis to the top frustum edge * @param zNear * the distance along the z-axis to the near clipping plane * @param zFar * the distance along the z-axis to the far clipping plane * @return this */ public Matrix4f frustum(float left, float right, float bottom, float top, float zNear, float zFar) { // calculate right matrix elements float rm00 = 2.0f * zNear / (right - left); float rm11 = 2.0f * zNear / (top - bottom); float rm20 = (right + left) / (right - left); float rm21 = (top + bottom) / (top - bottom); float rm22 = -(zFar + zNear) / (zFar - zNear); float rm32 = -2.0f * zFar * zNear / (zFar - zNear); // perform optimized matrix multiplication float m20 = m00 * rm20 + m10 * rm21 + this.m20 * rm22 - m30; float m21 = m01 * rm20 + m11 * rm21 + this.m21 * rm22 - m31; float m22 = m02 * rm20 + m12 * rm21 + this.m22 * rm22 - m32; float m23 = m03 * rm20 + m13 * rm21 + this.m23 * rm22 - m33; m00 = m00 * rm00; m01 = m01 * rm00; m02 = m02 * rm00; m03 = m03 * rm00; m10 = m10 * rm11; m11 = m11 * rm11; m12 = m12 * rm11; m13 = m13 * rm11; m30 = this.m20 * rm32; m31 = this.m21 * rm32; m32 = this.m22 * rm32; m33 = this.m23 * rm32; this.m20 = m20; this.m21 = m21; this.m22 = m22; this.m23 = m23; return this; } /** * Set this matrix to be an arbitrary perspective projection frustum transformation. 
* <p> * Reference: <a href="http://www.songho.ca/opengl/gl_projectionmatrix.html">http://www.songho.ca</a> * * @param left * the distance along the x-axis to the left frustum edge * @param right * the distance along the x-axis to the right frustum edge * @param bottom * the distance along the y-axis to the bottom frustum edge * @param top * the distance along the y-axis to the top frustum edge * @param zNear * the distance along the z-axis to the near clipping plane * @param zFar * the distance along the z-axis to the far clipping plane * @return this */ public Matrix4f setFrustum(float left, float right, float bottom, float top, float zNear, float zFar) { // calculate right matrix elements m00 = 2.0f * zNear / (right - left); m01 = 0.0f; m02 = 0.0f; m03 = 0.0f; m10 = 0.0f; m11 = 2.0f * zNear / (top - bottom); m12 = 0.0f; m13 = 0.0f; m20 = (right + left) / (right - left); m21 = (top + bottom) / (top - bottom); m22 = -(zFar + zNear) / (zFar - zNear); m23 = 0.0f; m30 = 0.0f; m31 = 0.0f; m32 = -2.0f * zFar * zNear / (zFar - zNear); m33 = 1.0f; return this; } /** * Apply the rotation transformation of the given {@link Quaternion} to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>Q</code> the rotation matrix obtained from the given quaternion, * then the new matrix will be <code>M * Q</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * Q * v</code> * , the quaternion rotation will be applied first! 
* <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Quaternion">http://en.wikipedia.org</a> * * @param quat * the {@link Quaternion} * @return this */ public Matrix4f rotate(Quaternion quat) { float q00 = 2.0f * quat.x * quat.x; float q11 = 2.0f * quat.y * quat.y; float q22 = 2.0f * quat.z * quat.z; float q01 = 2.0f * quat.x * quat.y; float q02 = 2.0f * quat.x * quat.z; float q03 = 2.0f * quat.x * quat.w; float q12 = 2.0f * quat.y * quat.z; float q13 = 2.0f * quat.y * quat.w; float q23 = 2.0f * quat.z * quat.w; float rm00 = 1.0f - q11 - q22; float rm01 = q01 + q23; float rm02 = q02 - q13; float rm10 = q01 - q23; float rm11 = 1.0f - q22 - q00; float rm12 = q12 + q03; float rm20 = q02 + q13; float rm21 = q12 - q03; float rm22 = 1.0f - q11 - q00; float nm00 = m00 * rm00 + m10 * rm01 + m20 * rm02; float nm01 = m01 * rm00 + m11 * rm01 + m21 * rm02; float nm02 = m02 * rm00 + m12 * rm01 + m22 * rm02; float nm03 = m03 * rm00 + m13 * rm01 + m23 * rm02; float nm10 = m00 * rm10 + m10 * rm11 + m20 * rm12; float nm11 = m01 * rm10 + m11 * rm11 + m21 * rm12; float nm12 = m02 * rm10 + m12 * rm11 + m22 * rm12; float nm13 = m03 * rm10 + m13 * rm11 + m23 * rm12; m20 = m00 * rm20 + m10 * rm21 + m20 * rm22; m21 = m01 * rm20 + m11 * rm21 + m21 * rm22; m22 = m02 * rm20 + m12 * rm21 + m22 * rm22; m23 = m03 * rm20 + m13 * rm21 + m23 * rm22; this.m00 = nm00; this.m01 = nm01; this.m02 = nm02; this.m03 = nm03; this.m10 = nm10; this.m11 = nm11; this.m12 = nm12; this.m13 = nm13; return this; } /** * Apply the rotation transformation of the given {@link AngleAxis4f} to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>A</code> the rotation matrix obtained from the given angle-axis, * then the new matrix will be <code>M * A</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * A * v</code> * , the angle-axis rotation will be applied first! 
* <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Axis_and_angle">http://en.wikipedia.org</a> * * @param axisAngle * the {@link AngleAxis4f} (needs to be {@link AngleAxis4f#normalize() normalized}) * @return this */ public Matrix4f rotate(AngleAxis4f axisAngle) { return rotate(axisAngle.angle, axisAngle.x, axisAngle.y, axisAngle.z); } }
src/com/joml/Matrix4f.java
/*
 * (C) Copyright 2015 Richard Greenlees
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software
 * and associated documentation files (the "Software"), to deal in the Software without restriction,
 * including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
 * subject to the following conditions:
 *
 * 1) The above copyright notice and this permission notice shall be included
 * in all copies or substantial portions of the Software.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 */
package com.joml;

import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
import java.nio.FloatBuffer;
import java.text.DecimalFormat;

/**
 * Contains the definition of a 4x4 Matrix of floats, and associated functions to transform
 * it. The matrix is column-major to match OpenGL's interpretation, and it looks like this:
 * <p>
 * m00  m10  m20  m30<br>
 * m01  m11  m21  m31<br>
 * m02  m12  m22  m32<br>
 * m03  m13  m23  m33<br>
 *
 * @author Richard Greenlees
 * @author Kai Burjack
 */
public class Matrix4f implements Serializable, Externalizable {

    // Matrix elements in column-major order: mCR means column C, row R.
    public float m00;
    public float m01;
    public float m02;
    public float m03;
    public float m10;
    public float m11;
    public float m12;
    public float m13;
    public float m20;
    public float m21;
    public float m22;
    public float m23;
    public float m30;
    public float m31;
    public float m32;
    public float m33;

    /** Create a new {@link Matrix4f} initialized to the identity. */
    public Matrix4f() {
        super();
        identity();
    }

    /**
     * Create a new {@link Matrix4f} by setting its upper left 3x3 submatrix to the values of the given {@link Matrix3f}
     * and the rest to identity.
* * @param mat * the {@link Matrix3f} */ public Matrix4f(Matrix3f mat) { this.m00 = mat.m00; this.m01 = mat.m01; this.m02 = mat.m02; this.m10 = mat.m10; this.m11 = mat.m11; this.m12 = mat.m12; this.m20 = mat.m20; this.m21 = mat.m21; this.m22 = mat.m22; this.m33 = 1.0f; } /** Clones this matrix from the supplied matrix */ public Matrix4f(Matrix4f mat) { this.m00 = mat.m00; this.m01 = mat.m01; this.m02 = mat.m02; this.m03 = mat.m03; this.m10 = mat.m10; this.m11 = mat.m11; this.m12 = mat.m12; this.m13 = mat.m13; this.m20 = mat.m20; this.m21 = mat.m21; this.m22 = mat.m22; this.m23 = mat.m23; this.m30 = mat.m30; this.m31 = mat.m31; this.m32 = mat.m32; this.m33 = mat.m33; } /** Create a new 4x4 matrix using the supplied float values */ public Matrix4f(float m00, float m01, float m02, float m03, float m10, float m11, float m12, float m13, float m20, float m21, float m22, float m23, float m30, float m31, float m32, float m33) { this.m00 = m00; this.m01 = m01; this.m02 = m02; this.m03 = m03; this.m10 = m10; this.m11 = m11; this.m12 = m12; this.m13 = m13; this.m20 = m20; this.m21 = m21; this.m22 = m22; this.m23 = m23; this.m30 = m30; this.m31 = m31; this.m32 = m32; this.m33 = m33; } public Matrix4f(FloatBuffer buffer) { m00 = buffer.get(); m01 = buffer.get(); m02 = buffer.get(); m03 = buffer.get(); m10 = buffer.get(); m11 = buffer.get(); m12 = buffer.get(); m13 = buffer.get(); m20 = buffer.get(); m21 = buffer.get(); m22 = buffer.get(); m23 = buffer.get(); m30 = buffer.get(); m31 = buffer.get(); m32 = buffer.get(); m33 = buffer.get(); } /** * Reset this matrix to the identity. 
* * @return this */ public Matrix4f identity() { this.m00 = 1.0f; this.m01 = 0.0f; this.m02 = 0.0f; this.m03 = 0.0f; this.m10 = 0.0f; this.m11 = 1.0f; this.m12 = 0.0f; this.m13 = 0.0f; this.m20 = 0.0f; this.m21 = 0.0f; this.m22 = 1.0f; this.m23 = 0.0f; this.m30 = 0.0f; this.m31 = 0.0f; this.m32 = 0.0f; this.m33 = 1.0f; return this; } /** * Set the values within this matrix to be the same as the supplied matrix. * * @return this */ public Matrix4f set(Matrix4f m1) { this.m00 = m1.m00; this.m01 = m1.m01; this.m02 = m1.m02; this.m03 = m1.m03; this.m10 = m1.m10; this.m11 = m1.m11; this.m12 = m1.m12; this.m13 = m1.m13; this.m20 = m1.m20; this.m21 = m1.m21; this.m22 = m1.m22; this.m23 = m1.m23; this.m30 = m1.m30; this.m31 = m1.m31; this.m32 = m1.m32; this.m33 = m1.m33; return this; } /** * Set the upper left 3x3 submatrix of this {@link Matrix4f} to the given {@link Matrix3f} and the rest to identity. * * @param mat * the {@link Matrix3f} * @return this */ public Matrix4f set(Matrix3f mat) { this.m00 = mat.m00; this.m01 = mat.m01; this.m02 = mat.m02; this.m03 = 0.0f; this.m10 = mat.m10; this.m11 = mat.m11; this.m12 = mat.m12; this.m13 = 0.0f; this.m20 = mat.m20; this.m21 = mat.m21; this.m22 = mat.m22; this.m23 = 0.0f; this.m30 = 0.0f; this.m31 = 0.0f; this.m32 = 0.0f; this.m33 = 1.0f; return this; } /** * Set the values of this matrix to the ones of the given javax.vecmath matrix. 
     *
     * @param javaxVecmathMatrix
     * @return this
     */
    public Matrix4f fromJavaxMatrix(javax.vecmath.Matrix4f javaxVecmathMatrix) {
        // NOTE(review): javax.vecmath names its fields row-first (mRC) while this class
        // is column-major (mCR), hence the transposed index mapping — confirm against vecmath docs.
        m00 = javaxVecmathMatrix.m00;
        m01 = javaxVecmathMatrix.m10;
        m02 = javaxVecmathMatrix.m20;
        m03 = javaxVecmathMatrix.m30;
        m10 = javaxVecmathMatrix.m01;
        m11 = javaxVecmathMatrix.m11;
        m12 = javaxVecmathMatrix.m21;
        m13 = javaxVecmathMatrix.m31;
        m20 = javaxVecmathMatrix.m02;
        m21 = javaxVecmathMatrix.m12;
        m22 = javaxVecmathMatrix.m22;
        m23 = javaxVecmathMatrix.m32;
        m30 = javaxVecmathMatrix.m03;
        m31 = javaxVecmathMatrix.m13;
        m32 = javaxVecmathMatrix.m23;
        m33 = javaxVecmathMatrix.m33;
        return this;
    }

    /**
     * Set the values of this matrix to the ones of the given org.lwjgl.util.vector.Matrix4f matrix.
     *
     * @param lwjglMatrix
     * @return this
     */
    public Matrix4f fromLwjglMatrix(org.lwjgl.util.vector.Matrix4f lwjglMatrix) {
        // LWJGL uses the same mCR field naming, so this is a straight field-for-field copy.
        m00 = lwjglMatrix.m00;
        m01 = lwjglMatrix.m01;
        m02 = lwjglMatrix.m02;
        m03 = lwjglMatrix.m03;
        m10 = lwjglMatrix.m10;
        m11 = lwjglMatrix.m11;
        m12 = lwjglMatrix.m12;
        m13 = lwjglMatrix.m13;
        m20 = lwjglMatrix.m20;
        m21 = lwjglMatrix.m21;
        m22 = lwjglMatrix.m22;
        m23 = lwjglMatrix.m23;
        m30 = lwjglMatrix.m30;
        m31 = lwjglMatrix.m31;
        m32 = lwjglMatrix.m32;
        m33 = lwjglMatrix.m33;
        return this;
    }

    /**
     * Set the values of this matrix to the ones of the given com.badlogic.gdx.math.Matrix4 matrix.
     *
     * @param gdxMatrix
     * @return this
     */
    public Matrix4f fromGdxMatrix(com.badlogic.gdx.math.Matrix4 gdxMatrix) {
        // assumes libGDX stores val[] in column-major order matching this class — TODO confirm
        m00 = gdxMatrix.val[0];
        m01 = gdxMatrix.val[1];
        m02 = gdxMatrix.val[2];
        m03 = gdxMatrix.val[3];
        m10 = gdxMatrix.val[4];
        m11 = gdxMatrix.val[5];
        m12 = gdxMatrix.val[6];
        m13 = gdxMatrix.val[7];
        m20 = gdxMatrix.val[8];
        m21 = gdxMatrix.val[9];
        m22 = gdxMatrix.val[10];
        m23 = gdxMatrix.val[11];
        m30 = gdxMatrix.val[12];
        m31 = gdxMatrix.val[13];
        m32 = gdxMatrix.val[14];
        m33 = gdxMatrix.val[15];
        return this;
    }

    /**
     * Multiply this matrix by the supplied <code>right</code> matrix and store the result in <code>this</code>.
* * @param right the right matrix * @return this */ public Matrix4f mul(Matrix4f right) { mul(this, right, this); return this; } /** * Multiply this matrix by the supplied <code>right</code> matrix and store the result in <code>dest</code>. * * @param right the right matrix * @param dest the destination matrix * @return this */ public Matrix4f mul(Matrix4f right, Matrix4f dest) { mul(this, right, dest); return this; } /** * Multiply the supplied left matrix by the right and store the result into dest. */ public static void mul(Matrix4f left, Matrix4f right, Matrix4f dest) { if (left != dest && right != dest) { dest.m00 = left.m00 * right.m00 + left.m10 * right.m01 + left.m20 * right.m02 + left.m30 * right.m03; dest.m01 = left.m01 * right.m00 + left.m11 * right.m01 + left.m21 * right.m02 + left.m31 * right.m03; dest.m02 = left.m02 * right.m00 + left.m12 * right.m01 + left.m22 * right.m02 + left.m32 * right.m03; dest.m03 = left.m03 * right.m00 + left.m13 * right.m01 + left.m23 * right.m02 + left.m33 * right.m03; dest.m10 = left.m00 * right.m10 + left.m10 * right.m11 + left.m20 * right.m12 + left.m30 * right.m13; dest.m11 = left.m01 * right.m10 + left.m11 * right.m11 + left.m21 * right.m12 + left.m31 * right.m13; dest.m12 = left.m02 * right.m10 + left.m12 * right.m11 + left.m22 * right.m12 + left.m32 * right.m13; dest.m13 = left.m03 * right.m10 + left.m13 * right.m11 + left.m23 * right.m12 + left.m33 * right.m13; dest.m20 = left.m00 * right.m20 + left.m10 * right.m21 + left.m20 * right.m22 + left.m30 * right.m23; dest.m21 = left.m01 * right.m20 + left.m11 * right.m21 + left.m21 * right.m22 + left.m31 * right.m23; dest.m22 = left.m02 * right.m20 + left.m12 * right.m21 + left.m22 * right.m22 + left.m32 * right.m23; dest.m23 = left.m03 * right.m20 + left.m13 * right.m21 + left.m23 * right.m22 + left.m33 * right.m23; dest.m30 = left.m00 * right.m30 + left.m10 * right.m31 + left.m20 * right.m32 + left.m30 * right.m33; dest.m31 = left.m01 * right.m30 + left.m11 * right.m31 
+ left.m21 * right.m32 + left.m31 * right.m33; dest.m32 = left.m02 * right.m30 + left.m12 * right.m31 + left.m22 * right.m32 + left.m32 * right.m33; dest.m33 = left.m03 * right.m30 + left.m13 * right.m31 + left.m23 * right.m32 + left.m33 * right.m33; } else { dest.set(left.m00 * right.m00 + left.m10 * right.m01 + left.m20 * right.m02 + left.m30 * right.m03, left.m01 * right.m00 + left.m11 * right.m01 + left.m21 * right.m02 + left.m31 * right.m03, left.m02 * right.m00 + left.m12 * right.m01 + left.m22 * right.m02 + left.m32 * right.m03, left.m03 * right.m00 + left.m13 * right.m01 + left.m23 * right.m02 + left.m33 * right.m03, left.m00 * right.m10 + left.m10 * right.m11 + left.m20 * right.m12 + left.m30 * right.m13, left.m01 * right.m10 + left.m11 * right.m11 + left.m21 * right.m12 + left.m31 * right.m13, left.m02 * right.m10 + left.m12 * right.m11 + left.m22 * right.m12 + left.m32 * right.m13, left.m03 * right.m10 + left.m13 * right.m11 + left.m23 * right.m12 + left.m33 * right.m13, left.m00 * right.m20 + left.m10 * right.m21 + left.m20 * right.m22 + left.m30 * right.m23, left.m01 * right.m20 + left.m11 * right.m21 + left.m21 * right.m22 + left.m31 * right.m23, left.m02 * right.m20 + left.m12 * right.m21 + left.m22 * right.m22 + left.m32 * right.m23, left.m03 * right.m20 + left.m13 * right.m21 + left.m23 * right.m22 + left.m33 * right.m23, left.m00 * right.m30 + left.m10 * right.m31 + left.m20 * right.m32 + left.m30 * right.m33, left.m01 * right.m30 + left.m11 * right.m31 + left.m21 * right.m32 + left.m31 * right.m33, left.m02 * right.m30 + left.m12 * right.m31 + left.m22 * right.m32 + left.m32 * right.m33, left.m03 * right.m30 + left.m13 * right.m31 + left.m23 * right.m32 + left.m33 * right.m33); } } /** * Set the values within this matrix to the supplied float values. 
The matrix will look like this:<br><br> * * m00, m10, m20, m30<br> * m01, m11, m21, m31<br> * m02, m12, m22, m32<br> * m03, m13, m23, m33 * * @return this */ public Matrix4f set(float m00, float m01, float m02, float m03, float m10, float m11, float m12, float m13, float m20, float m21, float m22, float m23, float m30, float m31, float m32, float m33) { this.m00 = m00; this.m01 = m01; this.m02 = m02; this.m03 = m03; this.m10 = m10; this.m11 = m11; this.m12 = m12; this.m13 = m13; this.m20 = m20; this.m21 = m21; this.m22 = m22; this.m23 = m23; this.m30 = m30; this.m31 = m31; this.m32 = m32; this.m33 = m33; return this; } /** * Set the values in the matrix using a float array. The results will look like this:<br><br> * * 0, 4, 8, 12<br> * 1, 5, 9, 13<br> * 2, 6, 10, 14<br> * 3, 7, 11, 15<br> * * @return this */ public Matrix4f set(float m[], int off) { m00 = m[off+0]; m01 = m[off+1]; m02 = m[off+2]; m03 = m[off+3]; m10 = m[off+4]; m11 = m[off+5]; m12 = m[off+6]; m13 = m[off+7]; m20 = m[off+8]; m21 = m[off+9]; m22 = m[off+10]; m23 = m[off+11]; m30 = m[off+12]; m31 = m[off+13]; m32 = m[off+14]; m33 = m[off+15]; return this; } /** * Set the values in the matrix using a float array. The results will look like this:<br><br> * * 0, 4, 8, 12<br> * 1, 5, 9, 13<br> * 2, 6, 10, 14<br> * 3, 7, 11, 15<br> * * @see #set(float[], int) * * @return this */ public Matrix4f set(float m[]) { return set(m, 0); } /** * Set the values in the matrix using a FloatBuffer. 
     * The results will look like this:<br><br>
     *
     * 0, 4, 8, 12<br>
     * 1, 5, 9, 13<br>
     * 2, 6, 10, 14<br>
     * 3, 7, 11, 15<br>
     *
     * @return this
     */
    public Matrix4f set(FloatBuffer buffer) {
        // Reads 16 consecutive floats, column by column, advancing the buffer position.
        m00 = buffer.get();
        m01 = buffer.get();
        m02 = buffer.get();
        m03 = buffer.get();
        m10 = buffer.get();
        m11 = buffer.get();
        m12 = buffer.get();
        m13 = buffer.get();
        m20 = buffer.get();
        m21 = buffer.get();
        m22 = buffer.get();
        m23 = buffer.get();
        m30 = buffer.get();
        m31 = buffer.get();
        m32 = buffer.get();
        m33 = buffer.get();
        return this;
    }

    /** Returns the determinant of this matrix */
    public float determinant() {
        // Laplace expansion via products of 2x2 sub-determinants; the exact
        // operation order is kept as written since it affects float rounding.
        return (m00 * m11 - m01 * m10) * (m22 * m33 - m23 * m32) - (m00 * m12 - m02 * m10) * (m21 * m33 - m23 * m31)
             + (m00 * m13 - m03 * m10) * (m21 * m32 - m22 * m31) + (m01 * m12 - m02 * m11) * (m20 * m33 - m23 * m30)
             - (m01 * m13 - m03 * m11) * (m20 * m32 - m22 * m30) + (m02 * m13 - m03 * m12) * (m20 * m31 - m21 * m30);
    }

    /** Returns the determinant of the supplied matrix */
    public static float determinant(Matrix4f source) {
        // Same expansion as the instance method, applied to source.
        return (source.m00 * source.m11 - source.m01 * source.m10) * (source.m22 * source.m33 - source.m23 * source.m32)
             - (source.m00 * source.m12 - source.m02 * source.m10) * (source.m21 * source.m33 - source.m23 * source.m31)
             + (source.m00 * source.m13 - source.m03 * source.m10) * (source.m21 * source.m32 - source.m22 * source.m31)
             + (source.m01 * source.m12 - source.m02 * source.m11) * (source.m20 * source.m33 - source.m23 * source.m30)
             - (source.m01 * source.m13 - source.m03 * source.m11) * (source.m20 * source.m32 - source.m22 * source.m30)
             + (source.m02 * source.m13 - source.m03 * source.m12) * (source.m20 * source.m31 - source.m21 * source.m30);
    }

    /**
     * Invert this matrix and write the result into <code>dest</code>.
     * <p>
     * Note: returns <code>this</code> (the unmodified source), not <code>dest</code>.
     *
     * @param dest
     * @return this
     */
    public Matrix4f invert(Matrix4f dest) {
        invert(this, dest);
        return this;
    }

    /**
     * Invert this matrix.
* * @return this */ public Matrix4f invert() { return invert(this); } /** * Invert the supplied matrix and store the result in <code>dest</code>. * * @param source * the matrix to invert * @param dest * the matrix to hold the result */ public static void invert(Matrix4f source, Matrix4f dest) { float s = source.determinant(); if (s == 0.0f) { dest.set(source); return; } s = 1.0f / s; if (source != dest) { dest.m00 = (source.m11 * (source.m22 * source.m33 - source.m23 * source.m32) + source.m12 * (source.m23 * source.m31 - source.m21 * source.m33) + source.m13 * (source.m21 * source.m32 - source.m22 * source.m31)) * s; dest.m01 = (source.m21 * (source.m02 * source.m33 - source.m03 * source.m32) + source.m22 * (source.m03 * source.m31 - source.m01 * source.m33) + source.m23 * (source.m01 * source.m32 - source.m02 * source.m31)) * s; dest.m02 = (source.m31 * (source.m02 * source.m13 - source.m03 * source.m12) + source.m32 * (source.m03 * source.m11 - source.m01 * source.m13) + source.m33 * (source.m01 * source.m12 - source.m02 * source.m11)) * s; dest.m03 = (source.m01 * (source.m13 * source.m22 - source.m12 * source.m23) + source.m02 * (source.m11 * source.m23 - source.m13 * source.m21) + source.m03 * (source.m12 * source.m21 - source.m11 * source.m22)) * s; dest.m10 = (source.m12 * (source.m20 * source.m33 - source.m23 * source.m30) + source.m13 * (source.m22 * source.m30 - source.m20 * source.m32) + source.m10 * (source.m23 * source.m32 - source.m22 * source.m33)) * s; dest.m11 = (source.m22 * (source.m00 * source.m33 - source.m03 * source.m30) + source.m23 * (source.m02 * source.m30 - source.m00 * source.m32) + source.m20 * (source.m03 * source.m32 - source.m02 * source.m33)) * s; dest.m12 = (source.m32 * (source.m00 * source.m13 - source.m03 * source.m10) + source.m33 * (source.m02 * source.m10 - source.m00 * source.m12) + source.m30 * (source.m03 * source.m12 - source.m02 * source.m13)) * s; dest.m13 = (source.m02 * (source.m13 * source.m20 - source.m10 * 
source.m23) + source.m03 * (source.m10 * source.m22 - source.m12 * source.m20) + source.m00 * (source.m12 * source.m23 - source.m13 * source.m22)) * s; dest.m20 = (source.m13 * (source.m20 * source.m31 - source.m21 * source.m30) + source.m10 * (source.m21 * source.m33 - source.m23 * source.m31) + source.m11 * (source.m23 * source.m30 - source.m20 * source.m33)) * s; dest.m21 = (source.m23 * (source.m00 * source.m31 - source.m01 * source.m30) + source.m20 * (source.m01 * source.m33 - source.m03 * source.m31) + source.m21 * (source.m03 * source.m30 - source.m00 * source.m33)) * s; dest.m22 = (source.m33 * (source.m00 * source.m11 - source.m01 * source.m10) + source.m30 * (source.m01 * source.m13 - source.m03 * source.m11) + source.m31 * (source.m03 * source.m10 - source.m00 * source.m13)) * s; dest.m23 = (source.m03 * (source.m11 * source.m20 - source.m10 * source.m21) + source.m00 * (source.m13 * source.m21 - source.m11 * source.m23) + source.m01 * (source.m10 * source.m23 - source.m13 * source.m20)) * s; dest.m30 = (source.m10 * (source.m22 * source.m31 - source.m21 * source.m32) + source.m11 * (source.m20 * source.m32 - source.m22 * source.m30) + source.m12 * (source.m21 * source.m30 - source.m20 * source.m31)) * s; dest.m31 = (source.m20 * (source.m02 * source.m31 - source.m01 * source.m32) + source.m21 * (source.m00 * source.m32 - source.m02 * source.m30) + source.m22 * (source.m01 * source.m30 - source.m00 * source.m31)) * s; dest.m32 = (source.m30 * (source.m02 * source.m11 - source.m01 * source.m12) + source.m31 * (source.m00 * source.m12 - source.m02 * source.m10) + source.m32 * (source.m01 * source.m10 - source.m00 * source.m11)) * s; dest.m33 = (source.m00 * (source.m11 * source.m22 - source.m12 * source.m21) + source.m01 * (source.m12 * source.m20 - source.m10 * source.m22) + source.m02 * (source.m10 * source.m21 - source.m11 * source.m20)) * s; } else { dest.set((source.m11 * (source.m22 * source.m33 - source.m23 * source.m32) + source.m12 * (source.m23 
* source.m31 - source.m21 * source.m33) + source.m13 * (source.m21 * source.m32 - source.m22 * source.m31)) * s, (source.m21 * (source.m02 * source.m33 - source.m03 * source.m32) + source.m22 * (source.m03 * source.m31 - source.m01 * source.m33) + source.m23 * (source.m01 * source.m32 - source.m02 * source.m31)) * s, (source.m31 * (source.m02 * source.m13 - source.m03 * source.m12) + source.m32 * (source.m03 * source.m11 - source.m01 * source.m13) + source.m33 * (source.m01 * source.m12 - source.m02 * source.m11)) * s, (source.m01 * (source.m13 * source.m22 - source.m12 * source.m23) + source.m02 * (source.m11 * source.m23 - source.m13 * source.m21) + source.m03 * (source.m12 * source.m21 - source.m11 * source.m22)) * s, (source.m12 * (source.m20 * source.m33 - source.m23 * source.m30) + source.m13 * (source.m22 * source.m30 - source.m20 * source.m32) + source.m10 * (source.m23 * source.m32 - source.m22 * source.m33)) * s, (source.m22 * (source.m00 * source.m33 - source.m03 * source.m30) + source.m23 * (source.m02 * source.m30 - source.m00 * source.m32) + source.m20 * (source.m03 * source.m32 - source.m02 * source.m33)) * s, (source.m32 * (source.m00 * source.m13 - source.m03 * source.m10) + source.m33 * (source.m02 * source.m10 - source.m00 * source.m12) + source.m30 * (source.m03 * source.m12 - source.m02 * source.m13)) * s, (source.m02 * (source.m13 * source.m20 - source.m10 * source.m23) + source.m03 * (source.m10 * source.m22 - source.m12 * source.m20) + source.m00 * (source.m12 * source.m23 - source.m13 * source.m22)) * s, (source.m13 * (source.m20 * source.m31 - source.m21 * source.m30) + source.m10 * (source.m21 * source.m33 - source.m23 * source.m31) + source.m11 * (source.m23 * source.m30 - source.m20 * source.m33)) * s, (source.m23 * (source.m00 * source.m31 - source.m01 * source.m30) + source.m20 * (source.m01 * source.m33 - source.m03 * source.m31) + source.m21 * (source.m03 * source.m30 - source.m00 * source.m33)) * s, (source.m33 * (source.m00 * 
source.m11 - source.m01 * source.m10) + source.m30 * (source.m01 * source.m13 - source.m03 * source.m11) + source.m31 * (source.m03 * source.m10 - source.m00 * source.m13)) * s, (source.m03 * (source.m11 * source.m20 - source.m10 * source.m21) + source.m00 * (source.m13 * source.m21 - source.m11 * source.m23) + source.m01 * (source.m10 * source.m23 - source.m13 * source.m20)) * s, (source.m10 * (source.m22 * source.m31 - source.m21 * source.m32) + source.m11 * (source.m20 * source.m32 - source.m22 * source.m30) + source.m12 * (source.m21 * source.m30 - source.m20 * source.m31)) * s, (source.m20 * (source.m02 * source.m31 - source.m01 * source.m32) + source.m21 * (source.m00 * source.m32 - source.m02 * source.m30) + source.m22 * (source.m01 * source.m30 - source.m00 * source.m31)) * s, (source.m30 * (source.m02 * source.m11 - source.m01 * source.m12) + source.m31 * (source.m00 * source.m12 - source.m02 * source.m10) + source.m32 * (source.m01 * source.m10 - source.m00 * source.m11)) * s, (source.m00 * (source.m11 * source.m22 - source.m12 * source.m21) + source.m01 * (source.m12 * source.m20 - source.m10 * source.m22) + source.m02 * (source.m10 * source.m21 - source.m11 * source.m20)) * s ); } } /** * Multiply this matrix by the supplied scalar value. * * @param scalar * the scalar value to multiply each matrix element by * @return this */ public Matrix4f mul(float scalar) { m00 *= scalar; m01 *= scalar; m02 *= scalar; m03 *= scalar; m10 *= scalar; m11 *= scalar; m12 *= scalar; m13 *= scalar; m20 *= scalar; m21 *= scalar; m22 *= scalar; m23 *= scalar; m30 *= scalar; m31 *= scalar; m32 *= scalar; m33 *= scalar; return this; } /** * Multiply the supplied <code>source</code> matrix by the supplied * <code>scalar</code> and store the result in <code>dest</code>. 
* * @param source * the source matrix * @param scalar * the scalar * @param dest * willd hold the result */ public static void mul(Matrix4f source, float scalar, Matrix4f dest) { dest.m00 = source.m00 * scalar; dest.m01 = source.m01 * scalar; dest.m02 = source.m02 * scalar; dest.m03 = source.m03 * scalar; dest.m10 = source.m10 * scalar; dest.m11 = source.m11 * scalar; dest.m12 = source.m12 * scalar; dest.m13 = source.m13 * scalar; dest.m20 = source.m20 * scalar; dest.m21 = source.m21 * scalar; dest.m22 = source.m22 * scalar; dest.m23 = source.m23 * scalar; dest.m30 = source.m30 * scalar; dest.m31 = source.m31 * scalar; dest.m32 = source.m32 * scalar; dest.m33 = source.m33 * scalar; } /** * Transpose this matrix and store the result in <code>dest</code>. * * @param dest * @return this */ public Matrix4f transpose(Matrix4f dest) { transpose(this, dest); return this; } /** * Transpose this matrix. Modifies the matrix directly. * * @return this */ public Matrix4f transpose() { return transpose(this); } /** * Transpose the original matrix and store the results into the destination Matrix4f. * * @param original * the matrix to transpose * @param dest * will contain the result */ public static void transpose(Matrix4f original, Matrix4f dest) { if (original != dest) { dest.m00 = original.m00; dest.m01 = original.m10; dest.m02 = original.m20; dest.m03 = original.m30; dest.m10 = original.m01; dest.m11 = original.m11; dest.m12 = original.m21; dest.m13 = original.m31; dest.m20 = original.m02; dest.m21 = original.m12; dest.m22 = original.m22; dest.m23 = original.m32; dest.m30 = original.m03; dest.m31 = original.m13; dest.m32 = original.m23; dest.m33 = original.m33; } else { dest.set(original.m00, original.m10, original.m20, original.m30, original.m01, original.m11, original.m21, original.m31, original.m02, original.m12, original.m22, original.m32, original.m03, original.m13, original.m23, original.m33); } } /** * Set this matrix to be a simple translation matrix. 
     * <p>
     * The resulting matrix can be multiplied against another transformation
     * matrix to obtain an additional translation.
     *
     * @return this
     */
    public Matrix4f translation(float x, float y, float z) {
        // Start from identity, then set the translation column (m30..m32).
        identity();
        this.m30 = x;
        this.m31 = y;
        this.m32 = z;
        return this;
    }

    /**
     * Set this matrix to be a simple translation matrix.
     * <p>
     * The resulting matrix can be multiplied against another transformation
     * matrix to obtain an additional translation.
     *
     * @return this
     */
    public Matrix4f translation(Vector3f position) {
        return translation(position.x, position.y, position.z);
    }

    /**
     * Return a human-readable, row-wise representation of this matrix using
     * scientific notation with three fractional digits.
     */
    public String toString() {
        DecimalFormat formatter = new DecimalFormat("0.000E0");
        return "Matrix4f { " + formatter.format(this.m00) + ", " + formatter.format(this.m10) + ", " + formatter.format(this.m20) + ", " + formatter.format(this.m30) + ",\n"
             + " " + formatter.format(this.m01) + ", " + formatter.format(this.m11) + ", " + formatter.format(this.m21) + ", " + formatter.format(this.m31) + ",\n"
             + " " + formatter.format(this.m02) + ", " + formatter.format(this.m12) + ", " + formatter.format(this.m22) + ", " + formatter.format(this.m32) + ",\n"
             + " " + formatter.format(this.m03) + ", " + formatter.format(this.m13) + ", " + formatter.format(this.m23) + ", " + formatter.format(this.m33) + " }\n";
    }

    /**
     * Get the current values of <code>this</code> matrix and store them into
     * <code>dest</code>.
     * <p>
     * This is the reverse method of {@link #set(Matrix4f)} and allows to obtain
     * intermediate calculation results when chaining multiple transformations.
     *
     * @param dest
     *            the destination matrix
     * @return this
     */
    public Matrix4f get(Matrix4f dest) {
        dest.set(this);
        return this;
    }

    /**
     * Store this matrix into the supplied {@link FloatBuffer}.
     * <p>
     * This method will increment the position of the given FloatBuffer by 16, if it returns normally.
     *
     * @return this
     */
    public Matrix4f get(FloatBuffer buffer) {
        // Writes 16 floats, column by column, advancing the buffer position.
        buffer.put(this.m00);
        buffer.put(this.m01);
        buffer.put(this.m02);
        buffer.put(this.m03);
        buffer.put(this.m10);
        buffer.put(this.m11);
        buffer.put(this.m12);
        buffer.put(this.m13);
        buffer.put(this.m20);
        buffer.put(this.m21);
        buffer.put(this.m22);
        buffer.put(this.m23);
        buffer.put(this.m30);
        buffer.put(this.m31);
        buffer.put(this.m32);
        buffer.put(this.m33);
        return this;
    }

    /**
     * Store the values of this matrix into the given javax.vecmath.Matrix4f.
     *
     * @param javaxVecmathMatrix
     * @return this
     */
    public Matrix4f toJavaxMatrix(javax.vecmath.Matrix4f javaxVecmathMatrix) {
        // NOTE(review): indices are transposed here because javax.vecmath names its
        // fields row-first (mRC) — the mirror of fromJavaxMatrix(...).
        javaxVecmathMatrix.m00 = m00;
        javaxVecmathMatrix.m10 = m01;
        javaxVecmathMatrix.m20 = m02;
        javaxVecmathMatrix.m30 = m03;
        javaxVecmathMatrix.m01 = m10;
        javaxVecmathMatrix.m11 = m11;
        javaxVecmathMatrix.m21 = m12;
        javaxVecmathMatrix.m31 = m13;
        javaxVecmathMatrix.m02 = m20;
        javaxVecmathMatrix.m12 = m21;
        javaxVecmathMatrix.m22 = m22;
        javaxVecmathMatrix.m32 = m23;
        javaxVecmathMatrix.m03 = m30;
        javaxVecmathMatrix.m13 = m31;
        javaxVecmathMatrix.m23 = m32;
        javaxVecmathMatrix.m33 = m33;
        return this;
    }

    /**
     * Store the values of this matrix into the given org.lwjgl.util.vector.Matrix4f.
     *
     * @param lwjglMatrix
     * @return this
     */
    public Matrix4f toLwjglMatrix(org.lwjgl.util.vector.Matrix4f lwjglMatrix) {
        // LWJGL shares the mCR naming, so this is a straight field-for-field copy.
        lwjglMatrix.m00 = m00;
        lwjglMatrix.m01 = m01;
        lwjglMatrix.m02 = m02;
        lwjglMatrix.m03 = m03;
        lwjglMatrix.m10 = m10;
        lwjglMatrix.m11 = m11;
        lwjglMatrix.m12 = m12;
        lwjglMatrix.m13 = m13;
        lwjglMatrix.m20 = m20;
        lwjglMatrix.m21 = m21;
        lwjglMatrix.m22 = m22;
        lwjglMatrix.m23 = m23;
        lwjglMatrix.m30 = m30;
        lwjglMatrix.m31 = m31;
        lwjglMatrix.m32 = m32;
        lwjglMatrix.m33 = m33;
        return this;
    }

    /**
     * Store the values of this matrix into the given com.badlogic.gdx.math.Matrix4.
* * @param gdxMatrix * @return this */ public Matrix4f toGdxMatrix(com.badlogic.gdx.math.Matrix4 gdxMatrix) { gdxMatrix.val[0] = m00; gdxMatrix.val[1] = m01; gdxMatrix.val[2] = m02; gdxMatrix.val[3] = m03; gdxMatrix.val[4] = m10; gdxMatrix.val[5] = m11; gdxMatrix.val[6] = m12; gdxMatrix.val[7] = m13; gdxMatrix.val[8] = m20; gdxMatrix.val[9] = m21; gdxMatrix.val[10] = m22; gdxMatrix.val[11] = m23; gdxMatrix.val[12] = m30; gdxMatrix.val[13] = m31; gdxMatrix.val[14] = m32; gdxMatrix.val[15] = m33; return this; } /** * Store this matrix into the supplied float array. * * @return this */ public Matrix4f get(float[] arr, int offset) { arr[offset+0] = this.m00; arr[offset+1] = this.m01; arr[offset+2] = this.m02; arr[offset+3] = this.m03; arr[offset+4] = this.m10; arr[offset+5] = this.m11; arr[offset+6] = this.m12; arr[offset+7] = this.m13; arr[offset+8] = this.m20; arr[offset+9] = this.m21; arr[offset+10] = this.m22; arr[offset+11] = this.m23; arr[offset+12] = this.m30; arr[offset+13] = this.m31; arr[offset+14] = this.m32; arr[offset+15] = this.m33; return this; } /** * Set all the values within this matrix to <code>0</code>. * * @return this */ public Matrix4f zero() { identity(); this.m00 = 0.0f; this.m11 = 0.0f; this.m22 = 0.0f; this.m33 = 0.0f; return this; } /** * Set this matrix to be a simple scale matrix. * * @param x * the scale in x * @param y * the scale in y * @param z * the scale in z * @return this */ public Matrix4f scaling(float x, float y, float z) { identity(); m00 = x; m11 = y; m22 = z; return this; } /** * Set this matrix to be a simple scale matrix. * * @param scale * the scale applied to each dimension * @return this */ public Matrix4f scaling(Vector3f scale) { identity(); m00 = scale.x; m11 = scale.y; m22 = scale.z; return this; } /** * Set the given matrix <code>dest</code> to be a simple scale matrix. 
     *
     * @param scale
     *            the scale applied to each dimension
     * @param dest
     *            will hold the result
     */
    public static void scaling(Vector3f scale, Matrix4f dest) {
        dest.identity();
        dest.m00 = scale.x;
        dest.m11 = scale.y;
        dest.m22 = scale.z;
    }

    /**
     * Set the given matrix <code>dest</code> to be a simple scale matrix.
     * (The original javadoc said "this matrix", but the method writes <code>dest</code>
     * and leaves <code>this</code> untouched.)
     *
     * @param x
     *            the scale in x
     * @param y
     *            the scale in y
     * @param z
     *            the scale in z
     * @param dest
     *            will hold the result
     * @return this
     */
    public Matrix4f scaling(float x, float y, float z, Matrix4f dest) {
        dest.identity();
        dest.m00 = x;
        dest.m11 = y;
        dest.m22 = z;
        return this;
    }

    /**
     * Set this matrix to a rotation matrix which rotates the given radians about a given axis.
     *
     * @param angle
     *            the angle in radians (NOTE(review): the original javadoc said "degrees",
     *            but the value is passed straight to Math.cos/Math.sin in
     *            {@link #rotation(float, float, float, float)}, i.e. interpreted as radians)
     * @param axis
     *            the axis to rotate about
     * @return this
     */
    public Matrix4f rotation(float angle, Vector3f axis) {
        return rotation(angle, axis.x, axis.y, axis.z);
    }

    /**
     * Set this matrix to a rotation matrix which rotates the given radians about a given axis.
     * <p>
     * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Rotation_matrix_from_axis_and_angle">http://en.wikipedia.org</a>
     *
     * @return this
     */
    public Matrix4f rotation(float angle, float x, float y, float z) {
        // Axis-angle (Rodrigues) rotation; angle is in radians, (x, y, z) is the axis.
        float cos = (float) Math.cos(angle);
        float sin = (float) Math.sin(angle);
        float C = 1.0f - cos;
        m00 = cos + x * x * C;
        m10 = x * y * C - z * sin;
        m20 = x * z * C + y * sin;
        m30 = 0.0f;
        m01 = y * x * C + z * sin;
        m11 = cos + y * y * C;
        m21 = y * z * C - x * sin;
        m31 = 0.0f;
        m02 = z * x * C - y * sin;
        m12 = z * y * C + x * sin;
        m22 = cos + z * z * C;
        m32 = 0.0f;
        m03 = 0.0f;
        m13 = 0.0f;
        m23 = 0.0f;
        m33 = 1.0f;
        return this;
    }

    /**
     * Set the destination matrix to a rotation matrix which rotates the given radians about a given axis.
     * The result will be stored in <code>dest</code>.
     *
     * @param angle
     *            the angle in radians (NOTE(review): the original javadoc said "degrees",
     *            but the value is forwarded unchanged to {@link #rotation(float, Vector3f)},
     *            which treats it as radians)
     * @param axis
     *            the axis to rotate about
     * @param dest
     *            will hold the result
     */
    public static void rotation(float angle, Vector3f axis, Matrix4f dest) {
        dest.rotation(angle, axis);
    }

    /**
     * Set the upper 3x3 matrix of this {@link Matrix4f} to the given {@link Matrix3f} and the rest to the identity.
     *
     * @param mat
     *            the 3x3 matrix
     * @return this
     */
    public Matrix4f fromMatrix3(Matrix3f mat) {
        fromMatrix3(mat, this);
        return this;
    }

    /**
     * Set the upper 3x3 matrix of the given <code>dest</code> {@link Matrix4f}
     * to the given {@link Matrix3f} and the rest to the identity.
     *
     * @param mat
     *            the 3x3 matrix
     * @param dest
     *            the destination matrix whose upper left 3x3 submatrix will be set to <code>mat</code>
     */
    public static void fromMatrix3(Matrix3f mat, Matrix4f dest) {
        dest.m00 = mat.m00;
        dest.m01 = mat.m01;
        dest.m02 = mat.m02;
        dest.m03 = 0.0f;
        dest.m10 = mat.m10;
        dest.m11 = mat.m11;
        dest.m12 = mat.m12;
        dest.m13 = 0.0f;
        dest.m20 = mat.m20;
        dest.m21 = mat.m21;
        dest.m22 = mat.m22;
        dest.m23 = 0.0f;
        dest.m30 = 0.0f;
        dest.m31 = 0.0f;
        dest.m32 = 0.0f;
        dest.m33 = 1.0f;
    }

    /**
     * Transform/multiply the given vector by this matrix and store the result in that vector.
     *
     * @see Vector4f#mul(Matrix4f)
     *
     * @param v
     *            the vector to transform and to hold the final result
     * @return this
     */
    public Matrix4f transform(Vector4f v) {
        v.mul(this);
        return this;
    }

    /**
     * Transform/multiply the given vector by this matrix and store the result in <code>dest</code>.
     *
     * @see Vector4f#mul(Matrix4f, Vector4f)
     *
     * @param v
     *            the vector to transform
     * @param dest
     *            will contain the result
     * @return this
     */
    public Matrix4f transform(Vector4f v, Vector4f dest) {
        v.mul(this, dest);
        return this;
    }

    /**
     * Transform/multiply the given vector by the given matrix and store the result in that vector.
* * @see Vector4f#mul(Matrix4f) * * @param mat * the matrix * @param v * the vector to transform and to hold the final result */ public static void transform(Matrix4f mat, Vector4f v) { v.mul(mat); } /** * Apply scaling to this matrix by scaling the unit axes by the given x, * y and z factors. * <p> * If <code>M</code> is <code>this</code> matrix and <code>S</code> the scaling matrix, * then the new matrix will be <code>M * S</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * S * v</code> * , the scaling will be applied first! * * @param x * the factor of the x component * @param y * the factor of the y component * @param z * the factor of the z component * @return this */ public Matrix4f scale(float x, float y, float z) { // scale matrix elements: // m00 = x, m11 = y, m22 = z // m33 = 1 // all others = 0 m00 = m00 * x; m01 = m01 * x; m02 = m02 * x; m03 = m03 * x; m10 = m10 * y; m11 = m11 * y; m12 = m12 * y; m13 = m13 * y; m20 = m20 * z; m21 = m21 * z; m22 = m22 * z; m23 = m23 * z; return this; } /** * Apply scaling to this matrix by uniformly scaling all unit axes by the given <code>xyz</code> factor. * <p> * If <code>M</code> is <code>this</code> matrix and <code>S</code> the scaling matrix, * then the new matrix will be <code>M * S</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * S * v</code> * , the scaling will be applied first! * * @see #scale(float, float, float) * * @param xyz * the factor for all components * @return this */ public Matrix4f scale(float xyz) { return scale(xyz, xyz, xyz); } /** * Apply rotation about the X axis to this matrix by rotating the given amount of degrees. * <p> * If <code>M</code> is <code>this</code> matrix and <code>R</code> the rotation matrix, * then the new matrix will be <code>M * R</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * R * v</code> * , the rotation will be applied first! 
     * <p>
     * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations">http://en.wikipedia.org</a>
     *
     * @param ang
     *            the angle in degrees
     * @return this
     */
    public Matrix4f rotateX(float ang) {
        float cos = (float) Math.cos(Math.toRadians(ang));
        float sin = (float) Math.sin(Math.toRadians(ang));
        // Non-trivial elements of the X-axis rotation matrix R (column-major).
        float rm11 = cos;
        float rm21 = -sin;
        float rm12 = sin;
        float rm22 = cos;

        // add temporaries for dependent values
        // (column 1 of the product depends on columns 1 and 2 of this matrix,
        // which are both overwritten below)
        float nm10 = m10 * rm11 + m20 * rm12;
        float nm11 = m11 * rm11 + m21 * rm12;
        float nm12 = m12 * rm11 + m22 * rm12;
        float nm13 = m13 * rm11 + m23 * rm12;
        // set non-dependent values directly
        // (column 2 still reads the original m10..m13, which are not yet modified)
        m20 = m10 * rm21 + m20 * rm22;
        m21 = m11 * rm21 + m21 * rm22;
        m22 = m12 * rm21 + m22 * rm22;
        m23 = m13 * rm21 + m23 * rm22;
        // set other values
        m10 = nm10;
        m11 = nm11;
        m12 = nm12;
        m13 = nm13;
        return this;
    }

    /**
     * Apply rotation about the Y axis to this matrix by rotating the given amount of degrees.
     * <p>
     * If <code>M</code> is <code>this</code> matrix and <code>R</code> the rotation matrix,
     * then the new matrix will be <code>M * R</code>. So when transforming a
     * vector <code>v</code> with the new matrix by using <code>M * R * v</code>
     * , the rotation will be applied first!
* <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations">http://en.wikipedia.org</a> * * @param ang * the angle in degrees * @return this */ public Matrix4f rotateY(float ang) { float cos = (float) Math.cos(Math.toRadians(ang)); float sin = (float) Math.sin(Math.toRadians(ang)); float rm00 = cos; float rm20 = sin; float rm02 = -sin; float rm22 = cos; // add temporaries for dependent values float nm00 = m00 * rm00 + m20 * rm02; float nm01 = m01 * rm00 + m21 * rm02; float nm02 = m02 * rm00 + m22 * rm02; float nm03 = m03 * rm00 + m23 * rm02; // set non-dependent values directly m20 = m00 * rm20 + m20 * rm22; m21 = m01 * rm20 + m21 * rm22; m22 = m02 * rm20 + m22 * rm22; m23 = m03 * rm20 + m23 * rm22; // set other values m00 = nm00; m01 = nm01; m02 = nm02; m03 = nm03; return this; } /** * Apply rotation about the Z axis to this matrix by rotating the given amount of degrees. * <p> * If <code>M</code> is <code>this</code> matrix and <code>R</code> the rotation matrix, * then the new matrix will be <code>M * R</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * R * v</code> * , the rotation will be applied first! 
* <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Basic_rotations">http://en.wikipedia.org</a> * * @param ang * the angle in degrees * @return this */ public Matrix4f rotateZ(float ang) { float cos = (float) Math.cos(Math.toRadians(ang)); float sin = (float) Math.sin(Math.toRadians(ang)); float rm00 = cos; float rm10 = -sin; float rm01 = sin; float rm11 = cos; // add temporaries for dependent values float nm00 = m00 * rm00 + m10 * rm01; float nm01 = m01 * rm00 + m11 * rm01; float nm02 = m02 * rm00 + m12 * rm01; float nm03 = m03 * rm00 + m13 * rm01; float nm10 = m00 * rm10 + m10 * rm11; float nm11 = m01 * rm10 + m11 * rm11; float nm12 = m02 * rm10 + m12 * rm11; float nm13 = m03 * rm10 + m13 * rm11; // set other values m00 = nm00; m01 = nm01; m02 = nm02; m03 = nm03; m10 = nm10; m11 = nm11; m12 = nm12; m13 = nm13; return this; } /** * Apply rotation to this matrix by rotating the given amount of degrees * about the given axis specified as x, y and z components. * <p> * If <code>M</code> is <code>this</code> matrix and <code>R</code> the rotation matrix, * then the new matrix will be <code>M * R</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * R * v</code> * , the rotation will be applied first! 
* <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Rotation_matrix_from_axis_and_angle">http://en.wikipedia.org</a> * * @param ang * the angle in degrees * @param x * the x component of the axis * @param y * the y component of the axis * @param z * the z component of the axis * @return this */ public Matrix4f rotate(float ang, float x, float y, float z) { float s = (float) Math.sin(Math.toRadians(ang)); float c = (float) Math.cos(Math.toRadians(ang)); float C = 1.0f - c; // rotation matrix elements: // m30, m31, m32, m03, m13, m23 = 0 // m33 = 1 float rm00 = x * x * C + c; float rm01 = y * x * C + z * s; float rm02 = z * x * C - y * s; float rm10 = x * y * C - z * s; float rm11 = y * y * C + c; float rm12 = z * y * C + x * s; float rm20 = x * z * C + y * s; float rm21 = y * z * C - x * s; float rm22 = z * z * C + c; // add temporaries for dependent values float nm00 = m00 * rm00 + m10 * rm01 + m20 * rm02; float nm01 = m01 * rm00 + m11 * rm01 + m21 * rm02; float nm02 = m02 * rm00 + m12 * rm01 + m22 * rm02; float nm03 = m03 * rm00 + m13 * rm01 + m23 * rm02; float nm10 = m00 * rm10 + m10 * rm11 + m20 * rm12; float nm11 = m01 * rm10 + m11 * rm11 + m21 * rm12; float nm12 = m02 * rm10 + m12 * rm11 + m22 * rm12; float nm13 = m03 * rm10 + m13 * rm11 + m23 * rm12; // set non-dependent values directly m20 = m00 * rm20 + m10 * rm21 + m20 * rm22; m21 = m01 * rm20 + m11 * rm21 + m21 * rm22; m22 = m02 * rm20 + m12 * rm21 + m22 * rm22; m23 = m03 * rm20 + m13 * rm21 + m23 * rm22; // set other values m00 = nm00; m01 = nm01; m02 = nm02; m03 = nm03; m10 = nm10; m11 = nm11; m12 = nm12; m13 = nm13; return this; } /** * Apply a translation to this matrix by translating by the given number of * units in x, y and z. * <p> * If <code>M</code> is <code>this</code> matrix and <code>T</code> the translation * matrix, then the new matrix will be <code>M * T</code>. 
So when * transforming a vector <code>v</code> with the new matrix by using * <code>M * T * v</code>, the translation will be applied first! * * @param point * @return this */ public Matrix4f translate(Vector3f point) { return translate(point.x, point.y, point.z); } /** * Apply a translation to this matrix by translating by the given number of * units in x, y and z. * <p> * If <code>M</code> is <code>this</code> matrix and <code>T</code> the translation * matrix, then the new matrix will be <code>M * T</code>. So when * transforming a vector <code>v</code> with the new matrix by using * <code>M * T * v</code>, the translation will be applied first! * * @param x * @param y * @param z * @return this */ public Matrix4f translate(float x, float y, float z) { Matrix4f c = this; // translation matrix elements: // m00, m11, m22, m33 = 1 // m30 = x, m31 = y, m32 = z // all others = 0 c.m30 = c.m00 * x + c.m10 * y + c.m20 * z + c.m30; c.m31 = c.m01 * x + c.m11 * y + c.m21 * z + c.m31; c.m32 = c.m02 * x + c.m12 * y + c.m22 * z + c.m32; c.m33 = c.m03 * x + c.m13 * y + c.m23 * z + c.m33; return this; } public void writeExternal(ObjectOutput out) throws IOException { out.writeFloat(m00); out.writeFloat(m01); out.writeFloat(m02); out.writeFloat(m03); out.writeFloat(m10); out.writeFloat(m11); out.writeFloat(m12); out.writeFloat(m13); out.writeFloat(m20); out.writeFloat(m21); out.writeFloat(m22); out.writeFloat(m23); out.writeFloat(m30); out.writeFloat(m31); out.writeFloat(m32); out.writeFloat(m33); } public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException { m00 = in.readFloat(); m01 = in.readFloat(); m02 = in.readFloat(); m03 = in.readFloat(); m10 = in.readFloat(); m11 = in.readFloat(); m12 = in.readFloat(); m13 = in.readFloat(); m20 = in.readFloat(); m21 = in.readFloat(); m22 = in.readFloat(); m23 = in.readFloat(); m30 = in.readFloat(); m31 = in.readFloat(); m32 = in.readFloat(); m33 = in.readFloat(); } /** * Apply an orthographic projection 
transformation to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>O</code> the orthographic projection matrix, * then the new matrix will be <code>M * O</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * O * v</code> * , the orthographic projection transformation will be applied first! * <p> * Reference: <a href="http://www.songho.ca/opengl/gl_projectionmatrix.html">http://www.songho.ca</a> * * @param left * the distance from the center to the left frustum edge * @param right * the distance from the center to the right frustum edge * @param bottom * the distance from the center to the bottom frustum edge * @param top * the distance from the center to the top frustum edge * @param zNear * near clipping plane distance * @param zFar * far clipping plane distance * @return this */ public Matrix4f ortho(float left, float right, float bottom, float top, float zNear, float zFar) { // calculate right matrix elements float rm00 = 2.0f / (right - left); float rm11 = 2.0f / (top - bottom); float rm22 = -2.0f / (zFar - zNear); float rm30 = -(right + left) / (right - left); float rm31 = -(top + bottom) / (top - bottom); float rm32 = -(zFar + zNear) / (zFar - zNear); // perform optimized multiplication // compute the last column first, because other rows do not depend on it m30 = m00 * rm30 + m10 * rm31 + m20 * rm32 + m30; m31 = m01 * rm30 + m11 * rm31 + m21 * rm32 + m31; m32 = m02 * rm30 + m12 * rm31 + m22 * rm32 + m32; m33 = m03 * rm30 + m13 * rm31 + m23 * rm32 + m33; m00 = m00 * rm00; m01 = m01 * rm00; m02 = m02 * rm00; m03 = m03 * rm00; m10 = m10 * rm11; m11 = m11 * rm11; m12 = m12 * rm11; m13 = m13 * rm11; m20 = m20 * rm22; m21 = m21 * rm22; m22 = m22 * rm22; m23 = m23 * rm22; return this; } /** * Set this matrix to be an orthographic projection transformation. 
* <p> * Reference: <a href="http://www.songho.ca/opengl/gl_projectionmatrix.html">http://www.songho.ca</a> * * @param left * the distance from the center to the left frustum edge * @param right * the distance from the center to the right frustum edge * @param bottom * the distance from the center to the bottom frustum edge * @param top * the distance from the center to the top frustum edge * @param zNear * near clipping plane distance * @param zFar * far clipping plane distance * @return this */ public Matrix4f setOrtho(float left, float right, float bottom, float top, float zNear, float zFar) { m00 = 2.0f / (right - left); m01 = 0.0f; m02 = 0.0f; m03 = 0.0f; m10 = 0.0f; m11 = 2.0f / (top - bottom); m12 = 0.0f; m13 = 0.0f; m20 = 0.0f; m21 = 0.0f; m22 = -2.0f / (zFar - zNear); m23 = 0.0f; m30 = -(right + left) / (right - left); m31 = -(top + bottom) / (top - bottom); m32 = -(zFar + zNear) / (zFar - zNear); m33 = 1.0f; return this; } /** * Apply a rotation transformation to this matrix to make <code>-z</code> point along <code>dir</code>. * <p> * If <code>M</code> is <code>this</code> matrix and <code>L</code> the lookalong rotation matrix, * then the new matrix will be <code>M * L</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * L * v</code> * , the lookalong rotation transformation will be applied first! * * @param dir * the direction in space to look along * @param up * the direction of 'up' * @return this */ public Matrix4f lookAlong(Vector3f dir, Vector3f up) { return lookAlong(dir.x, dir.y, dir.z, up.x, up.y, up.z); } /** * Apply a rotation transformation to this matrix to make <code>-z</code> point along <code>dir</code>. * <p> * If <code>M</code> is <code>this</code> matrix and <code>L</code> the lookalong rotation matrix, * then the new matrix will be <code>M * L</code>. 
So when transforming a * vector <code>v</code> with the new matrix by using <code>M * L * v</code> * , the lookalong rotation transformation will be applied first! * <p> * @return this */ public Matrix4f lookAlong(float dirX, float dirY, float dirZ, float upX, float upY, float upZ) { // Normalize direction float dirLength = (float) Math.sqrt(dirX * dirX + dirY * dirY + dirZ * dirZ); dirX /= dirLength; dirY /= dirLength; dirZ /= dirLength; float upLength = (float) Math.sqrt(upX * upX + upY * upY + upZ * upZ); upX /= upLength; upY /= upLength; upZ /= upLength; // right = direction x up float rightX, rightY, rightZ; rightX = dirY * upZ - dirZ * upY; rightY = dirZ * upX - dirX * upZ; rightZ = dirX * upY - dirY * upX; // up = right x direction upX = rightY * dirZ - rightZ * dirY; upY = rightZ * dirX - rightX * dirZ; upZ = rightX * dirY - rightY * dirX; // calculate right matrix elements float rm00 = rightX; float rm01 = upX; float rm02 = -dirX; float rm10 = rightY; float rm11 = upY; float rm12 = -dirY; float rm20 = rightZ; float rm21 = upZ; float rm22 = -dirZ; // perform optimized matrix multiplication // introduce temporaries for dependent results float m00 = this.m00 * rm00 + m10 * rm01 + m20 * rm02; float m01 = this.m01 * rm00 + m11 * rm01 + m21 * rm02; float m02 = this.m02 * rm00 + m12 * rm01 + m22 * rm02; float m03 = this.m03 * rm00 + m13 * rm01 + m23 * rm02; float m10 = this.m00 * rm10 + this.m10 * rm11 + m20 * rm12; float m11 = this.m01 * rm10 + this.m11 * rm11 + m21 * rm12; float m12 = this.m02 * rm10 + this.m12 * rm11 + m22 * rm12; float m13 = this.m03 * rm10 + this.m13 * rm11 + m23 * rm12; m20 = this.m00 * rm20 + this.m10 * rm21 + this.m20 * rm22; m21 = this.m01 * rm20 + this.m11 * rm21 + this.m21 * rm22; m22 = this.m02 * rm20 + this.m12 * rm21 + this.m22 * rm22; m23 = this.m03 * rm20 + this.m13 * rm21 + this.m23 * rm22; // set the rest of the matrix elements this.m00 = m00; this.m01 = m01; this.m02 = m02; this.m03 = m03; this.m10 = m10; this.m11 = m11; 
this.m12 = m12; this.m13 = m13; return this; } /** * Set this matrix to be a "lookat" transformation. * * @param eye * the position of the camera * @param center * the point in space to look at * @param up * the direction of 'up' * @return this */ public Matrix4f setLookAt(Vector3f eye, Vector3f center, Vector3f up) { return setLookAt(eye.x, eye.y, eye.z, center.x, center.y, center.z, up.x, up.y, up.z); } /** * Set this matrix to be a "lookat" transformation. * <p> * @return this */ public Matrix4f setLookAt(float eyeX, float eyeY, float eyeZ, float centerX, float centerY, float centerZ, float upX, float upY, float upZ) { // Compute direction from position to lookAt float dirX, dirY, dirZ; dirX = centerX - eyeX; dirY = centerY - eyeY; dirZ = centerZ - eyeZ; // Normalize direction float dirLength = (float) Math.sqrt( (eyeX - centerX) * (eyeX - centerX) + (eyeY - centerY) * (eyeY - centerY) + (eyeZ - centerZ) * (eyeZ - centerZ)); dirX /= dirLength; dirY /= dirLength; dirZ /= dirLength; // Normalize up float upLength = (float) Math.sqrt(upX * upX + upY * upY + upZ * upZ); upX /= upLength; upY /= upLength; upZ /= upLength; // right = direction x up float rightX, rightY, rightZ; rightX = dirY * upZ - dirZ * upY; rightY = dirZ * upX - dirX * upZ; rightZ = dirX * upY - dirY * upX; // up = right x direction upX = rightY * dirZ - rightZ * dirY; upY = rightZ * dirX - rightX * dirZ; upZ = rightX * dirY - rightY * dirX; m00 = rightX; m01 = upX; m02 = -dirX; m03 = 0.0f; m10 = rightY; m11 = upY; m12 = -dirY; m13 = 0.0f; m20 = rightZ; m21 = upZ; m22 = -dirZ; m23 = 0.0f; m30 = -rightX * eyeX - rightY * eyeY - rightZ * eyeZ; m31 = -upX * eyeX - upY * eyeY - upZ * eyeZ; m32 = dirX * eyeX + dirY * eyeY + dirZ * eyeZ; m33 = 0.0f; return this; } /** * Apply a "lookat" transformation to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>L</code> the lookat matrix, * then the new matrix will be <code>M * L</code>. 
So when transforming a * vector <code>v</code> with the new matrix by using <code>M * L * v</code> * , the lookat transformation will be applied first! * * @param eye * the position of the camera * @param center * the point in space to look at * @param up * the direction of 'up' * @return this */ public Matrix4f lookAt(Vector3f eye, Vector3f center, Vector3f up) { return lookAt(eye.x, eye.y, eye.z, center.x, center.y, center.z, up.x, up.y, up.z); } /** * Apply a "lookat" transformation to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>L</code> the lookat matrix, * then the new matrix will be <code>M * L</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * L * v</code> * , the lookat transformation will be applied first! * <p> * @return this */ public Matrix4f lookAt(float eyeX, float eyeY, float eyeZ, float centerX, float centerY, float centerZ, float upX, float upY, float upZ) { // Compute direction from position to lookAt float dirX, dirY, dirZ; dirX = centerX - eyeX; dirY = centerY - eyeY; dirZ = centerZ - eyeZ; // Normalize direction float dirLength = (float) Math.sqrt( (eyeX - centerX) * (eyeX - centerX) + (eyeY - centerY) * (eyeY - centerY) + (eyeZ - centerZ) * (eyeZ - centerZ)); dirX /= dirLength; dirY /= dirLength; dirZ /= dirLength; // Normalize up float upLength = (float) Math.sqrt(upX * upX + upY * upY + upZ * upZ); upX /= upLength; upY /= upLength; upZ /= upLength; // right = direction x up float rightX, rightY, rightZ; rightX = dirY * upZ - dirZ * upY; rightY = dirZ * upX - dirX * upZ; rightZ = dirX * upY - dirY * upX; // up = right x direction upX = rightY * dirZ - rightZ * dirY; upY = rightZ * dirX - rightX * dirZ; upZ = rightX * dirY - rightY * dirX; // calculate right matrix elements float rm00 = rightX; float rm01 = upX; float rm02 = -dirX; float rm10 = rightY; float rm11 = upY; float rm12 = -dirY; float rm20 = rightZ; float rm21 = upZ; float rm22 = -dirZ; float rm30 = -rightX 
* eyeX - rightY * eyeY - rightZ * eyeZ; float rm31 = -upX * eyeX - upY * eyeY - upZ * eyeZ; float rm32 = dirX * eyeX + dirY * eyeY + dirZ * eyeZ; // perform optimized matrix multiplication // compute last column first, because others do not depend on it m30 = m00 * rm30 + m10 * rm31 + m20 * rm32 + m30; m31 = m01 * rm30 + m11 * rm31 + m21 * rm32 + m31; m32 = m02 * rm30 + m12 * rm31 + m22 * rm32 + m32; m33 = m03 * rm30 + m13 * rm31 + m23 * rm32 + m33; // introduce temporaries for dependent results float m00 = this.m00 * rm00 + m10 * rm01 + m20 * rm02; float m01 = this.m01 * rm00 + m11 * rm01 + m21 * rm02; float m02 = this.m02 * rm00 + m12 * rm01 + m22 * rm02; float m03 = this.m03 * rm00 + m13 * rm01 + m23 * rm02; float m10 = this.m00 * rm10 + this.m10 * rm11 + m20 * rm12; float m11 = this.m01 * rm10 + this.m11 * rm11 + m21 * rm12; float m12 = this.m02 * rm10 + this.m12 * rm11 + m22 * rm12; float m13 = this.m03 * rm10 + this.m13 * rm11 + m23 * rm12; m20 = this.m00 * rm20 + this.m10 * rm21 + this.m20 * rm22; m21 = this.m01 * rm20 + this.m11 * rm21 + this.m21 * rm22; m22 = this.m02 * rm20 + this.m12 * rm21 + this.m22 * rm22; m23 = this.m03 * rm20 + this.m13 * rm21 + this.m23 * rm22; // set the rest of the matrix elements this.m00 = m00; this.m01 = m01; this.m02 = m02; this.m03 = m03; this.m10 = m10; this.m11 = m11; this.m12 = m12; this.m13 = m13; return this; } /** * Apply a symmetric perspective projection frustum transformation to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>P</code> the perspective projection matrix, * then the new matrix will be <code>M * P</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * P * v</code> * , the perspective projection will be applied first! * <p> * @param fovy * the vertical field of view in degrees * @param aspect * the aspect ratio (i.e. 
width / height) * @param zNear * near clipping plane distance * @param zFar * far clipping plane distance * @return this */ public Matrix4f perspective(float fovy, float aspect, float zNear, float zFar) { float h = (float) Math.tan(Math.toRadians(fovy) * 0.5f) * zNear; float w = h * aspect; float fl = -w; float fr = +w; float fb = -h; float ft = +h; return frustum(fl, fr, fb, ft, zNear, zFar); } /** * Set this matrix to be a symmetric perspective projection frustum transformation. * <p> * @param fovy * the vertical field of view in degrees * @param aspect * the aspect ratio (i.e. width / height) * @param zNear * near clipping plane distance * @param zFar * far clipping plane distance * @return this */ public Matrix4f setPerspective(float fovy, float aspect, float zNear, float zFar) { float h = (float) Math.tan(Math.toRadians(fovy) * 0.5f) * zNear; float w = h * aspect; float fl = -w; float fr = +w; float fb = -h; float ft = +h; return setFrustum(fl, fr, fb, ft, zNear, zFar); } /** * Apply an arbitrary perspective projection frustum transformation to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>F</code> the frustum matrix, * then the new matrix will be <code>M * F</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * F * v</code> * , the frustum transformation will be applied first! 
* <p> * Reference: <a href="http://www.songho.ca/opengl/gl_projectionmatrix.html">http://www.songho.ca</a> * * @param left * the distance along the x-axis to the left frustum edge * @param right * the distance along the x-axis to the right frustum edge * @param bottom * the distance along the y-axis to the bottom frustum edge * @param top * the distance along the y-axis to the top frustum edge * @param zNear * the distance along the z-axis to the near clipping plane * @param zFar * the distance along the z-axis to the far clipping plane * @return this */ public Matrix4f frustum(float left, float right, float bottom, float top, float zNear, float zFar) { // calculate right matrix elements float rm00 = 2.0f * zNear / (right - left); float rm11 = 2.0f * zNear / (top - bottom); float rm20 = (right + left) / (right - left); float rm21 = (top + bottom) / (top - bottom); float rm22 = -(zFar + zNear) / (zFar - zNear); float rm32 = -2.0f * zFar * zNear / (zFar - zNear); // perform optimized matrix multiplication float m20 = m00 * rm20 + m10 * rm21 + this.m20 * rm22 - m30; float m21 = m01 * rm20 + m11 * rm21 + this.m21 * rm22 - m31; float m22 = m02 * rm20 + m12 * rm21 + this.m22 * rm22 - m32; float m23 = m03 * rm20 + m13 * rm21 + this.m23 * rm22 - m33; m00 = m00 * rm00; m01 = m01 * rm00; m02 = m02 * rm00; m03 = m03 * rm00; m10 = m10 * rm11; m11 = m11 * rm11; m12 = m12 * rm11; m13 = m13 * rm11; m30 = this.m20 * rm32; m31 = this.m21 * rm32; m32 = this.m22 * rm32; m33 = this.m23 * rm32; this.m20 = m20; this.m21 = m21; this.m22 = m22; this.m23 = m23; return this; } /** * Set this matrix to be an arbitrary perspective projection frustum transformation. 
* <p> * Reference: <a href="http://www.songho.ca/opengl/gl_projectionmatrix.html">http://www.songho.ca</a> * * @param left * the distance along the x-axis to the left frustum edge * @param right * the distance along the x-axis to the right frustum edge * @param bottom * the distance along the y-axis to the bottom frustum edge * @param top * the distance along the y-axis to the top frustum edge * @param zNear * the distance along the z-axis to the near clipping plane * @param zFar * the distance along the z-axis to the far clipping plane * @return this */ public Matrix4f setFrustum(float left, float right, float bottom, float top, float zNear, float zFar) { // calculate right matrix elements m00 = 2.0f * zNear / (right - left); m01 = 0.0f; m02 = 0.0f; m03 = 0.0f; m10 = 0.0f; m11 = 2.0f * zNear / (top - bottom); m12 = 0.0f; m13 = 0.0f; m20 = (right + left) / (right - left); m21 = (top + bottom) / (top - bottom); m22 = -(zFar + zNear) / (zFar - zNear); m23 = 0.0f; m30 = 0.0f; m31 = 0.0f; m32 = -2.0f * zFar * zNear / (zFar - zNear); m33 = 1.0f; return this; } /** * Apply the rotation transformation of the given {@link Quaternion} to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>Q</code> the rotation matrix obtained from the given quaternion, * then the new matrix will be <code>M * Q</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * Q * v</code> * , the quaternion rotation will be applied first! 
* <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Quaternion">http://en.wikipedia.org</a> * * @param quat * the {@link Quaternion} * @return this */ public Matrix4f rotate(Quaternion quat) { float q00 = 2.0f * quat.x * quat.x; float q11 = 2.0f * quat.y * quat.y; float q22 = 2.0f * quat.z * quat.z; float q01 = 2.0f * quat.x * quat.y; float q02 = 2.0f * quat.x * quat.z; float q03 = 2.0f * quat.x * quat.w; float q12 = 2.0f * quat.y * quat.z; float q13 = 2.0f * quat.y * quat.w; float q23 = 2.0f * quat.z * quat.w; float rm00 = 1.0f - q11 - q22; float rm01 = q01 + q23; float rm02 = q02 - q13; float rm10 = q01 - q23; float rm11 = 1.0f - q22 - q00; float rm12 = q12 + q03; float rm20 = q02 + q13; float rm21 = q12 - q03; float rm22 = 1.0f - q11 - q00; float nm00 = m00 * rm00 + m10 * rm01 + m20 * rm02; float nm01 = m01 * rm00 + m11 * rm01 + m21 * rm02; float nm02 = m02 * rm00 + m12 * rm01 + m22 * rm02; float nm03 = m03 * rm00 + m13 * rm01 + m23 * rm02; float nm10 = m00 * rm10 + m10 * rm11 + m20 * rm12; float nm11 = m01 * rm10 + m11 * rm11 + m21 * rm12; float nm12 = m02 * rm10 + m12 * rm11 + m22 * rm12; float nm13 = m03 * rm10 + m13 * rm11 + m23 * rm12; m20 = m00 * rm20 + m10 * rm21 + m20 * rm22; m21 = m01 * rm20 + m11 * rm21 + m21 * rm22; m22 = m02 * rm20 + m12 * rm21 + m22 * rm22; m23 = m03 * rm20 + m13 * rm21 + m23 * rm22; this.m00 = nm00; this.m01 = nm01; this.m02 = nm02; this.m03 = nm03; this.m10 = nm10; this.m11 = nm11; this.m12 = nm12; this.m13 = nm13; return this; } /** * Apply the rotation transformation of the given {@link AngleAxis4f} to this matrix. * <p> * If <code>M</code> is <code>this</code> matrix and <code>A</code> the rotation matrix obtained from the given angle-axis, * then the new matrix will be <code>M * A</code>. So when transforming a * vector <code>v</code> with the new matrix by using <code>M * A * v</code> * , the angle-axis rotation will be applied first! 
* <p> * Reference: <a href="http://en.wikipedia.org/wiki/Rotation_matrix#Axis_and_angle">http://en.wikipedia.org</a> * * @param axisAngle * the {@link AngleAxis4f} (needs to be {@link AngleAxis4f#normalize() normalized}) * @return this */ public Matrix4f rotate(AngleAxis4f axisAngle) { return rotate(axisAngle.angle, axisAngle.x, axisAngle.y, axisAngle.z); } }
Fix setLookAt
src/com/joml/Matrix4f.java
Fix setLookAt
<ide><path>rc/com/joml/Matrix4f.java <ide> m30 = -rightX * eyeX - rightY * eyeY - rightZ * eyeZ; <ide> m31 = -upX * eyeX - upY * eyeY - upZ * eyeZ; <ide> m32 = dirX * eyeX + dirY * eyeY + dirZ * eyeZ; <del> m33 = 0.0f; <add> m33 = 1.0f; <ide> <ide> return this; <ide> }
JavaScript
mit
7d8eeb5059a9f39931573913332f9785172f5a64
0
akiran/react-slick
'use strict'; import {getTrackCSS, getTrackLeft, getTrackAnimateCSS} from './trackHelper'; import helpers from './helpers'; import assign from 'object-assign'; import ReactDOM from 'react-dom'; var EventHandlers = { // Event handler for previous and next changeSlide: function (options) { var indexOffset, previousInt, slideOffset, unevenOffset, targetSlide; const {slidesToScroll, slidesToShow} = this.props const {slideCount, currentSlide} = this.state unevenOffset = (slideCount % slidesToScroll !== 0); indexOffset = unevenOffset ? 0 : (slideCount - currentSlide) % slidesToScroll; if (options.message === 'previous') { slideOffset = (indexOffset === 0) ? slidesToScroll : slidesToShow - indexOffset; targetSlide = currentSlide - slideOffset; if (this.props.lazyLoad && !this.props.infinite) { previousInt = currentSlide - slideOffset; targetSlide = previousInt === -1 ? slideCount -1 : previousInt; } } else if (options.message === 'next') { slideOffset = (indexOffset === 0) ? slidesToScroll : indexOffset; targetSlide = currentSlide + slideOffset; if (this.props.lazyLoad && !this.props.infinite) { targetSlide = ((currentSlide + slidesToScroll) % slideCount) + indexOffset; } } else if (options.message === 'dots' || options.message === 'children') { // Click on dots targetSlide = options.index * options.slidesToScroll; if (targetSlide === options.currentSlide) { return; } } else if (options.message === 'index') { targetSlide = parseInt(options.index); if (targetSlide === options.currentSlide) { return; } } this.slideHandler(targetSlide); }, // Accessiblity handler for previous and next keyHandler: function (e) { //Dont slide if the cursor is inside the form fields and arrow keys are pressed if(!e.target.tagName.match('TEXTAREA|INPUT|SELECT')) { if (e.keyCode === 37 && this.props.accessibility === true) { this.changeSlide({ message: this.props.rtl === true ? 
'next' : 'previous' }); } else if (e.keyCode === 39 && this.props.accessibility === true) { this.changeSlide({ message: this.props.rtl === true ? 'previous' : 'next' }); } } }, // Focus on selecting a slide (click handler on track) selectHandler: function (options) { this.changeSlide(options) }, swipeStart: function (e) { var touches, posX, posY; if ((this.props.swipe === false) || ('ontouchend' in document && this.props.swipe === false)) { return; } else if (this.props.draggable === false && e.type.indexOf('mouse') !== -1) { return; } posX = (e.touches !== undefined) ? e.touches[0].pageX : e.clientX; posY = (e.touches !== undefined) ? e.touches[0].pageY : e.clientY; this.setState({ dragging: true, touchObject: { startX: posX, startY: posY, curX: posX, curY: posY } }); }, swipeMove: function (e) { if (!this.state.dragging) { e.preventDefault(); return; } if (this.state.animating) { return; } if (this.props.vertical && this.props.swipeToSlide && this.props.verticalSwiping) { e.preventDefault(); } var swipeLeft; var curLeft, positionOffset; var touchObject = this.state.touchObject; curLeft = getTrackLeft(assign({ slideIndex: this.state.currentSlide, trackRef: this.track }, this.props, this.state)); touchObject.curX = (e.touches) ? e.touches[0].pageX : e.clientX; touchObject.curY = (e.touches) ? e.touches[0].pageY : e.clientY; touchObject.swipeLength = Math.round(Math.sqrt(Math.pow(touchObject.curX - touchObject.startX, 2))); if (this.props.verticalSwiping) { touchObject.swipeLength = Math.round(Math.sqrt(Math.pow(touchObject.curY - touchObject.startY, 2))); } positionOffset = (this.props.rtl === false ? 1 : -1) * (touchObject.curX > touchObject.startX ? 1 : -1); if (this.props.verticalSwiping) { positionOffset = touchObject.curY > touchObject.startY ? 
1 : -1; } var currentSlide = this.state.currentSlide; var dotCount = Math.ceil(this.state.slideCount / this.props.slidesToScroll); var swipeDirection = this.swipeDirection(this.state.touchObject); var touchSwipeLength = touchObject.swipeLength; if (this.props.infinite === false) { if ((currentSlide === 0 && swipeDirection === 'right') || (currentSlide + 1 >= dotCount && swipeDirection === 'left')) { touchSwipeLength = touchObject.swipeLength * this.props.edgeFriction; if (this.state.edgeDragged === false && this.props.edgeEvent) { this.props.edgeEvent(swipeDirection); this.setState({ edgeDragged: true }); } } } if (this.state.swiped === false && this.props.swipeEvent) { this.props.swipeEvent(swipeDirection); this.setState({ swiped: true }); } if (!this.props.vertical) { swipeLeft = curLeft + touchSwipeLength * positionOffset; } else { swipeLeft = curLeft + (touchSwipeLength * (this.state.listHeight / this.state.listWidth)) * positionOffset; } if (this.props.verticalSwiping) { swipeLeft = curLeft + touchSwipeLength * positionOffset; } this.setState({ touchObject: touchObject, swipeLeft: swipeLeft, trackStyle: getTrackCSS(assign({left: swipeLeft}, this.props, this.state)) }); if (Math.abs(touchObject.curX - touchObject.startX) < Math.abs(touchObject.curY - touchObject.startY) * 0.8) { return; } if (touchObject.swipeLength > 4) { e.preventDefault(); } }, getNavigableIndexes() { let max; let breakPoint = 0; let counter = 0; let indexes = []; if (!this.props.infinite) { max = this.state.slideCount; } else { breakPoint = this.props.slidesToShow * -1; counter = this.props.slidesToShow * -1; max = this.state.slideCount * 2; } while (breakPoint < max) { indexes.push(breakPoint); breakPoint = counter + this.props.slidesToScroll; counter += this.props.slidesToScroll <= this.props.slidesToShow ? 
this.props.slidesToScroll : this.props.slidesToShow; } return indexes; }, checkNavigable(index) { const navigables = this.getNavigableIndexes(); let prevNavigable = 0; if (index > navigables[navigables.length - 1]) { index = navigables[navigables.length - 1]; } else { for (var n in navigables) { if (index < navigables[n]) { index = prevNavigable; break; } prevNavigable = navigables[n]; } } return index; }, getSlideCount() { const centerOffset = this.props.centerMode ? this.state.slideWidth * Math.floor(this.props.slidesToShow / 2) : 0; if (this.props.swipeToSlide) { let swipedSlide; const slickList = ReactDOM.findDOMNode(this.list); const slides = slickList.querySelectorAll('.slick-slide'); Array.from(slides).every((slide) => { if (!this.props.vertical) { if (slide.offsetLeft - centerOffset + (this.getWidth(slide) / 2) > this.state.swipeLeft * -1) { swipedSlide = slide; return false; } } else { if (slide.offsetTop + (this.getHeight(slide) / 2) > this.state.swipeLeft * -1) { swipedSlide = slide; return false; } } return true; }); const slidesTraversed = Math.abs(swipedSlide.dataset.index - this.state.currentSlide) || 1; return slidesTraversed; } else { return this.props.slidesToScroll; } }, swipeEnd: function (e) { if (!this.state.dragging) { if (this.props.swipe) { e.preventDefault(); } return; } var touchObject = this.state.touchObject; var minSwipe = this.state.listWidth/this.props.touchThreshold; var swipeDirection = this.swipeDirection(touchObject); if (this.props.verticalSwiping) { minSwipe = this.state.listHeight/this.props.touchThreshold; } // reset the state of touch related state variables. 
this.setState({ dragging: false, edgeDragged: false, swiped: false, swipeLeft: null, touchObject: {} }); // Fix for #13 if (!touchObject.swipeLength) { return; } if (touchObject.swipeLength > minSwipe) { e.preventDefault(); let slideCount, newSlide; switch (swipeDirection) { case 'left': case 'down': newSlide = this.state.currentSlide + this.getSlideCount(); slideCount = this.props.swipeToSlide ? this.checkNavigable(newSlide) : newSlide; this.state.currentDirection = 0; break; case 'right': case 'up': newSlide = this.state.currentSlide - this.getSlideCount(); slideCount = this.props.swipeToSlide ? this.checkNavigable(newSlide) : newSlide; this.state.currentDirection = 1; break; default: slideCount = this.state.currentSlide; } this.slideHandler(slideCount); } else { // Adjust the track back to it's original position. var currentLeft = getTrackLeft(assign({ slideIndex: this.state.currentSlide, trackRef: this.track }, this.props, this.state)); this.setState({ trackStyle: getTrackAnimateCSS(assign({left: currentLeft}, this.props, this.state)) }); } }, onInnerSliderEnter: function (e) { if (this.props.autoplay && this.props.pauseOnHover) { this.pause(); } }, onInnerSliderOver: function (e) { if (this.props.autoplay && this.props.pauseOnHover) { this.pause(); } }, onInnerSliderLeave: function (e) { if (this.props.autoplay && this.props.pauseOnHover) { this.autoPlay(); } } }; export default EventHandlers;
src/mixins/event-handlers.js
'use strict'; import {getTrackCSS, getTrackLeft, getTrackAnimateCSS} from './trackHelper'; import helpers from './helpers'; import assign from 'object-assign'; import ReactDOM from 'react-dom'; var EventHandlers = { // Event handler for previous and next changeSlide: function (options) { var indexOffset, previousInt, slideOffset, unevenOffset, targetSlide; const {slidesToScroll, slidesToShow} = this.props const {slideCount, currentSlide} = this.state unevenOffset = (slideCount % slidesToScroll !== 0); indexOffset = unevenOffset ? 0 : (slideCount - currentSlide) % slidesToScroll; if (options.message === 'previous') { slideOffset = (indexOffset === 0) ? slidesToScroll : slidesToShow - indexOffset; targetSlide = currentSlide - slideOffset; if (this.props.lazyLoad) { previousInt = currentSlide - slideOffset; targetSlide = previousInt === -1 ? slideCount -1 : previousInt; } } else if (options.message === 'next') { slideOffset = (indexOffset === 0) ? slidesToScroll : indexOffset; targetSlide = currentSlide + slideOffset; if (this.props.lazyLoad) { targetSlide = ((currentSlide + slidesToScroll) % slideCount) + indexOffset; } } else if (options.message === 'dots' || options.message === 'children') { // Click on dots targetSlide = options.index * options.slidesToScroll; if (targetSlide === options.currentSlide) { return; } } else if (options.message === 'index') { targetSlide = parseInt(options.index); if (targetSlide === options.currentSlide) { return; } } this.slideHandler(targetSlide); }, // Accessiblity handler for previous and next keyHandler: function (e) { //Dont slide if the cursor is inside the form fields and arrow keys are pressed if(!e.target.tagName.match('TEXTAREA|INPUT|SELECT')) { if (e.keyCode === 37 && this.props.accessibility === true) { this.changeSlide({ message: this.props.rtl === true ? 'next' : 'previous' }); } else if (e.keyCode === 39 && this.props.accessibility === true) { this.changeSlide({ message: this.props.rtl === true ? 
'previous' : 'next' }); } } }, // Focus on selecting a slide (click handler on track) selectHandler: function (options) { this.changeSlide(options) }, swipeStart: function (e) { var touches, posX, posY; if ((this.props.swipe === false) || ('ontouchend' in document && this.props.swipe === false)) { return; } else if (this.props.draggable === false && e.type.indexOf('mouse') !== -1) { return; } posX = (e.touches !== undefined) ? e.touches[0].pageX : e.clientX; posY = (e.touches !== undefined) ? e.touches[0].pageY : e.clientY; this.setState({ dragging: true, touchObject: { startX: posX, startY: posY, curX: posX, curY: posY } }); }, swipeMove: function (e) { if (!this.state.dragging) { e.preventDefault(); return; } if (this.state.animating) { return; } if (this.props.vertical && this.props.swipeToSlide && this.props.verticalSwiping) { e.preventDefault(); } var swipeLeft; var curLeft, positionOffset; var touchObject = this.state.touchObject; curLeft = getTrackLeft(assign({ slideIndex: this.state.currentSlide, trackRef: this.track }, this.props, this.state)); touchObject.curX = (e.touches) ? e.touches[0].pageX : e.clientX; touchObject.curY = (e.touches) ? e.touches[0].pageY : e.clientY; touchObject.swipeLength = Math.round(Math.sqrt(Math.pow(touchObject.curX - touchObject.startX, 2))); if (this.props.verticalSwiping) { touchObject.swipeLength = Math.round(Math.sqrt(Math.pow(touchObject.curY - touchObject.startY, 2))); } positionOffset = (this.props.rtl === false ? 1 : -1) * (touchObject.curX > touchObject.startX ? 1 : -1); if (this.props.verticalSwiping) { positionOffset = touchObject.curY > touchObject.startY ? 
1 : -1; } var currentSlide = this.state.currentSlide; var dotCount = Math.ceil(this.state.slideCount / this.props.slidesToScroll); var swipeDirection = this.swipeDirection(this.state.touchObject); var touchSwipeLength = touchObject.swipeLength; if (this.props.infinite === false) { if ((currentSlide === 0 && swipeDirection === 'right') || (currentSlide + 1 >= dotCount && swipeDirection === 'left')) { touchSwipeLength = touchObject.swipeLength * this.props.edgeFriction; if (this.state.edgeDragged === false && this.props.edgeEvent) { this.props.edgeEvent(swipeDirection); this.setState({ edgeDragged: true }); } } } if (this.state.swiped === false && this.props.swipeEvent) { this.props.swipeEvent(swipeDirection); this.setState({ swiped: true }); } if (!this.props.vertical) { swipeLeft = curLeft + touchSwipeLength * positionOffset; } else { swipeLeft = curLeft + (touchSwipeLength * (this.state.listHeight / this.state.listWidth)) * positionOffset; } if (this.props.verticalSwiping) { swipeLeft = curLeft + touchSwipeLength * positionOffset; } this.setState({ touchObject: touchObject, swipeLeft: swipeLeft, trackStyle: getTrackCSS(assign({left: swipeLeft}, this.props, this.state)) }); if (Math.abs(touchObject.curX - touchObject.startX) < Math.abs(touchObject.curY - touchObject.startY) * 0.8) { return; } if (touchObject.swipeLength > 4) { e.preventDefault(); } }, getNavigableIndexes() { let max; let breakPoint = 0; let counter = 0; let indexes = []; if (!this.props.infinite) { max = this.state.slideCount; } else { breakPoint = this.props.slidesToShow * -1; counter = this.props.slidesToShow * -1; max = this.state.slideCount * 2; } while (breakPoint < max) { indexes.push(breakPoint); breakPoint = counter + this.props.slidesToScroll; counter += this.props.slidesToScroll <= this.props.slidesToShow ? 
this.props.slidesToScroll : this.props.slidesToShow; } return indexes; }, checkNavigable(index) { const navigables = this.getNavigableIndexes(); let prevNavigable = 0; if (index > navigables[navigables.length - 1]) { index = navigables[navigables.length - 1]; } else { for (var n in navigables) { if (index < navigables[n]) { index = prevNavigable; break; } prevNavigable = navigables[n]; } } return index; }, getSlideCount() { const centerOffset = this.props.centerMode ? this.state.slideWidth * Math.floor(this.props.slidesToShow / 2) : 0; if (this.props.swipeToSlide) { let swipedSlide; const slickList = ReactDOM.findDOMNode(this.list); const slides = slickList.querySelectorAll('.slick-slide'); Array.from(slides).every((slide) => { if (!this.props.vertical) { if (slide.offsetLeft - centerOffset + (this.getWidth(slide) / 2) > this.state.swipeLeft * -1) { swipedSlide = slide; return false; } } else { if (slide.offsetTop + (this.getHeight(slide) / 2) > this.state.swipeLeft * -1) { swipedSlide = slide; return false; } } return true; }); const slidesTraversed = Math.abs(swipedSlide.dataset.index - this.state.currentSlide) || 1; return slidesTraversed; } else { return this.props.slidesToScroll; } }, swipeEnd: function (e) { if (!this.state.dragging) { if (this.props.swipe) { e.preventDefault(); } return; } var touchObject = this.state.touchObject; var minSwipe = this.state.listWidth/this.props.touchThreshold; var swipeDirection = this.swipeDirection(touchObject); if (this.props.verticalSwiping) { minSwipe = this.state.listHeight/this.props.touchThreshold; } // reset the state of touch related state variables. 
this.setState({ dragging: false, edgeDragged: false, swiped: false, swipeLeft: null, touchObject: {} }); // Fix for #13 if (!touchObject.swipeLength) { return; } if (touchObject.swipeLength > minSwipe) { e.preventDefault(); let slideCount, newSlide; switch (swipeDirection) { case 'left': case 'down': newSlide = this.state.currentSlide + this.getSlideCount(); slideCount = this.props.swipeToSlide ? this.checkNavigable(newSlide) : newSlide; this.state.currentDirection = 0; break; case 'right': case 'up': newSlide = this.state.currentSlide - this.getSlideCount(); slideCount = this.props.swipeToSlide ? this.checkNavigable(newSlide) : newSlide; this.state.currentDirection = 1; break; default: slideCount = this.state.currentSlide; } this.slideHandler(slideCount); } else { // Adjust the track back to it's original position. var currentLeft = getTrackLeft(assign({ slideIndex: this.state.currentSlide, trackRef: this.track }, this.props, this.state)); this.setState({ trackStyle: getTrackAnimateCSS(assign({left: currentLeft}, this.props, this.state)) }); } }, onInnerSliderEnter: function (e) { if (this.props.autoplay && this.props.pauseOnHover) { this.pause(); } }, onInnerSliderOver: function (e) { if (this.props.autoplay && this.props.pauseOnHover) { this.pause(); } }, onInnerSliderLeave: function (e) { if (this.props.autoplay && this.props.pauseOnHover) { this.autoPlay(); } } }; export default EventHandlers;
Update condition to not use lazy load calculation for infinite
src/mixins/event-handlers.js
Update condition to not use lazy load calculation for infinite
<ide><path>rc/mixins/event-handlers.js <ide> if (options.message === 'previous') { <ide> slideOffset = (indexOffset === 0) ? slidesToScroll : slidesToShow - indexOffset; <ide> targetSlide = currentSlide - slideOffset; <del> if (this.props.lazyLoad) { <add> if (this.props.lazyLoad && !this.props.infinite) { <ide> previousInt = currentSlide - slideOffset; <ide> targetSlide = previousInt === -1 ? slideCount -1 : previousInt; <ide> } <ide> } else if (options.message === 'next') { <ide> slideOffset = (indexOffset === 0) ? slidesToScroll : indexOffset; <ide> targetSlide = currentSlide + slideOffset; <del> if (this.props.lazyLoad) { <add> if (this.props.lazyLoad && !this.props.infinite) { <ide> targetSlide = ((currentSlide + slidesToScroll) % slideCount) + indexOffset; <ide> } <ide> } else if (options.message === 'dots' || options.message === 'children') {
Java
apache-2.0
9f1e32f11292bdab5414c0874917a7466e2d2fdf
0
google/ExoPlayer,google/ExoPlayer,saki4510t/ExoPlayer,androidx/media,saki4510t/ExoPlayer,stari4ek/ExoPlayer,google/ExoPlayer,androidx/media,amzn/exoplayer-amazon-port,ened/ExoPlayer,androidx/media,stari4ek/ExoPlayer,amzn/exoplayer-amazon-port,superbderrick/ExoPlayer,superbderrick/ExoPlayer,saki4510t/ExoPlayer,stari4ek/ExoPlayer,ened/ExoPlayer,amzn/exoplayer-amazon-port,ened/ExoPlayer,superbderrick/ExoPlayer
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.trackselection; import android.support.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlayerFactory; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.chunk.MediaChunk; import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; import com.google.android.exoplayer2.upstream.BandwidthMeter; import com.google.android.exoplayer2.util.Clock; import com.google.android.exoplayer2.util.Util; import java.util.List; import org.checkerframework.checker.nullness.compatqual.NullableType; /** * A bandwidth based adaptive {@link TrackSelection}, whose selected track is updated to be the one * of highest quality given the current network conditions and the state of the buffer. */ public class AdaptiveTrackSelection extends BaseTrackSelection { /** * Factory for {@link AdaptiveTrackSelection} instances. 
*/ public static final class Factory implements TrackSelection.Factory { private final @Nullable BandwidthMeter bandwidthMeter; private final int minDurationForQualityIncreaseMs; private final int maxDurationForQualityDecreaseMs; private final int minDurationToRetainAfterDiscardMs; private final float bandwidthFraction; private final float bufferedFractionToLiveEdgeForQualityIncrease; private final long minTimeBetweenBufferReevaluationMs; private final Clock clock; private TrackBitrateEstimator trackBitrateEstimator; private boolean blockFixedTrackSelectionBandwidth; /** Creates an adaptive track selection factory with default parameters. */ public Factory() { this( DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS, DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS, DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS, DEFAULT_BANDWIDTH_FRACTION, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } /** * @deprecated Use {@link #Factory()} instead. Custom bandwidth meter should be directly passed * to the player in {@link ExoPlayerFactory}. */ @Deprecated @SuppressWarnings("deprecation") public Factory(BandwidthMeter bandwidthMeter) { this( bandwidthMeter, DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS, DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS, DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS, DEFAULT_BANDWIDTH_FRACTION, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } /** * Creates an adaptive track selection factory. * * @param minDurationForQualityIncreaseMs The minimum duration of buffered data required for the * selected track to switch to one of higher quality. * @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for the * selected track to switch to one of lower quality. 
* @param minDurationToRetainAfterDiscardMs When switching to a track of significantly higher * quality, the selection may indicate that media already buffered at the lower quality can * be discarded to speed up the switch. This is the minimum duration of media that must be * retained at the lower quality. * @param bandwidthFraction The fraction of the available bandwidth that the selection should * consider available for use. Setting to a value less than 1 is recommended to account for * inaccuracies in the bandwidth estimator. */ public Factory( int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs, int minDurationToRetainAfterDiscardMs, float bandwidthFraction) { this( minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, bandwidthFraction, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } /** * @deprecated Use {@link #Factory(int, int, int, float)} instead. Custom bandwidth meter should * be directly passed to the player in {@link ExoPlayerFactory}. */ @Deprecated @SuppressWarnings("deprecation") public Factory( BandwidthMeter bandwidthMeter, int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs, int minDurationToRetainAfterDiscardMs, float bandwidthFraction) { this( bandwidthMeter, minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, bandwidthFraction, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } /** * Creates an adaptive track selection factory. * * @param minDurationForQualityIncreaseMs The minimum duration of buffered data required for the * selected track to switch to one of higher quality. * @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for the * selected track to switch to one of lower quality. 
* @param minDurationToRetainAfterDiscardMs When switching to a track of significantly higher * quality, the selection may indicate that media already buffered at the lower quality can * be discarded to speed up the switch. This is the minimum duration of media that must be * retained at the lower quality. * @param bandwidthFraction The fraction of the available bandwidth that the selection should * consider available for use. Setting to a value less than 1 is recommended to account for * inaccuracies in the bandwidth estimator. * @param bufferedFractionToLiveEdgeForQualityIncrease For live streaming, the fraction of the * duration from current playback position to the live edge that has to be buffered before * the selected track can be switched to one of higher quality. This parameter is only * applied when the playback position is closer to the live edge than {@code * minDurationForQualityIncreaseMs}, which would otherwise prevent switching to a higher * quality from happening. * @param minTimeBetweenBufferReevaluationMs The track selection may periodically reevaluate its * buffer and discard some chunks of lower quality to improve the playback quality if * network conditions have changed. This is the minimum duration between 2 consecutive * buffer reevaluation calls. * @param clock A {@link Clock}. */ @SuppressWarnings("deprecation") public Factory( int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs, int minDurationToRetainAfterDiscardMs, float bandwidthFraction, float bufferedFractionToLiveEdgeForQualityIncrease, long minTimeBetweenBufferReevaluationMs, Clock clock) { this( /* bandwidthMeter= */ null, minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, bandwidthFraction, bufferedFractionToLiveEdgeForQualityIncrease, minTimeBetweenBufferReevaluationMs, clock); } /** * @deprecated Use {@link #Factory(int, int, int, float, float, long, Clock)} instead. 
Custom * bandwidth meter should be directly passed to the player in {@link ExoPlayerFactory}. */ @Deprecated public Factory( @Nullable BandwidthMeter bandwidthMeter, int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs, int minDurationToRetainAfterDiscardMs, float bandwidthFraction, float bufferedFractionToLiveEdgeForQualityIncrease, long minTimeBetweenBufferReevaluationMs, Clock clock) { this.bandwidthMeter = bandwidthMeter; this.minDurationForQualityIncreaseMs = minDurationForQualityIncreaseMs; this.maxDurationForQualityDecreaseMs = maxDurationForQualityDecreaseMs; this.minDurationToRetainAfterDiscardMs = minDurationToRetainAfterDiscardMs; this.bandwidthFraction = bandwidthFraction; this.bufferedFractionToLiveEdgeForQualityIncrease = bufferedFractionToLiveEdgeForQualityIncrease; this.minTimeBetweenBufferReevaluationMs = minTimeBetweenBufferReevaluationMs; this.clock = clock; trackBitrateEstimator = TrackBitrateEstimator.DEFAULT; } /** * Sets a TrackBitrateEstimator. * * <p>This method is experimental, and will be renamed or removed in a future release. * * @param trackBitrateEstimator A {@link TrackBitrateEstimator}. */ public void experimental_setTrackBitrateEstimator(TrackBitrateEstimator trackBitrateEstimator) { this.trackBitrateEstimator = trackBitrateEstimator; } /** * Enables blocking of the total fixed track selection bandwidth. * * <p>This method is experimental, and will be renamed or removed in a future release. */ public void experimental_enableBlockFixedTrackSelectionBandwidth() { this.blockFixedTrackSelectionBandwidth = true; } @Override public AdaptiveTrackSelection createTrackSelection( TrackGroup group, BandwidthMeter bandwidthMeter, int... 
tracks) { if (this.bandwidthMeter != null) { bandwidthMeter = this.bandwidthMeter; } AdaptiveTrackSelection adaptiveTrackSelection = new AdaptiveTrackSelection( group, tracks, new DefaultBandwidthProvider(bandwidthMeter, bandwidthFraction), minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, bufferedFractionToLiveEdgeForQualityIncrease, minTimeBetweenBufferReevaluationMs, clock); adaptiveTrackSelection.experimental_setTrackBitrateEstimator(trackBitrateEstimator); return adaptiveTrackSelection; } @Override public @NullableType TrackSelection[] createTrackSelections( @NullableType Definition[] definitions, BandwidthMeter bandwidthMeter) { TrackSelection[] selections = new TrackSelection[definitions.length]; AdaptiveTrackSelection adaptiveSelection = null; int totalFixedBandwidth = 0; for (int i = 0; i < definitions.length; i++) { Definition definition = definitions[i]; if (definition == null) { continue; } if (definition.tracks.length > 1) { selections[i] = createTrackSelection(definition.group, bandwidthMeter, definition.tracks); adaptiveSelection = (AdaptiveTrackSelection) selections[i]; } else { selections[i] = new FixedTrackSelection(definition.group, definition.tracks[0]); int trackBitrate = definition.group.getFormat(definition.tracks[0]).bitrate; if (trackBitrate != Format.NO_VALUE) { totalFixedBandwidth += trackBitrate; } } } if (blockFixedTrackSelectionBandwidth && adaptiveSelection != null) { adaptiveSelection.experimental_setNonAllocatableBandwidth(totalFixedBandwidth); } return selections; } } public static final int DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS = 10000; public static final int DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS = 25000; public static final int DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS = 25000; public static final float DEFAULT_BANDWIDTH_FRACTION = 0.75f; public static final float DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE = 0.75f; public static final long 
DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS = 2000; private final BandwidthProvider bandwidthProvider; private final long minDurationForQualityIncreaseUs; private final long maxDurationForQualityDecreaseUs; private final long minDurationToRetainAfterDiscardUs; private final float bufferedFractionToLiveEdgeForQualityIncrease; private final long minTimeBetweenBufferReevaluationMs; private final Clock clock; private final Format[] formats; private final int[] formatBitrates; private final int[] trackBitrates; private TrackBitrateEstimator trackBitrateEstimator; private float playbackSpeed; private int selectedIndex; private int reason; private long lastBufferEvaluationMs; /** * @param group The {@link TrackGroup}. * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be * empty. May be in any order. * @param bandwidthMeter Provides an estimate of the currently available bandwidth. */ public AdaptiveTrackSelection(TrackGroup group, int[] tracks, BandwidthMeter bandwidthMeter) { this( group, tracks, bandwidthMeter, DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS, DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS, DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS, DEFAULT_BANDWIDTH_FRACTION, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } /** * @param group The {@link TrackGroup}. * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be * empty. May be in any order. * @param bandwidthMeter Provides an estimate of the currently available bandwidth. * @param minDurationForQualityIncreaseMs The minimum duration of buffered data required for the * selected track to switch to one of higher quality. * @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for the * selected track to switch to one of lower quality. 
* @param minDurationToRetainAfterDiscardMs When switching to a track of significantly higher * quality, the selection may indicate that media already buffered at the lower quality can be * discarded to speed up the switch. This is the minimum duration of media that must be * retained at the lower quality. * @param bandwidthFraction The fraction of the available bandwidth that the selection should * consider available for use. Setting to a value less than 1 is recommended to account for * inaccuracies in the bandwidth estimator. * @param bufferedFractionToLiveEdgeForQualityIncrease For live streaming, the fraction of the * duration from current playback position to the live edge that has to be buffered before the * selected track can be switched to one of higher quality. This parameter is only applied * when the playback position is closer to the live edge than {@code * minDurationForQualityIncreaseMs}, which would otherwise prevent switching to a higher * quality from happening. * @param minTimeBetweenBufferReevaluationMs The track selection may periodically reevaluate its * buffer and discard some chunks of lower quality to improve the playback quality if network * condition has changed. This is the minimum duration between 2 consecutive buffer * reevaluation calls. 
*/ public AdaptiveTrackSelection( TrackGroup group, int[] tracks, BandwidthMeter bandwidthMeter, long minDurationForQualityIncreaseMs, long maxDurationForQualityDecreaseMs, long minDurationToRetainAfterDiscardMs, float bandwidthFraction, float bufferedFractionToLiveEdgeForQualityIncrease, long minTimeBetweenBufferReevaluationMs, Clock clock) { this( group, tracks, new DefaultBandwidthProvider(bandwidthMeter, bandwidthFraction), minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, bufferedFractionToLiveEdgeForQualityIncrease, minTimeBetweenBufferReevaluationMs, clock); } private AdaptiveTrackSelection( TrackGroup group, int[] tracks, BandwidthProvider bandwidthProvider, long minDurationForQualityIncreaseMs, long maxDurationForQualityDecreaseMs, long minDurationToRetainAfterDiscardMs, float bufferedFractionToLiveEdgeForQualityIncrease, long minTimeBetweenBufferReevaluationMs, Clock clock) { super(group, tracks); this.bandwidthProvider = bandwidthProvider; this.minDurationForQualityIncreaseUs = minDurationForQualityIncreaseMs * 1000L; this.maxDurationForQualityDecreaseUs = maxDurationForQualityDecreaseMs * 1000L; this.minDurationToRetainAfterDiscardUs = minDurationToRetainAfterDiscardMs * 1000L; this.bufferedFractionToLiveEdgeForQualityIncrease = bufferedFractionToLiveEdgeForQualityIncrease; this.minTimeBetweenBufferReevaluationMs = minTimeBetweenBufferReevaluationMs; this.clock = clock; playbackSpeed = 1f; reason = C.SELECTION_REASON_INITIAL; lastBufferEvaluationMs = C.TIME_UNSET; trackBitrateEstimator = TrackBitrateEstimator.DEFAULT; formats = new Format[length]; formatBitrates = new int[length]; trackBitrates = new int[length]; for (int i = 0; i < length; i++) { @SuppressWarnings("nullness:method.invocation.invalid") Format format = getFormat(i); formats[i] = format; formatBitrates[i] = formats[i].bitrate; } @SuppressWarnings("nullness:method.invocation.invalid") int selectedIndex = 
determineIdealSelectedIndex(Long.MIN_VALUE, formatBitrates); this.selectedIndex = selectedIndex; } /** * Sets a TrackBitrateEstimator. * * <p>This method is experimental, and will be renamed or removed in a future release. * * @param trackBitrateEstimator A {@link TrackBitrateEstimator}. */ public void experimental_setTrackBitrateEstimator(TrackBitrateEstimator trackBitrateEstimator) { this.trackBitrateEstimator = trackBitrateEstimator; } /** * Sets the non-allocatable bandwidth, which shouldn't be considered available. * * <p>This method is experimental, and will be renamed or removed in a future release. * * @param nonAllocatableBandwidth The non-allocatable bandwidth in bits per second. */ public void experimental_setNonAllocatableBandwidth(long nonAllocatableBandwidth) { ((DefaultBandwidthProvider) bandwidthProvider) .experimental_setNonAllocatableBandwidth(nonAllocatableBandwidth); } @Override public void enable() { lastBufferEvaluationMs = C.TIME_UNSET; } @Override public void onPlaybackSpeed(float playbackSpeed) { this.playbackSpeed = playbackSpeed; } @Override public void updateSelectedTrack( long playbackPositionUs, long bufferedDurationUs, long availableDurationUs, List<? extends MediaChunk> queue, MediaChunkIterator[] mediaChunkIterators) { long nowMs = clock.elapsedRealtime(); // Update the estimated track bitrates. trackBitrateEstimator.getBitrates(formats, queue, mediaChunkIterators, trackBitrates); // Stash the current selection, then make a new one. int currentSelectedIndex = selectedIndex; selectedIndex = determineIdealSelectedIndex(nowMs, trackBitrates); if (selectedIndex == currentSelectedIndex) { return; } if (!isBlacklisted(currentSelectedIndex, nowMs)) { // Revert back to the current selection if conditions are not suitable for switching. 
Format currentFormat = getFormat(currentSelectedIndex); Format selectedFormat = getFormat(selectedIndex); if (selectedFormat.bitrate > currentFormat.bitrate && bufferedDurationUs < minDurationForQualityIncreaseUs(availableDurationUs)) { // The selected track is a higher quality, but we have insufficient buffer to safely switch // up. Defer switching up for now. selectedIndex = currentSelectedIndex; } else if (selectedFormat.bitrate < currentFormat.bitrate && bufferedDurationUs >= maxDurationForQualityDecreaseUs) { // The selected track is a lower quality, but we have sufficient buffer to defer switching // down for now. selectedIndex = currentSelectedIndex; } } // If we adapted, update the trigger. if (selectedIndex != currentSelectedIndex) { reason = C.SELECTION_REASON_ADAPTIVE; } } @Override public int getSelectedIndex() { return selectedIndex; } @Override public int getSelectionReason() { return reason; } @Override public @Nullable Object getSelectionData() { return null; } @Override public int evaluateQueueSize(long playbackPositionUs, List<? extends MediaChunk> queue) { long nowMs = clock.elapsedRealtime(); if (!shouldEvaluateQueueSize(nowMs)) { return queue.size(); } lastBufferEvaluationMs = nowMs; if (queue.isEmpty()) { return 0; } int queueSize = queue.size(); MediaChunk lastChunk = queue.get(queueSize - 1); long playoutBufferedDurationBeforeLastChunkUs = Util.getPlayoutDurationForMediaDuration( lastChunk.startTimeUs - playbackPositionUs, playbackSpeed); long minDurationToRetainAfterDiscardUs = getMinDurationToRetainAfterDiscardUs(); if (playoutBufferedDurationBeforeLastChunkUs < minDurationToRetainAfterDiscardUs) { return queueSize; } int idealSelectedIndex = determineIdealSelectedIndex(nowMs, formatBitrates); Format idealFormat = getFormat(idealSelectedIndex); // If the chunks contain video, discard from the first SD chunk beyond // minDurationToRetainAfterDiscardUs whose resolution and bitrate are both lower than the ideal // track. 
for (int i = 0; i < queueSize; i++) { MediaChunk chunk = queue.get(i); Format format = chunk.trackFormat; long mediaDurationBeforeThisChunkUs = chunk.startTimeUs - playbackPositionUs; long playoutDurationBeforeThisChunkUs = Util.getPlayoutDurationForMediaDuration(mediaDurationBeforeThisChunkUs, playbackSpeed); if (playoutDurationBeforeThisChunkUs >= minDurationToRetainAfterDiscardUs && format.bitrate < idealFormat.bitrate && format.height != Format.NO_VALUE && format.height < 720 && format.width != Format.NO_VALUE && format.width < 1280 && format.height < idealFormat.height) { return i; } } return queueSize; } /** * Called when updating the selected track to determine whether a candidate track can be selected. * * @param format The {@link Format} of the candidate track. * @param trackBitrate The estimated bitrate of the track. May differ from {@link Format#bitrate} * if a more accurate estimate of the current track bitrate is available. * @param playbackSpeed The current playback speed. * @param effectiveBitrate The bitrate available to this selection. * @return Whether this {@link Format} can be selected. */ @SuppressWarnings("unused") protected boolean canSelectFormat( Format format, int trackBitrate, float playbackSpeed, long effectiveBitrate) { return Math.round(trackBitrate * playbackSpeed) <= effectiveBitrate; } /** * Called from {@link #evaluateQueueSize(long, List)} to determine whether an evaluation should be * performed. * * @param nowMs The current value of {@link Clock#elapsedRealtime()}. * @return Whether an evaluation should be performed. */ protected boolean shouldEvaluateQueueSize(long nowMs) { return lastBufferEvaluationMs == C.TIME_UNSET || nowMs - lastBufferEvaluationMs >= minTimeBetweenBufferReevaluationMs; } /** * Called from {@link #evaluateQueueSize(long, List)} to determine the minimum duration of buffer * to retain after discarding chunks. * * @return The minimum duration of buffer to retain after discarding chunks, in microseconds. 
*/ protected long getMinDurationToRetainAfterDiscardUs() { return minDurationToRetainAfterDiscardUs; } /** * Computes the ideal selected index ignoring buffer health. * * @param nowMs The current time in the timebase of {@link Clock#elapsedRealtime()}, or {@link * Long#MIN_VALUE} to ignore blacklisting. * @param trackBitrates The estimated track bitrates. May differ from format bitrates if more * accurate estimates of the current track bitrates are available. */ private int determineIdealSelectedIndex(long nowMs, int[] trackBitrates) { long effectiveBitrate = bandwidthProvider.getAllocatedBandwidth(); int lowestBitrateNonBlacklistedIndex = 0; for (int i = 0; i < length; i++) { if (nowMs == Long.MIN_VALUE || !isBlacklisted(i, nowMs)) { Format format = getFormat(i); if (canSelectFormat(format, trackBitrates[i], playbackSpeed, effectiveBitrate)) { return i; } else { lowestBitrateNonBlacklistedIndex = i; } } } return lowestBitrateNonBlacklistedIndex; } private long minDurationForQualityIncreaseUs(long availableDurationUs) { boolean isAvailableDurationTooShort = availableDurationUs != C.TIME_UNSET && availableDurationUs <= minDurationForQualityIncreaseUs; return isAvailableDurationTooShort ? (long) (availableDurationUs * bufferedFractionToLiveEdgeForQualityIncrease) : minDurationForQualityIncreaseUs; } /** Provides the allocated bandwidth. */ private interface BandwidthProvider { /** Returns the allocated bitrate. 
*/ long getAllocatedBandwidth(); } private static final class DefaultBandwidthProvider implements BandwidthProvider { private final BandwidthMeter bandwidthMeter; private final float bandwidthFraction; private long nonAllocatableBandwidth; /* package */ DefaultBandwidthProvider(BandwidthMeter bandwidthMeter, float bandwidthFraction) { this.bandwidthMeter = bandwidthMeter; this.bandwidthFraction = bandwidthFraction; } @Override public long getAllocatedBandwidth() { long totalBandwidth = (long) (bandwidthMeter.getBitrateEstimate() * bandwidthFraction); return Math.max(0L, totalBandwidth - nonAllocatableBandwidth); } /* package */ void experimental_setNonAllocatableBandwidth(long nonAllocatableBandwidth) { this.nonAllocatableBandwidth = nonAllocatableBandwidth; } } }
library/core/src/main/java/com/google/android/exoplayer2/trackselection/AdaptiveTrackSelection.java
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.trackselection; import android.support.annotation.Nullable; import com.google.android.exoplayer2.C; import com.google.android.exoplayer2.ExoPlayerFactory; import com.google.android.exoplayer2.Format; import com.google.android.exoplayer2.source.TrackGroup; import com.google.android.exoplayer2.source.chunk.MediaChunk; import com.google.android.exoplayer2.source.chunk.MediaChunkIterator; import com.google.android.exoplayer2.upstream.BandwidthMeter; import com.google.android.exoplayer2.util.Clock; import com.google.android.exoplayer2.util.Util; import java.util.List; /** * A bandwidth based adaptive {@link TrackSelection}, whose selected track is updated to be the one * of highest quality given the current network conditions and the state of the buffer. */ public class AdaptiveTrackSelection extends BaseTrackSelection { /** * Factory for {@link AdaptiveTrackSelection} instances. 
*/ public static final class Factory implements TrackSelection.Factory { private final @Nullable BandwidthMeter bandwidthMeter; private final int minDurationForQualityIncreaseMs; private final int maxDurationForQualityDecreaseMs; private final int minDurationToRetainAfterDiscardMs; private final float bandwidthFraction; private final float bufferedFractionToLiveEdgeForQualityIncrease; private final long minTimeBetweenBufferReevaluationMs; private final Clock clock; private TrackBitrateEstimator trackBitrateEstimator; /** Creates an adaptive track selection factory with default parameters. */ public Factory() { this( DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS, DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS, DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS, DEFAULT_BANDWIDTH_FRACTION, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } /** * @deprecated Use {@link #Factory()} instead. Custom bandwidth meter should be directly passed * to the player in {@link ExoPlayerFactory}. */ @Deprecated @SuppressWarnings("deprecation") public Factory(BandwidthMeter bandwidthMeter) { this( bandwidthMeter, DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS, DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS, DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS, DEFAULT_BANDWIDTH_FRACTION, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } /** * Creates an adaptive track selection factory. * * @param minDurationForQualityIncreaseMs The minimum duration of buffered data required for the * selected track to switch to one of higher quality. * @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for the * selected track to switch to one of lower quality. 
* @param minDurationToRetainAfterDiscardMs When switching to a track of significantly higher * quality, the selection may indicate that media already buffered at the lower quality can * be discarded to speed up the switch. This is the minimum duration of media that must be * retained at the lower quality. * @param bandwidthFraction The fraction of the available bandwidth that the selection should * consider available for use. Setting to a value less than 1 is recommended to account for * inaccuracies in the bandwidth estimator. */ public Factory( int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs, int minDurationToRetainAfterDiscardMs, float bandwidthFraction) { this( minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, bandwidthFraction, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } /** * @deprecated Use {@link #Factory(int, int, int, float)} instead. Custom bandwidth meter should * be directly passed to the player in {@link ExoPlayerFactory}. */ @Deprecated @SuppressWarnings("deprecation") public Factory( BandwidthMeter bandwidthMeter, int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs, int minDurationToRetainAfterDiscardMs, float bandwidthFraction) { this( bandwidthMeter, minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, bandwidthFraction, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } /** * Creates an adaptive track selection factory. * * @param minDurationForQualityIncreaseMs The minimum duration of buffered data required for the * selected track to switch to one of higher quality. * @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for the * selected track to switch to one of lower quality. 
* @param minDurationToRetainAfterDiscardMs When switching to a track of significantly higher * quality, the selection may indicate that media already buffered at the lower quality can * be discarded to speed up the switch. This is the minimum duration of media that must be * retained at the lower quality. * @param bandwidthFraction The fraction of the available bandwidth that the selection should * consider available for use. Setting to a value less than 1 is recommended to account for * inaccuracies in the bandwidth estimator. * @param bufferedFractionToLiveEdgeForQualityIncrease For live streaming, the fraction of the * duration from current playback position to the live edge that has to be buffered before * the selected track can be switched to one of higher quality. This parameter is only * applied when the playback position is closer to the live edge than {@code * minDurationForQualityIncreaseMs}, which would otherwise prevent switching to a higher * quality from happening. * @param minTimeBetweenBufferReevaluationMs The track selection may periodically reevaluate its * buffer and discard some chunks of lower quality to improve the playback quality if * network conditions have changed. This is the minimum duration between 2 consecutive * buffer reevaluation calls. * @param clock A {@link Clock}. */ @SuppressWarnings("deprecation") public Factory( int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs, int minDurationToRetainAfterDiscardMs, float bandwidthFraction, float bufferedFractionToLiveEdgeForQualityIncrease, long minTimeBetweenBufferReevaluationMs, Clock clock) { this( /* bandwidthMeter= */ null, minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, bandwidthFraction, bufferedFractionToLiveEdgeForQualityIncrease, minTimeBetweenBufferReevaluationMs, clock); } /** * @deprecated Use {@link #Factory(int, int, int, float, float, long, Clock)} instead. 
Custom * bandwidth meter should be directly passed to the player in {@link ExoPlayerFactory}. */ @Deprecated public Factory( @Nullable BandwidthMeter bandwidthMeter, int minDurationForQualityIncreaseMs, int maxDurationForQualityDecreaseMs, int minDurationToRetainAfterDiscardMs, float bandwidthFraction, float bufferedFractionToLiveEdgeForQualityIncrease, long minTimeBetweenBufferReevaluationMs, Clock clock) { this.bandwidthMeter = bandwidthMeter; this.minDurationForQualityIncreaseMs = minDurationForQualityIncreaseMs; this.maxDurationForQualityDecreaseMs = maxDurationForQualityDecreaseMs; this.minDurationToRetainAfterDiscardMs = minDurationToRetainAfterDiscardMs; this.bandwidthFraction = bandwidthFraction; this.bufferedFractionToLiveEdgeForQualityIncrease = bufferedFractionToLiveEdgeForQualityIncrease; this.minTimeBetweenBufferReevaluationMs = minTimeBetweenBufferReevaluationMs; this.clock = clock; trackBitrateEstimator = TrackBitrateEstimator.DEFAULT; } /** * Sets a TrackBitrateEstimator. * * <p>This method is experimental, and will be renamed or removed in a future release. * * @param trackBitrateEstimator A {@link TrackBitrateEstimator}. */ public void experimental_setTrackBitrateEstimator(TrackBitrateEstimator trackBitrateEstimator) { this.trackBitrateEstimator = trackBitrateEstimator; } @Override public AdaptiveTrackSelection createTrackSelection( TrackGroup group, BandwidthMeter bandwidthMeter, int... 
tracks) { if (this.bandwidthMeter != null) { bandwidthMeter = this.bandwidthMeter; } AdaptiveTrackSelection adaptiveTrackSelection = new AdaptiveTrackSelection( group, tracks, new DefaultBandwidthProvider(bandwidthMeter, bandwidthFraction), minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, bufferedFractionToLiveEdgeForQualityIncrease, minTimeBetweenBufferReevaluationMs, clock); adaptiveTrackSelection.experimental_setTrackBitrateEstimator(trackBitrateEstimator); return adaptiveTrackSelection; } } public static final int DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS = 10000; public static final int DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS = 25000; public static final int DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS = 25000; public static final float DEFAULT_BANDWIDTH_FRACTION = 0.75f; public static final float DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE = 0.75f; public static final long DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS = 2000; private final BandwidthProvider bandwidthProvider; private final long minDurationForQualityIncreaseUs; private final long maxDurationForQualityDecreaseUs; private final long minDurationToRetainAfterDiscardUs; private final float bufferedFractionToLiveEdgeForQualityIncrease; private final long minTimeBetweenBufferReevaluationMs; private final Clock clock; private final Format[] formats; private final int[] formatBitrates; private final int[] trackBitrates; private TrackBitrateEstimator trackBitrateEstimator; private float playbackSpeed; private int selectedIndex; private int reason; private long lastBufferEvaluationMs; /** * @param group The {@link TrackGroup}. * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be * empty. May be in any order. * @param bandwidthMeter Provides an estimate of the currently available bandwidth. 
*/ public AdaptiveTrackSelection(TrackGroup group, int[] tracks, BandwidthMeter bandwidthMeter) { this( group, tracks, bandwidthMeter, DEFAULT_MIN_DURATION_FOR_QUALITY_INCREASE_MS, DEFAULT_MAX_DURATION_FOR_QUALITY_DECREASE_MS, DEFAULT_MIN_DURATION_TO_RETAIN_AFTER_DISCARD_MS, DEFAULT_BANDWIDTH_FRACTION, DEFAULT_BUFFERED_FRACTION_TO_LIVE_EDGE_FOR_QUALITY_INCREASE, DEFAULT_MIN_TIME_BETWEEN_BUFFER_REEVALUTATION_MS, Clock.DEFAULT); } /** * @param group The {@link TrackGroup}. * @param tracks The indices of the selected tracks within the {@link TrackGroup}. Must not be * empty. May be in any order. * @param bandwidthMeter Provides an estimate of the currently available bandwidth. * @param minDurationForQualityIncreaseMs The minimum duration of buffered data required for the * selected track to switch to one of higher quality. * @param maxDurationForQualityDecreaseMs The maximum duration of buffered data required for the * selected track to switch to one of lower quality. * @param minDurationToRetainAfterDiscardMs When switching to a track of significantly higher * quality, the selection may indicate that media already buffered at the lower quality can be * discarded to speed up the switch. This is the minimum duration of media that must be * retained at the lower quality. * @param bandwidthFraction The fraction of the available bandwidth that the selection should * consider available for use. Setting to a value less than 1 is recommended to account for * inaccuracies in the bandwidth estimator. * @param bufferedFractionToLiveEdgeForQualityIncrease For live streaming, the fraction of the * duration from current playback position to the live edge that has to be buffered before the * selected track can be switched to one of higher quality. This parameter is only applied * when the playback position is closer to the live edge than {@code * minDurationForQualityIncreaseMs}, which would otherwise prevent switching to a higher * quality from happening. 
* @param minTimeBetweenBufferReevaluationMs The track selection may periodically reevaluate its * buffer and discard some chunks of lower quality to improve the playback quality if network * condition has changed. This is the minimum duration between 2 consecutive buffer * reevaluation calls. */ public AdaptiveTrackSelection( TrackGroup group, int[] tracks, BandwidthMeter bandwidthMeter, long minDurationForQualityIncreaseMs, long maxDurationForQualityDecreaseMs, long minDurationToRetainAfterDiscardMs, float bandwidthFraction, float bufferedFractionToLiveEdgeForQualityIncrease, long minTimeBetweenBufferReevaluationMs, Clock clock) { this( group, tracks, new DefaultBandwidthProvider(bandwidthMeter, bandwidthFraction), minDurationForQualityIncreaseMs, maxDurationForQualityDecreaseMs, minDurationToRetainAfterDiscardMs, bufferedFractionToLiveEdgeForQualityIncrease, minTimeBetweenBufferReevaluationMs, clock); } private AdaptiveTrackSelection( TrackGroup group, int[] tracks, BandwidthProvider bandwidthProvider, long minDurationForQualityIncreaseMs, long maxDurationForQualityDecreaseMs, long minDurationToRetainAfterDiscardMs, float bufferedFractionToLiveEdgeForQualityIncrease, long minTimeBetweenBufferReevaluationMs, Clock clock) { super(group, tracks); this.bandwidthProvider = bandwidthProvider; this.minDurationForQualityIncreaseUs = minDurationForQualityIncreaseMs * 1000L; this.maxDurationForQualityDecreaseUs = maxDurationForQualityDecreaseMs * 1000L; this.minDurationToRetainAfterDiscardUs = minDurationToRetainAfterDiscardMs * 1000L; this.bufferedFractionToLiveEdgeForQualityIncrease = bufferedFractionToLiveEdgeForQualityIncrease; this.minTimeBetweenBufferReevaluationMs = minTimeBetweenBufferReevaluationMs; this.clock = clock; playbackSpeed = 1f; reason = C.SELECTION_REASON_INITIAL; lastBufferEvaluationMs = C.TIME_UNSET; trackBitrateEstimator = TrackBitrateEstimator.DEFAULT; formats = new Format[length]; formatBitrates = new int[length]; trackBitrates = new int[length]; 
for (int i = 0; i < length; i++) { @SuppressWarnings("nullness:method.invocation.invalid") Format format = getFormat(i); formats[i] = format; formatBitrates[i] = formats[i].bitrate; } @SuppressWarnings("nullness:method.invocation.invalid") int selectedIndex = determineIdealSelectedIndex(Long.MIN_VALUE, formatBitrates); this.selectedIndex = selectedIndex; } /** * Sets a TrackBitrateEstimator. * * <p>This method is experimental, and will be renamed or removed in a future release. * * @param trackBitrateEstimator A {@link TrackBitrateEstimator}. */ public void experimental_setTrackBitrateEstimator(TrackBitrateEstimator trackBitrateEstimator) { this.trackBitrateEstimator = trackBitrateEstimator; } /** * Sets the non-allocatable bandwidth, which shouldn't be considered available. * * <p>This method is experimental, and will be renamed or removed in a future release. * * @param nonAllocatableBandwidth The non-allocatable bandwidth in bits per second. */ public void experimental_setNonAllocatableBandwidth(long nonAllocatableBandwidth) { ((DefaultBandwidthProvider) bandwidthProvider) .experimental_setNonAllocatableBandwidth(nonAllocatableBandwidth); } @Override public void enable() { lastBufferEvaluationMs = C.TIME_UNSET; } @Override public void onPlaybackSpeed(float playbackSpeed) { this.playbackSpeed = playbackSpeed; } @Override public void updateSelectedTrack( long playbackPositionUs, long bufferedDurationUs, long availableDurationUs, List<? extends MediaChunk> queue, MediaChunkIterator[] mediaChunkIterators) { long nowMs = clock.elapsedRealtime(); // Update the estimated track bitrates. trackBitrateEstimator.getBitrates(formats, queue, mediaChunkIterators, trackBitrates); // Stash the current selection, then make a new one. 
int currentSelectedIndex = selectedIndex; selectedIndex = determineIdealSelectedIndex(nowMs, trackBitrates); if (selectedIndex == currentSelectedIndex) { return; } if (!isBlacklisted(currentSelectedIndex, nowMs)) { // Revert back to the current selection if conditions are not suitable for switching. Format currentFormat = getFormat(currentSelectedIndex); Format selectedFormat = getFormat(selectedIndex); if (selectedFormat.bitrate > currentFormat.bitrate && bufferedDurationUs < minDurationForQualityIncreaseUs(availableDurationUs)) { // The selected track is a higher quality, but we have insufficient buffer to safely switch // up. Defer switching up for now. selectedIndex = currentSelectedIndex; } else if (selectedFormat.bitrate < currentFormat.bitrate && bufferedDurationUs >= maxDurationForQualityDecreaseUs) { // The selected track is a lower quality, but we have sufficient buffer to defer switching // down for now. selectedIndex = currentSelectedIndex; } } // If we adapted, update the trigger. if (selectedIndex != currentSelectedIndex) { reason = C.SELECTION_REASON_ADAPTIVE; } } @Override public int getSelectedIndex() { return selectedIndex; } @Override public int getSelectionReason() { return reason; } @Override public @Nullable Object getSelectionData() { return null; } @Override public int evaluateQueueSize(long playbackPositionUs, List<? 
extends MediaChunk> queue) { long nowMs = clock.elapsedRealtime(); if (!shouldEvaluateQueueSize(nowMs)) { return queue.size(); } lastBufferEvaluationMs = nowMs; if (queue.isEmpty()) { return 0; } int queueSize = queue.size(); MediaChunk lastChunk = queue.get(queueSize - 1); long playoutBufferedDurationBeforeLastChunkUs = Util.getPlayoutDurationForMediaDuration( lastChunk.startTimeUs - playbackPositionUs, playbackSpeed); long minDurationToRetainAfterDiscardUs = getMinDurationToRetainAfterDiscardUs(); if (playoutBufferedDurationBeforeLastChunkUs < minDurationToRetainAfterDiscardUs) { return queueSize; } int idealSelectedIndex = determineIdealSelectedIndex(nowMs, formatBitrates); Format idealFormat = getFormat(idealSelectedIndex); // If the chunks contain video, discard from the first SD chunk beyond // minDurationToRetainAfterDiscardUs whose resolution and bitrate are both lower than the ideal // track. for (int i = 0; i < queueSize; i++) { MediaChunk chunk = queue.get(i); Format format = chunk.trackFormat; long mediaDurationBeforeThisChunkUs = chunk.startTimeUs - playbackPositionUs; long playoutDurationBeforeThisChunkUs = Util.getPlayoutDurationForMediaDuration(mediaDurationBeforeThisChunkUs, playbackSpeed); if (playoutDurationBeforeThisChunkUs >= minDurationToRetainAfterDiscardUs && format.bitrate < idealFormat.bitrate && format.height != Format.NO_VALUE && format.height < 720 && format.width != Format.NO_VALUE && format.width < 1280 && format.height < idealFormat.height) { return i; } } return queueSize; } /** * Called when updating the selected track to determine whether a candidate track can be selected. * * @param format The {@link Format} of the candidate track. * @param trackBitrate The estimated bitrate of the track. May differ from {@link Format#bitrate} * if a more accurate estimate of the current track bitrate is available. * @param playbackSpeed The current playback speed. * @param effectiveBitrate The bitrate available to this selection. 
* @return Whether this {@link Format} can be selected. */ @SuppressWarnings("unused") protected boolean canSelectFormat( Format format, int trackBitrate, float playbackSpeed, long effectiveBitrate) { return Math.round(trackBitrate * playbackSpeed) <= effectiveBitrate; } /** * Called from {@link #evaluateQueueSize(long, List)} to determine whether an evaluation should be * performed. * * @param nowMs The current value of {@link Clock#elapsedRealtime()}. * @return Whether an evaluation should be performed. */ protected boolean shouldEvaluateQueueSize(long nowMs) { return lastBufferEvaluationMs == C.TIME_UNSET || nowMs - lastBufferEvaluationMs >= minTimeBetweenBufferReevaluationMs; } /** * Called from {@link #evaluateQueueSize(long, List)} to determine the minimum duration of buffer * to retain after discarding chunks. * * @return The minimum duration of buffer to retain after discarding chunks, in microseconds. */ protected long getMinDurationToRetainAfterDiscardUs() { return minDurationToRetainAfterDiscardUs; } /** * Computes the ideal selected index ignoring buffer health. * * @param nowMs The current time in the timebase of {@link Clock#elapsedRealtime()}, or {@link * Long#MIN_VALUE} to ignore blacklisting. * @param trackBitrates The estimated track bitrates. May differ from format bitrates if more * accurate estimates of the current track bitrates are available. 
*/ private int determineIdealSelectedIndex(long nowMs, int[] trackBitrates) { long effectiveBitrate = bandwidthProvider.getAllocatedBandwidth(); int lowestBitrateNonBlacklistedIndex = 0; for (int i = 0; i < length; i++) { if (nowMs == Long.MIN_VALUE || !isBlacklisted(i, nowMs)) { Format format = getFormat(i); if (canSelectFormat(format, trackBitrates[i], playbackSpeed, effectiveBitrate)) { return i; } else { lowestBitrateNonBlacklistedIndex = i; } } } return lowestBitrateNonBlacklistedIndex; } private long minDurationForQualityIncreaseUs(long availableDurationUs) { boolean isAvailableDurationTooShort = availableDurationUs != C.TIME_UNSET && availableDurationUs <= minDurationForQualityIncreaseUs; return isAvailableDurationTooShort ? (long) (availableDurationUs * bufferedFractionToLiveEdgeForQualityIncrease) : minDurationForQualityIncreaseUs; } /** Provides the allocated bandwidth. */ private interface BandwidthProvider { /** Returns the allocated bitrate. */ long getAllocatedBandwidth(); } private static final class DefaultBandwidthProvider implements BandwidthProvider { private final BandwidthMeter bandwidthMeter; private final float bandwidthFraction; private long nonAllocatableBandwidth; /* package */ DefaultBandwidthProvider(BandwidthMeter bandwidthMeter, float bandwidthFraction) { this.bandwidthMeter = bandwidthMeter; this.bandwidthFraction = bandwidthFraction; } @Override public long getAllocatedBandwidth() { long totalBandwidth = (long) (bandwidthMeter.getBitrateEstimate() * bandwidthFraction); return Math.max(0L, totalBandwidth - nonAllocatableBandwidth); } /* package */ void experimental_setNonAllocatableBandwidth(long nonAllocatableBandwidth) { this.nonAllocatableBandwidth = nonAllocatableBandwidth; } } }
Add experimental flag to AdaptiveTrackSelection.Factory to block fixed track bandwidth This option to block bandwidth already exists on the AdaptiveTrackSelection itself but it's not currently possible to forward the total fixed track bandwidth automatically. PiperOrigin-RevId: 223785139
library/core/src/main/java/com/google/android/exoplayer2/trackselection/AdaptiveTrackSelection.java
Add experimental flag to AdaptiveTrackSelection.Factory to block fixed track bandwidth
<ide><path>ibrary/core/src/main/java/com/google/android/exoplayer2/trackselection/AdaptiveTrackSelection.java <ide> import com.google.android.exoplayer2.util.Clock; <ide> import com.google.android.exoplayer2.util.Util; <ide> import java.util.List; <add>import org.checkerframework.checker.nullness.compatqual.NullableType; <ide> <ide> /** <ide> * A bandwidth based adaptive {@link TrackSelection}, whose selected track is updated to be the one <ide> private final Clock clock; <ide> <ide> private TrackBitrateEstimator trackBitrateEstimator; <add> private boolean blockFixedTrackSelectionBandwidth; <ide> <ide> /** Creates an adaptive track selection factory with default parameters. */ <ide> public Factory() { <ide> this.trackBitrateEstimator = trackBitrateEstimator; <ide> } <ide> <add> /** <add> * Enables blocking of the total fixed track selection bandwidth. <add> * <add> * <p>This method is experimental, and will be renamed or removed in a future release. <add> */ <add> public void experimental_enableBlockFixedTrackSelectionBandwidth() { <add> this.blockFixedTrackSelectionBandwidth = true; <add> } <add> <ide> @Override <ide> public AdaptiveTrackSelection createTrackSelection( <ide> TrackGroup group, BandwidthMeter bandwidthMeter, int... 
tracks) { <ide> clock); <ide> adaptiveTrackSelection.experimental_setTrackBitrateEstimator(trackBitrateEstimator); <ide> return adaptiveTrackSelection; <add> } <add> <add> @Override <add> public @NullableType TrackSelection[] createTrackSelections( <add> @NullableType Definition[] definitions, BandwidthMeter bandwidthMeter) { <add> TrackSelection[] selections = new TrackSelection[definitions.length]; <add> AdaptiveTrackSelection adaptiveSelection = null; <add> int totalFixedBandwidth = 0; <add> for (int i = 0; i < definitions.length; i++) { <add> Definition definition = definitions[i]; <add> if (definition == null) { <add> continue; <add> } <add> if (definition.tracks.length > 1) { <add> selections[i] = createTrackSelection(definition.group, bandwidthMeter, definition.tracks); <add> adaptiveSelection = (AdaptiveTrackSelection) selections[i]; <add> } else { <add> selections[i] = new FixedTrackSelection(definition.group, definition.tracks[0]); <add> int trackBitrate = definition.group.getFormat(definition.tracks[0]).bitrate; <add> if (trackBitrate != Format.NO_VALUE) { <add> totalFixedBandwidth += trackBitrate; <add> } <add> } <add> } <add> if (blockFixedTrackSelectionBandwidth && adaptiveSelection != null) { <add> adaptiveSelection.experimental_setNonAllocatableBandwidth(totalFixedBandwidth); <add> } <add> return selections; <ide> } <ide> } <ide>
JavaScript
mit
b0bc1110ded4ee3b3b7c37de68ca0cb9d10099bc
0
leovo2708/ngx-treeview,leovo2708/ngx-treeview,leovo2708/ngx-treeview
const path = require('path'); const gulp = require('gulp'); const through = require('through2'); const sass = require('node-sass'); const fs = require('fs'); const libPath = 'tmp'; gulp.task('inline', function () { const globs = [ path.join(libPath, '**', '*.ts'), '!' + path.join(libPath, '**', '*.spec.ts') ]; gulp.src(globs).pipe(through.obj((file, encode, callback) => { const filePath = file.path; function resolveUrl(url) { return path.join(libPath, url); } function inlineTemplate(content) { return content.replace(/templateUrl:\s*'([^']+?\.html)'/g, (matchers, templateUrl) => { const templateFile = resolveUrl(templateUrl); const templateContent = fs.readFileSync(templateFile, encode); const shortenedTemplate = templateContent .replace(/\'/g, '\\\'') .replace(/([\n\r]\s*)+/gm, ' '); return `template: '${shortenedTemplate}'`; }); } function inlineStyles(content) { return content.replace(/styleUrls:\s*(\[[\s\S]*?\])/gm, (matchers, styleUrls) => { const urls = eval(styleUrls); return 'styles: [' + urls.map(styleUrl => { const styleFile = resolveUrl(styleUrl); let styleContent = fs.readFileSync(styleFile, encode); if (/\.(scss)$/i.test(styleUrl)) { styleContent = compileSass(styleContent, styleFile); } const shortenedStyle = styleContent .replace(/\'/g, '\\\'') .replace(/([\n\r]\s*)+/gm, ' '); return `'${shortenedStyle}'`; }).join(',\n') + ']'; }); } function compileSass(content, file) { const result = sass.renderSync({ data: content, file: file, outputStyle: 'compact' }); return result.css.toString(); } function removeModuleId(content) { return content.replace(/\s*moduleId:\s*module\.id\s*,?\s*/gm, ''); } function inline(content) { return [ inlineTemplate, inlineStyles, removeModuleId ].reduce((content, fn) => fn(content), content); } if (/\.(component.ts)$/i.test(filePath)) { let fileContent = file.contents.toString(); fileContent = inline(fileContent); file.contents = new Buffer(fileContent); } return callback(null, file); })).pipe(gulp.dest(libPath)); }); 
gulp.task('default', ['inline']);
gulpfile.js
const path = require('path'); const gulp = require('gulp'); const through = require('through2'); const sass = require('node-sass'); const fs = require('fs'); const libPath = 'tmp'; gulp.task('inline', function () { const globs = [ path.join(libPath, '**', '*.ts'), '!' + path.join(libPath, '**', '*.spec.ts') ]; gulp.src(globs).pipe(through.obj((file, encode, callback) => { const filePath = file.path; function resolveUrl(url) { return path.join(libPath, url); } function inlineTemplate(content) { return content.replace(/templateUrl:\s*'([^']+?\.html)'/g, (matchers, templateUrl) => { const templateFile = resolveUrl(templateUrl); const templateContent = fs.readFileSync(templateFile, encode); const shortenedTemplate = templateContent .replace(/([\n\r]\s*)+/gm, ' '); return `template: '${shortenedTemplate}'`; }); } function inlineStyles(content) { return content.replace(/styleUrls:\s*(\[[\s\S]*?\])/gm, (matchers, styleUrls) => { const urls = eval(styleUrls); return 'styles: [' + urls.map(styleUrl => { const styleFile = resolveUrl(styleUrl); let styleContent = fs.readFileSync(styleFile, encode); if (/\.(scss)$/i.test(styleUrl)) { styleContent = compileSass(styleContent, styleFile); } const shortenedStyle = styleContent .replace(/([\n\r]\s*)+/gm, ' '); return `'${shortenedStyle}'`; }).join(',\n') + ']'; }); } function compileSass(content, file) { const result = sass.renderSync({ data: content, file: file, outputStyle: 'compact' }); return result.css.toString(); } function removeModuleId(content) { return content.replace(/\s*moduleId:\s*module\.id\s*,?\s*/gm, ''); } function inline(content) { return [ inlineTemplate, inlineStyles, removeModuleId ].reduce((content, fn) => fn(content), content); } if (/\.(component.ts)$/i.test(filePath)) { let fileContent = file.contents.toString(); fileContent = inline(fileContent); file.contents = new Buffer(fileContent); } return callback(null, file); })).pipe(gulp.dest(libPath)); }); gulp.task('default', ['inline']);
Update gulp script.
gulpfile.js
Update gulp script.
<ide><path>ulpfile.js <ide> const templateFile = resolveUrl(templateUrl); <ide> const templateContent = fs.readFileSync(templateFile, encode); <ide> const shortenedTemplate = templateContent <add> .replace(/\'/g, '\\\'') <ide> .replace(/([\n\r]\s*)+/gm, ' '); <ide> return `template: '${shortenedTemplate}'`; <ide> }); <ide> styleContent = compileSass(styleContent, styleFile); <ide> } <ide> const shortenedStyle = styleContent <add> .replace(/\'/g, '\\\'') <ide> .replace(/([\n\r]\s*)+/gm, ' '); <ide> return `'${shortenedStyle}'`; <ide> }).join(',\n') + ']';
JavaScript
mit
d2ff7ea6aff6cbe150504713b17961a3cd70eab7
0
julianlloyd/scrollReveal.js,jlmakes/scrollReveal.js
import { getNode, getNodes, logger } from '../../utils/core' import { deepAssign, each, nextUniqueId } from '../../utils/generic' import { isMobile } from '../../utils/browser' import style from '../functions/style' import initialize from '../functions/initialize' export default function reveal (target, options, interval, sync) { /** * The reveal method has an optional 2nd parameter, * so here we just shuffle things around to accept * the interval being passed as the 2nd argument. */ if (typeof options === 'number') { interval = parseInt(options) options = {} } else { options = options || {} } const config = deepAssign({}, this.defaults, options) const containers = this.store.containers const container = getNode(config.container) const targets = getNodes(target, container) if (!targets.length) { logger('Reveal aborted.', 'Reveal cannot be performed on 0 elements.') return this } /** * Verify our platform matches our platform configuration. */ if (!config.mobile && isMobile() || !config.desktop && !isMobile()) { logger('Reveal aborted.', 'This platform has been disabled.') return this } /** * Sequence intervals must be at least 16ms (60fps) * but can be negative for sequencing in reverse. 
*/ let sequence if (!isNaN(interval)) { if (Math.abs(interval) >= 16) { const sequenceId = nextUniqueId() sequence = { elementIds: [], firstActiveIndex: 0, id: sequenceId, interval, lastActiveIndex: 0, } } else { logger('Reveal failed.', 'Sequence intervals can not be between -16 and 16.') return this } } let containerId each(containers, storedContainer => { if (!containerId && storedContainer.node === container) { containerId = storedContainer.id } }) if (isNaN(containerId)) { containerId = nextUniqueId() } try { const elements = targets.map(node => { const elementId = node.getAttribute('data-sr-id') || nextUniqueId() const element = { id: elementId, config, containerId, node, } if (sequence) { element.sequence = { id: sequence.id, index: sequence.elementIds.length, } sequence.elementIds.push(element.id) } element.styles = style(element) return element }) /** * Modifying the DOM via setAttribute needs to be handled * separately from reading computed styles in the map above * for the browser to batch DOM changes (limiting reflows) */ each(elements, element => { this.store.elements[element.id] = element element.node.setAttribute('data-sr-id', element.id) }) } catch (error) { logger('Reveal failed.', error.message) return this } containers[containerId] = containers[containerId] || { id: containerId, node: container, } if (sequence) { this.store.sequences[sequence.id] = sequence } /** * If reveal wasn't invoked by sync, we want to make * sure to add this call to the history. */ if (!sync) { this.store.history.push({ target, options, interval }) /** * Push initialization to the event queue, giving chained * reveal calls time to be interpretted. */ if (this.initTimeout) { window.clearTimeout(this.initTimeout) } this.initTimeout = window.setTimeout(initialize.bind(this), 0) } return this }
src/instance/methods/reveal.js
import { getNode, getNodes, logger } from '../../utils/core' import { deepAssign, each, nextUniqueId } from '../../utils/generic' import { isMobile } from '../../utils/browser' import style from '../functions/style' import initialize from '../functions/initialize' export default function reveal (target, options, interval, sync) { /** * The reveal method has an optional 2nd parameter, * so here we just shuffle things around to accept * the interval being passed as the 2nd argument. */ if (typeof options === 'number') { interval = parseInt(options) options = {} } else { options = options || {} } const config = deepAssign({}, this.defaults, options) const containers = this.store.containers const container = getNode(config.container) const targets = getNodes(target, container) if (!targets.length) { logger('Reveal aborted.', 'Reveal cannot be performed on 0 elements.') return this } /** * Verify our platform matches our platform configuration. */ if (!config.mobile && isMobile() || !config.desktop && !isMobile()) { logger('Reveal aborted.', 'This platform has been disabled.') return this } /** * Sequence intervals must be at least 16ms (60fps) * but can be negative for sequencing in reverse. 
*/ let sequence if (!isNaN(interval)) { if (Math.abs(interval) >= 16) { const sequenceId = nextUniqueId() sequence = { elementIds: [], firstActiveIndex: 0, id: sequenceId, interval, lastActiveIndex: 0, } } else { logger('Reveal failed.', 'Sequence intervals can not be between -16 and 16.') return this } } let containerId each(containers, (storedContainer, id) => { if (storedContainer.node === container) { containerId = parseInt(id) } }) if (isNaN(containerId)) { containerId = nextUniqueId() } try { const elements = targets.map(node => { const elementId = node.getAttribute('data-sr-id') || nextUniqueId() const element = { id: elementId, config, containerId, node, } if (sequence) { element.sequence = { id: sequence.id, index: sequence.elementIds.length, } sequence.elementIds.push(element.id) } element.styles = style(element) return element }) /** * Modifying the DOM via setAttribute needs to be handled * separately from reading computed styles in the map above * for the browser to batch DOM changes (limiting reflows) */ each(elements, element => { this.store.elements[element.id] = element element.node.setAttribute('data-sr-id', element.id) }) } catch (error) { logger('Reveal failed.', error.message) return this } containers[containerId] = containers[containerId] || { id: containerId, node: container, } if (sequence) { this.store.sequences[sequence.id] = sequence } /** * If reveal wasn't invoked by sync, we want to make * sure to add this call to the history. */ if (!sync) { this.store.history.push({ target, options, interval }) /** * Push initialization to the event queue, giving chained * reveal calls time to be interpretted. */ if (this.initTimeout) { window.clearTimeout(this.initTimeout) } this.initTimeout = window.setTimeout(initialize.bind(this), 0) } return this }
refactor getting existing container id during reveal
src/instance/methods/reveal.js
refactor getting existing container id during reveal
<ide><path>rc/instance/methods/reveal.js <ide> } <ide> <ide> let containerId <del> each(containers, (storedContainer, id) => { <del> if (storedContainer.node === container) { <del> containerId = parseInt(id) <add> each(containers, storedContainer => { <add> if (!containerId && storedContainer.node === container) { <add> containerId = storedContainer.id <ide> } <ide> }) <ide>
Java
apache-2.0
8f5b9a4742d792ed010963fd6a563b7fea4e3d61
0
floviolleau/vector-android,vector-im/riot-android,noepitome/neon-android,vector-im/vector-android,riot-spanish/riot-android,floviolleau/vector-android,noepitome/neon-android,noepitome/neon-android,noepitome/neon-android,riot-spanish/riot-android,vt0r/vector-android,floviolleau/vector-android,vector-im/vector-android,vt0r/vector-android,vector-im/riot-android,vector-im/riot-android,vector-im/vector-android,vector-im/vector-android,vector-im/riot-android,vt0r/vector-android,riot-spanish/riot-android,vector-im/riot-android,riot-spanish/riot-android
package im.vector; import android.app.Activity; import android.content.Context; import android.content.Intent; import org.matrix.androidsdk.HomeserverConnectionConfig; import org.matrix.androidsdk.MXDataHandler; import org.matrix.androidsdk.MXSession; import org.matrix.androidsdk.data.IMXStore; import org.matrix.androidsdk.data.MXFileStore; import org.matrix.androidsdk.data.MXMemoryStore; import org.matrix.androidsdk.db.MXLatestChatMessageCache; import org.matrix.androidsdk.db.MXMediasCache; import org.matrix.androidsdk.rest.model.login.Credentials; import im.vector.activity.CommonActivityUtils; import im.vector.activity.SplashActivity; import im.vector.gcm.GcmRegistrationManager; import im.vector.store.LoginStorage; import im.vector.util.RageShake; import java.util.ArrayList; import java.util.Collection; /** * Singleton to control access to the Matrix SDK and providing point of control for MXSessions. */ public class Matrix { private static Matrix instance = null; private LoginStorage mLoginStorage; private ArrayList<MXSession> mMXSessions; private GcmRegistrationManager mGcmRegistrationManager; private Context mAppContext; public boolean mHasBeenDisconnected = false; protected Matrix(Context appContext) { mAppContext = appContext.getApplicationContext(); mLoginStorage = new LoginStorage(mAppContext); mMXSessions = new ArrayList<MXSession>(); mGcmRegistrationManager = new GcmRegistrationManager(mAppContext); RageShake.getInstance().start(mAppContext); } public synchronized static Matrix getInstance(Context appContext) { if ((instance == null) && (null != appContext)) { instance = new Matrix(appContext); } return instance; } public LoginStorage getLoginStorage() { return mLoginStorage; } /** * Static method top the MXSession list * @param context the application content * @return the sessions list */ public static ArrayList<MXSession> getMXSessions(Context context) { if ((null != context) && (null != instance)) { return instance.getSessions(); } else { return null; 
} } /** * @return The list of sessions */ public ArrayList<MXSession> getSessions() { ArrayList<MXSession> sessions = new ArrayList<MXSession>(); synchronized (instance) { if (null != mMXSessions) { sessions = new ArrayList<MXSession>(mMXSessions); } } return sessions; } /** * Retrieve the default session if one exists. * * The default session may be user-configured, or it may be the last session the user was using. * @return The default session or null. */ public synchronized MXSession getDefaultSession() { ArrayList<MXSession> sessions = getSessions(); if (sessions.size() > 0) { return sessions.get(0); } ArrayList<HomeserverConnectionConfig> hsConfigList = mLoginStorage.getCredentialsList(); // any account ? if ((hsConfigList == null) || (hsConfigList.size() == 0)) { return null; } ArrayList<String> matrixIds = new ArrayList<String>(); sessions = new ArrayList<MXSession>(); for(HomeserverConnectionConfig config: hsConfigList) { // avoid duplicated accounts. if (config.getCredentials() != null && matrixIds.indexOf(config.getCredentials().userId) < 0) { MXSession session = createSession(config); sessions.add(session); matrixIds.add(config.getCredentials().userId); } } synchronized (instance) { mMXSessions = sessions; } return sessions.get(0); } /** * Static method to return a MXSession from an account Id. * @param matrixId the matrix id * @return the MXSession. */ public static MXSession getMXSession(Context context, String matrixId) { return Matrix.getInstance(context.getApplicationContext()).getSession(matrixId); } /** *Retrieve a session from an user Id. * The application should be able to manage multi session. * @param matrixId the matrix id * @return the MXsession if it exists. 
*/ public synchronized MXSession getSession(String matrixId) { if (null != matrixId) { ArrayList<MXSession> sessions; synchronized (this) { sessions = getSessions(); } for (MXSession session : sessions) { Credentials credentials = session.getCredentials(); if ((null != credentials) && (credentials.userId.equals(matrixId))) { return session; } } } return getDefaultSession(); } /** * Add an error listener to each sessions * @param activity the activity. */ public static void setSessionErrorListener(Activity activity) { if ((null != instance) && (null != activity)) { Collection<MXSession> sessions = getMXSessions(activity); for(MXSession session : sessions) { if (session.isActive()) { session.setFailureCallback(new ErrorListener(session, activity)); } } } } /** * Remove the sessions error listener to each */ public static void removeSessionErrorListener(Activity activity) { if ((null != instance) && (null != activity)) { Collection<MXSession> sessions = getMXSessions(activity); for(MXSession session : sessions) { if (session.isActive()) { session.setFailureCallback(null); } } } } /** * Return the used media caches. * This class can inherited to customized it. * @return the mediasCache. */ public MXMediasCache getMediasCache() { if (getSessions().size() > 0) { return getSessions().get(0).getMediasCache(); } return null; } /** * Return the used latestMessages caches. * This class can inherited to customized it. * @return the latest messages cache. */ public MXLatestChatMessageCache getDefaultLatestChatMessageCache() { if (getSessions().size() > 0) { return getSessions().get(0).getLatestChatMessageCache(); } return null; } /** * * @return true if the matrix client instance defines a valid session */ public static Boolean hasValidSessions() { return (null != instance) && (null != instance.mMXSessions) && (instance.mMXSessions.size() > 0); } /** * Refresh the sessions push rules. 
*/ public void refreshPushRules() { ArrayList<MXSession> sessions = null; synchronized (this) { sessions = getSessions(); } for(MXSession session : sessions) { if (null != session.getDataHandler()) { session.getDataHandler().refreshPushRules(); } } } /** * Clear a session. * @param context the context. * @param session the session to clear. * @param clearCredentials true to clear the credentials. */ public synchronized void clearSession(Context context, MXSession session, Boolean clearCredentials) { if (clearCredentials) { mLoginStorage.removeCredentials(session.getHomeserverConfig()); } session.clear(context); synchronized (instance) { mMXSessions.remove(session); } } /** * Clear any existing session. * @param context the context. * @param clearCredentials true to clear the credentials. */ public synchronized void clearSessions(Context context, Boolean clearCredentials) { synchronized (instance) { while (mMXSessions.size() > 0) { clearSession(context, mMXSessions.get(0), clearCredentials); } } } /** * Set a default session. * @param session The session to store as the default session. */ public synchronized void addSession(MXSession session) { mLoginStorage.addCredentials(session.getHomeserverConfig()); synchronized (instance) { mMXSessions.add(session); } } /** * Creates an MXSession from some credentials. * @param hsConfig The HomeserverConnectionConfig to create a session from. * @return The session. */ public MXSession createSession(HomeserverConnectionConfig hsConfig) { return createSession(mAppContext, hsConfig); } /** * Creates an MXSession from some credentials. * @param context the context. * @param hsConfig The HomeserverConnectionConfig to create a session from. * @return The session. 
*/ public MXSession createSession(Context context, HomeserverConnectionConfig hsConfig) { IMXStore store; Credentials credentials = hsConfig.getCredentials(); if (true) { store = new MXFileStore(hsConfig, context); } else { store = new MXMemoryStore(hsConfig.getCredentials()); } return new MXSession(hsConfig, new MXDataHandler(store, credentials), mAppContext); } /** * Reload the matrix sessions. * The session caches are cleared before being reloaded. * Any opened activity is closed and the application switches to the splash screen. * @param fromActivity the caller activity */ public void reloadSessions(Activity fromActivity) { ArrayList<MXSession> sessions = getMXSessions(fromActivity); for(MXSession session : sessions) { CommonActivityUtils.logout(fromActivity, session, false); } clearSessions(fromActivity, false); synchronized (instance) { // build a new sessions list ArrayList<HomeserverConnectionConfig> configs = mLoginStorage.getCredentialsList(); for(HomeserverConnectionConfig config : configs) { MXSession session = createSession(config); mMXSessions.add(session); } } Intent intent = new Intent(fromActivity, SplashActivity.class); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK); fromActivity.startActivity(intent); fromActivity.finish(); } public GcmRegistrationManager getSharedGcmRegistrationManager() { return mGcmRegistrationManager; } }
console/src/main/java/im/vector/Matrix.java
package im.vector; import android.app.Activity; import android.content.Context; import android.content.Intent; import org.matrix.androidsdk.HomeserverConnectionConfig; import org.matrix.androidsdk.MXDataHandler; import org.matrix.androidsdk.MXSession; import org.matrix.androidsdk.data.IMXStore; import org.matrix.androidsdk.data.MXFileStore; import org.matrix.androidsdk.data.MXMemoryStore; import org.matrix.androidsdk.db.MXLatestChatMessageCache; import org.matrix.androidsdk.db.MXMediasCache; import org.matrix.androidsdk.rest.model.login.Credentials; import im.vector.activity.CommonActivityUtils; import im.vector.activity.SplashActivity; import im.vector.gcm.GcmRegistrationManager; import im.vector.store.LoginStorage; import im.vector.util.RageShake; import java.util.ArrayList; import java.util.Collection; /** * Singleton to control access to the Matrix SDK and providing point of control for MXSessions. */ public class Matrix { private static Matrix instance = null; private LoginStorage mLoginStorage; private ArrayList<MXSession> mMXSessions; private GcmRegistrationManager mGcmRegistrationManager; private Context mAppContext; public boolean mHasBeenDisconnected = false; protected Matrix(Context appContext) { mAppContext = appContext.getApplicationContext(); mLoginStorage = new LoginStorage(mAppContext); mMXSessions = new ArrayList<MXSession>(); mGcmRegistrationManager = new GcmRegistrationManager(mAppContext); RageShake.getInstance().start(mAppContext); } public synchronized static Matrix getInstance(Context appContext) { if ((instance == null) && (null != appContext)) { instance = new Matrix(appContext); } return instance; } public LoginStorage getLoginStorage() { return mLoginStorage; } /** * Static method top the MXSession list * @param context the application content * @return the sessions list */ public static ArrayList<MXSession> getMXSessions(Context context) { if ((null != context) && (null != instance)) { return instance.getSessions(); } else { return null; 
} } /** * @return The list of sessions */ public ArrayList<MXSession> getSessions() { ArrayList<MXSession> sessions = new ArrayList<MXSession>(); synchronized (instance) { if (null != mMXSessions) { sessions = new ArrayList<MXSession>(mMXSessions); } } return sessions; } /** * Retrieve the default session if one exists. * * The default session may be user-configured, or it may be the last session the user was using. * @return The default session or null. */ public synchronized MXSession getDefaultSession() { ArrayList<MXSession> sessions = getSessions(); if (sessions.size() > 0) { return sessions.get(0); } ArrayList<HomeserverConnectionConfig> hsConfigList = mLoginStorage.getCredentialsList(); // any account ? if ((hsConfigList == null) || (hsConfigList.size() == 0)) { return null; } ArrayList<String> matrixIds = new ArrayList<String>(); sessions = new ArrayList<MXSession>(); for(HomeserverConnectionConfig config: hsConfigList) { // avoid duplicated accounts. if (config.getCredentials() != null && matrixIds.indexOf(config.getCredentials().userId) < 0) { MXSession session = createSession(config); sessions.add(session); matrixIds.add(config.getCredentials().userId); } } synchronized (instance) { mMXSessions = sessions; } return sessions.get(0); } /** * Static method to return a MXSession from an account Id. * @param matrixId the matrix id * @return the MXSession. */ public static MXSession getMXSession(Context context, String matrixId) { return Matrix.getInstance(context.getApplicationContext()).getSession(matrixId); } /** *Retrieve a session from an user Id. * The application should be able to manage multi session. * @param matrixId the matrix id * @return the MXsession if it exists. 
*/ public synchronized MXSession getSession(String matrixId) { if (null != matrixId) { ArrayList<MXSession> sessions; synchronized (this) { sessions = getSessions(); } for (MXSession session : sessions) { Credentials credentials = session.getCredentials(); if ((null != credentials) && (credentials.userId.equals(matrixId))) { return session; } } } return getDefaultSession(); } /** * Add an error listener to each sessions * @param activity the activity. */ public static void setSessionErrorListener(Activity activity) { if ((null != instance) && (null != activity)) { Collection<MXSession> sessions = getMXSessions(activity); for(MXSession session : sessions) { if (session.isActive()) { session.setFailureCallback(new ErrorListener(session, activity)); } } } } /** * Remove the sessions error listener to each */ public static void removeSessionErrorListener(Activity activity) { if ((null != instance) && (null != activity)) { Collection<MXSession> sessions = getMXSessions(activity); for(MXSession session : sessions) { if (session.isActive()) { session.setFailureCallback(null); } } } } /** * Return the used media caches. * This class can inherited to customized it. * @return the mediasCache. */ public MXMediasCache getMediasCache() { if (getSessions().size() > 0) { return getSessions().get(0).getMediasCache(); } return null; } /** * Return the used latestMessages caches. * This class can inherited to customized it. * @return the latest messages cache. */ public MXLatestChatMessageCache getDefaultLatestChatMessageCache() { if (getSessions().size() > 0) { return getSessions().get(0).getLatestChatMessageCache(); } return null; } /** * * @return true if the matrix client instance defines a valid session */ public static Boolean hasValidSessions() { return (null != instance) && (instance.mMXSessions.size() > 0); } /** * Refresh the sessions push rules. 
*/ public void refreshPushRules() { ArrayList<MXSession> sessions = null; synchronized (this) { sessions = getSessions(); } for(MXSession session : sessions) { if (null != session.getDataHandler()) { session.getDataHandler().refreshPushRules(); } } } /** * Clear a session. * @param context the context. * @param session the session to clear. * @param clearCredentials true to clear the credentials. */ public synchronized void clearSession(Context context, MXSession session, Boolean clearCredentials) { if (clearCredentials) { mLoginStorage.removeCredentials(session.getHomeserverConfig()); } session.clear(context); synchronized (instance) { mMXSessions.remove(session); } } /** * Clear any existing session. * @param context the context. * @param clearCredentials true to clear the credentials. */ public synchronized void clearSessions(Context context, Boolean clearCredentials) { synchronized (instance) { while (mMXSessions.size() > 0) { clearSession(context, mMXSessions.get(0), clearCredentials); } } } /** * Set a default session. * @param session The session to store as the default session. */ public synchronized void addSession(MXSession session) { mLoginStorage.addCredentials(session.getHomeserverConfig()); synchronized (instance) { mMXSessions.add(session); } } /** * Creates an MXSession from some credentials. * @param hsConfig The HomeserverConnectionConfig to create a session from. * @return The session. */ public MXSession createSession(HomeserverConnectionConfig hsConfig) { return createSession(mAppContext, hsConfig); } /** * Creates an MXSession from some credentials. * @param context the context. * @param hsConfig The HomeserverConnectionConfig to create a session from. * @return The session. 
*/ public MXSession createSession(Context context, HomeserverConnectionConfig hsConfig) { IMXStore store; Credentials credentials = hsConfig.getCredentials(); if (true) { store = new MXFileStore(hsConfig, context); } else { store = new MXMemoryStore(hsConfig.getCredentials()); } return new MXSession(hsConfig, new MXDataHandler(store, credentials), mAppContext); } /** * Reload the matrix sessions. * The session caches are cleared before being reloaded. * Any opened activity is closed and the application switches to the splash screen. * @param fromActivity the caller activity */ public void reloadSessions(Activity fromActivity) { ArrayList<MXSession> sessions = getMXSessions(fromActivity); for(MXSession session : sessions) { CommonActivityUtils.logout(fromActivity, session, false); } clearSessions(fromActivity, false); synchronized (instance) { // build a new sessions list ArrayList<HomeserverConnectionConfig> configs = mLoginStorage.getCredentialsList(); for(HomeserverConnectionConfig config : configs) { MXSession session = createSession(config); mMXSessions.add(session); } } Intent intent = new Intent(fromActivity, SplashActivity.class); intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK | Intent.FLAG_ACTIVITY_CLEAR_TASK); fromActivity.startActivity(intent); fromActivity.finish(); } public GcmRegistrationManager getSharedGcmRegistrationManager() { return mGcmRegistrationManager; } }
fix a crash.
console/src/main/java/im/vector/Matrix.java
fix a crash.
<ide><path>onsole/src/main/java/im/vector/Matrix.java <ide> * @return true if the matrix client instance defines a valid session <ide> */ <ide> public static Boolean hasValidSessions() { <del> return (null != instance) && (instance.mMXSessions.size() > 0); <add> return (null != instance) && (null != instance.mMXSessions) && (instance.mMXSessions.size() > 0); <ide> } <ide> <ide> /**
Java
mit
error: pathspec 'TGOCMessages/tgocmessage/src/androidTest/java/br/com/tgoc/tgocmessage/TGOCMessageTest.java' did not match any file(s) known to git
cdac51b4bd0b96fb6fd1431c57ad2f7ef7e13af1
1
two-guys-one-code/TGOCMessagesActivity
package br.com.tgoc.tgocmessage; import android.support.test.InstrumentationRegistry; import android.support.test.runner.AndroidJUnit4; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; /** * Created by Rodrigo.Cavalcante on 30/11/16. */ @RunWith(AndroidJUnit4.class) public class TGOCMessageTest { TGOCMessage message = null; @Before public void setup() {} @Test public void test_date() { message = new TGOCMessage(); assertNotEquals(message.getDate(), null); } @Test public void test_senderID() { message = new TGOCMessage("id", null); assertEquals(message.senderId, "id"); } @Test public void test_nullSenderID() { message = new TGOCMessage(null, null); assertEquals(message.senderId, null); } @Test public void test_text() { message = new TGOCMessage(null, "text"); assertEquals(message.text, "text"); } @Test public void test_nullText() { message = new TGOCMessage(null, null); assertEquals(message.text, null); } @Test public void test_senderDisplayName() { message = new TGOCMessage(null, null, "display name"); assertEquals(message.senderDisplayName, "display name"); } @Test public void test_nullSenderDisplayName() { message = new TGOCMessage(null, null, null); assertEquals(message.senderDisplayName, null); } @Test public void test_falseIsMediaType() { message = new TGOCMessage(); assertEquals(message.isMediaMessage, false); } @Test public void test_isMediaType() { message = new TGOCMessage(null, null, null, null); assertEquals(message.isMediaMessage, true); } }
TGOCMessages/tgocmessage/src/androidTest/java/br/com/tgoc/tgocmessage/TGOCMessageTest.java
add tests to TGOCMessage
TGOCMessages/tgocmessage/src/androidTest/java/br/com/tgoc/tgocmessage/TGOCMessageTest.java
add tests to TGOCMessage
<ide><path>GOCMessages/tgocmessage/src/androidTest/java/br/com/tgoc/tgocmessage/TGOCMessageTest.java <add>package br.com.tgoc.tgocmessage; <add> <add>import android.support.test.InstrumentationRegistry; <add>import android.support.test.runner.AndroidJUnit4; <add> <add>import org.junit.Before; <add>import org.junit.Test; <add>import org.junit.runner.RunWith; <add> <add>import static org.junit.Assert.assertEquals; <add>import static org.junit.Assert.assertNotEquals; <add> <add>/** <add> * Created by Rodrigo.Cavalcante on 30/11/16. <add> */ <add>@RunWith(AndroidJUnit4.class) <add>public class TGOCMessageTest { <add> <add> TGOCMessage message = null; <add> <add> @Before <add> public void setup() {} <add> <add> @Test <add> public void test_date() { <add> message = new TGOCMessage(); <add> <add> assertNotEquals(message.getDate(), null); <add> } <add> <add> @Test <add> public void test_senderID() { <add> message = new TGOCMessage("id", null); <add> <add> assertEquals(message.senderId, "id"); <add> } <add> <add> @Test <add> public void test_nullSenderID() { <add> message = new TGOCMessage(null, null); <add> <add> assertEquals(message.senderId, null); <add> } <add> <add> @Test <add> public void test_text() { <add> message = new TGOCMessage(null, "text"); <add> <add> assertEquals(message.text, "text"); <add> } <add> <add> @Test <add> public void test_nullText() { <add> message = new TGOCMessage(null, null); <add> <add> assertEquals(message.text, null); <add> } <add> <add> @Test <add> public void test_senderDisplayName() { <add> message = new TGOCMessage(null, null, "display name"); <add> <add> assertEquals(message.senderDisplayName, "display name"); <add> } <add> <add> @Test <add> public void test_nullSenderDisplayName() { <add> message = new TGOCMessage(null, null, null); <add> <add> assertEquals(message.senderDisplayName, null); <add> } <add> <add> @Test <add> public void test_falseIsMediaType() { <add> message = new TGOCMessage(); <add> <add> 
assertEquals(message.isMediaMessage, false); <add> } <add> <add> @Test <add> public void test_isMediaType() { <add> message = new TGOCMessage(null, null, null, null); <add> <add> assertEquals(message.isMediaMessage, true); <add> } <add>}
Java
mit
7cdaffbe5c04128e78abb2a34f617085c1d7bc5a
0
iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable
/* * Copyright (c) 2010, The Broad Institute * * Permission is hereby granted, free of charge, to any person * obtaining a copy of this software and associated documentation * files (the "Software"), to deal in the Software without * restriction, including without limitation the rights to use, * copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following * conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. 
*/ package org.broadinstitute.sting.gatk; import net.sf.picard.filter.SamRecordFilter; import net.sf.picard.reference.IndexedFastaSequenceFile; import net.sf.picard.reference.ReferenceSequenceFile; import net.sf.samtools.*; import org.apache.log4j.Logger; import org.broadinstitute.sting.commandline.ArgumentException; import org.broadinstitute.sting.commandline.ArgumentSource; import org.broad.tribble.util.variantcontext.VariantContext; import org.broadinstitute.sting.commandline.CommandLineUtils; import org.broadinstitute.sting.commandline.ParsingEngine; import org.broadinstitute.sting.gatk.arguments.GATKArgumentCollection; import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; import org.broadinstitute.sting.gatk.datasources.sample.Sample; import org.broadinstitute.sting.gatk.datasources.sample.SampleDataSource; import org.broadinstitute.sting.gatk.datasources.shards.MonolithicShardStrategy; import org.broadinstitute.sting.gatk.datasources.shards.Shard; import org.broadinstitute.sting.gatk.datasources.shards.ShardStrategy; import org.broadinstitute.sting.gatk.datasources.shards.ShardStrategyFactory; import org.broadinstitute.sting.gatk.datasources.simpleDataSources.ReferenceDataSource; import org.broadinstitute.sting.gatk.datasources.simpleDataSources.ReferenceOrderedDataSource; import org.broadinstitute.sting.gatk.datasources.simpleDataSources.SAMDataSource; import org.broadinstitute.sting.gatk.datasources.simpleDataSources.SAMReaderID; import org.broadinstitute.sting.gatk.executive.MicroScheduler; import org.broadinstitute.sting.gatk.filters.FilterManager; import org.broadinstitute.sting.gatk.filters.ReadGroupBlackListFilter; import org.broadinstitute.sting.gatk.filters.SamRecordHeaderFilter; import org.broadinstitute.sting.gatk.io.OutputTracker; import org.broadinstitute.sting.gatk.io.stubs.Stub; import org.broadinstitute.sting.gatk.refdata.tracks.RMDTrack; import org.broadinstitute.sting.gatk.refdata.tracks.builders.RMDTrackBuilder; import 
org.broadinstitute.sting.gatk.refdata.utils.RMDIntervalGenerator; import org.broadinstitute.sting.gatk.refdata.utils.RMDTriplet; import org.broadinstitute.sting.gatk.walkers.*; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.GenomeLocSortedSet; import org.broadinstitute.sting.utils.SequenceDictionaryUtils; import org.broadinstitute.sting.utils.baq.BAQ; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.interval.IntervalMergingRule; import org.broadinstitute.sting.utils.interval.IntervalUtils; import org.broadinstitute.sting.utils.text.XReadLines; import java.io.File; import java.io.FileNotFoundException; import java.util.*; /** * A GenomeAnalysisEngine that runs a specified walker. */ public class GenomeAnalysisEngine { /** * our log, which we want to capture anything from this class */ private static Logger logger = Logger.getLogger(GenomeAnalysisEngine.class); /** * The GATK command-line argument parsing code. */ private ParsingEngine parsingEngine; /** * The genomeLocParser can create and parse GenomeLocs. */ private GenomeLocParser genomeLocParser; /** * Accessor for sharded read data. */ private SAMDataSource readsDataSource = null; /** * Accessor for sharded reference data. */ private ReferenceDataSource referenceDataSource = null; /** * Accessor for sample metadata */ private SampleDataSource sampleDataSource = null; /** * Accessor for sharded reference-ordered data. */ private List<ReferenceOrderedDataSource> rodDataSources; // our argument collection private GATKArgumentCollection argCollection; /** * Collection of intervals used by the engine. */ private GenomeLocSortedSet intervals = null; /** * Collection of inputs used by the engine. 
*/ private Map<ArgumentSource, Object> inputs = new HashMap<ArgumentSource, Object>(); /** * Collection of outputs used by the engine. */ private Collection<Stub<?>> outputs = new ArrayList<Stub<?>>(); /** * Collection of the filters applied to the input data. */ private Collection<SamRecordFilter> filters; /** * our walker manager */ private final WalkerManager walkerManager = new WalkerManager(); private Walker<?, ?> walker; public void setWalker(Walker<?, ?> walker) { this.walker = walker; } /** * A processed collection of SAM reader identifiers. */ private Collection<SAMReaderID> samReaderIDs = Collections.emptyList(); /** * Set the SAM/BAM files over which to traverse. * @param samReaderIDs Collection of ids to use during this traversal. */ public void setSAMFileIDs(Collection<SAMReaderID> samReaderIDs) { this.samReaderIDs = samReaderIDs; } /** * Collection of reference metadata files over which to traverse. */ private Collection<RMDTriplet> referenceMetaDataFiles; /** * Set the reference metadata files to use for this traversal. * @param referenceMetaDataFiles Collection of files and descriptors over which to traverse. */ public void setReferenceMetaDataFiles(Collection<RMDTriplet> referenceMetaDataFiles) { this.referenceMetaDataFiles = referenceMetaDataFiles; } /** * Actually run the GATK with the specified walker. * * @return the value of this traversal. */ public Object execute() { //HeapSizeMonitor monitor = new HeapSizeMonitor(); //monitor.start(); setStartTime(new java.util.Date()); // validate our parameters if (this.getArguments() == null) { throw new ReviewedStingException("The GATKArgumentCollection passed to GenomeAnalysisEngine can not be null."); } // validate our parameters if (this.walker == null) throw new ReviewedStingException("The walker passed to GenomeAnalysisEngine can not be null."); // Prepare the data for traversal. 
initializeDataSources(); // our microscheduler, which is in charge of running everything MicroScheduler microScheduler = createMicroscheduler(); // create temp directories as necessary initializeTempDirectory(); // create the output streams " initializeOutputStreams(microScheduler.getOutputTracker()); // initialize and validate the interval list initializeIntervals(); validateSuppliedIntervals(); ShardStrategy shardStrategy = getShardStrategy(microScheduler.getReference()); // execute the microscheduler, storing the results Object result = microScheduler.execute(this.walker, shardStrategy); //monitor.stop(); //logger.info(String.format("Maximum heap size consumed: %d",monitor.getMaxMemoryUsed())); return result; } /** * Retrieves an instance of the walker based on the walker name. * * @param walkerName Name of the walker. Must not be null. If the walker cannot be instantiated, an exception will be thrown. * @return An instance of the walker. */ public Walker<?, ?> getWalkerByName(String walkerName) { return walkerManager.createByName(walkerName); } /** * Gets the name of a given walker type. * @param walkerType Type of walker. * @return Name of the walker. */ public String getWalkerName(Class<? extends Walker> walkerType) { return walkerManager.getName(walkerType); } /** * Gets a list of the filters to associate with the given walker. Will NOT initialize the engine with this filters; * the caller must handle that directly. * @return A collection of available filters. 
*/ public Collection<SamRecordFilter> createFilters() { Set<SamRecordFilter> filters = new HashSet<SamRecordFilter>(); filters.addAll(WalkerManager.getReadFilters(walker,this.getFilterManager())); if (this.getArguments().readGroupBlackList != null && this.getArguments().readGroupBlackList.size() > 0) filters.add(new ReadGroupBlackListFilter(this.getArguments().readGroupBlackList)); for(String filterName: this.getArguments().readFilters) filters.add(this.getFilterManager().createByName(filterName)); return Collections.unmodifiableSet(filters); } /** * Allow subclasses and others within this package direct access to the walker manager. * @return The walker manager used by this package. */ protected WalkerManager getWalkerManager() { return walkerManager; } /** * setup a microscheduler * * @return a new microscheduler */ private MicroScheduler createMicroscheduler() { Walker my_walker = this.walker; // the mircoscheduler to return MicroScheduler microScheduler = null; // Temporarily require all walkers to have a reference, even if that reference is not conceptually necessary. 
if ((my_walker instanceof ReadWalker || my_walker instanceof DuplicateWalker || my_walker instanceof ReadPairWalker) && this.getArguments().referenceFile == null) { throw new UserException.CommandLineException("Read-based traversals require a reference file but none was given"); } return MicroScheduler.create(this,my_walker,this.getReadsDataSource(),this.getReferenceDataSource().getReference(),this.getRodDataSources(),this.getArguments().numberOfThreads); } protected DownsamplingMethod getDownsamplingMethod() { GATKArgumentCollection argCollection = this.getArguments(); DownsamplingMethod method; if(argCollection.getDownsamplingMethod() != null) method = argCollection.getDownsamplingMethod(); else if(WalkerManager.getDownsamplingMethod(walker) != null) method = WalkerManager.getDownsamplingMethod(walker); else method = argCollection.getDefaultDownsamplingMethod(); return method; } public BAQ.QualityMode getWalkerBAQQualityMode() { return WalkerManager.getBAQQualityMode(walker); } public BAQ.ApplicationTime getWalkerBAQApplicationTime() { return WalkerManager.getBAQApplicationTime(walker); } protected boolean generateExtendedEvents() { return walker.generateExtendedEvents(); } protected boolean includeReadsWithDeletionAtLoci() { return walker.includeReadsWithDeletionAtLoci(); } /** * Verifies that the supplied set of reads files mesh with what the walker says it requires. */ protected void validateSuppliedReads() { GATKArgumentCollection arguments = this.getArguments(); // Check what the walker says is required against what was provided on the command line. if (WalkerManager.isRequired(walker, DataSource.READS) && (arguments.samFiles == null || arguments.samFiles.size() == 0)) throw new ArgumentException("Walker requires reads but none were provided."); // Check what the walker says is allowed against what was provided on the command line. 
if ((arguments.samFiles != null && arguments.samFiles.size() > 0) && !WalkerManager.isAllowed(walker, DataSource.READS)) throw new ArgumentException("Walker does not allow reads but reads were provided."); } /** * Verifies that the supplied reference file mesh with what the walker says it requires. */ protected void validateSuppliedReference() { GATKArgumentCollection arguments = this.getArguments(); // Check what the walker says is required against what was provided on the command line. // TODO: Temporarily disabling WalkerManager.isRequired check on the reference because the reference is always required. if (/*WalkerManager.isRequired(walker, DataSource.REFERENCE) &&*/ arguments.referenceFile == null) throw new ArgumentException("Walker requires a reference but none was provided."); // Check what the walker says is allowed against what was provided on the command line. if (arguments.referenceFile != null && !WalkerManager.isAllowed(walker, DataSource.REFERENCE)) throw new ArgumentException("Walker does not allow a reference but one was provided."); } /** * Verifies that all required reference-ordered data has been supplied, and any reference-ordered data that was not * 'allowed' is still present. * * @param rods Reference-ordered data to load. */ protected void validateSuppliedReferenceOrderedData(List<ReferenceOrderedDataSource> rods) { // Check to make sure that all required metadata is present. List<RMD> allRequired = WalkerManager.getRequiredMetaData(walker); for (RMD required : allRequired) { boolean found = false; for (ReferenceOrderedDataSource rod : rods) { if (rod.matchesNameAndRecordType(required.name(), required.type())) found = true; } if (!found) throw new ArgumentException(String.format("Walker requires reference metadata to be supplied named '%s' of type '%s', but this metadata was not provided. " + "Please supply the specified metadata file.", required.name(), required.type().getSimpleName())); } // Check to see that no forbidden rods are present. 
for (ReferenceOrderedDataSource rod : rods) { if (!WalkerManager.isAllowed(walker, rod)) throw new ArgumentException(String.format("Walker of type %s does not allow access to metadata: %s", walker.getClass(), rod.getName())); } } protected void validateSuppliedIntervals() { // Only read walkers support '-L unmapped' intervals. Trap and validate any other instances of -L unmapped. if(!(walker instanceof ReadWalker)) { GenomeLocSortedSet intervals = getIntervals(); if(intervals != null && getIntervals().contains(GenomeLoc.UNMAPPED)) throw new ArgumentException("Interval list specifies unmapped region. Only read walkers may include the unmapped region."); } } /** * Get the sharding strategy given a driving data source. * * @param drivingDataSource Data on which to shard. * @return the sharding strategy */ protected ShardStrategy getShardStrategy(ReferenceSequenceFile drivingDataSource) { GenomeLocSortedSet intervals = this.getIntervals(); SAMDataSource readsDataSource = this.getReadsDataSource(); ValidationExclusion exclusions = (readsDataSource != null ? readsDataSource.getReadsInfo().getValidationExclusionList() : null); ReferenceDataSource referenceDataSource = this.getReferenceDataSource(); // Use monolithic sharding if no index is present. Monolithic sharding is always required for the original // sharding system; it's required with the new sharding system only for locus walkers. 
if(readsDataSource != null && !readsDataSource.hasIndex() ) { if(!exclusions.contains(ValidationExclusion.TYPE.ALLOW_UNINDEXED_BAM)) throw new UserException.CommandLineException("The GATK cannot currently process unindexed BAM files without the -U ALLOW_UNINDEXED_BAM"); if(intervals != null && WalkerManager.getWalkerDataSource(walker) != DataSource.REFERENCE) throw new UserException.CommandLineException("Cannot perform interval processing when walker is not driven by reference and no index is available."); Shard.ShardType shardType; if(walker instanceof LocusWalker) { if (readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate) throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.coordinate, "Locus walkers can only traverse coordinate-sorted data. Please resort your input BAM file(s) or set the Sort Order tag in the header appropriately."); shardType = Shard.ShardType.LOCUS; } else if(walker instanceof ReadWalker || walker instanceof DuplicateWalker || walker instanceof ReadPairWalker) shardType = Shard.ShardType.READ; else throw new UserException.CommandLineException("The GATK cannot currently process unindexed BAM files"); List<GenomeLoc> region; if(intervals != null) region = intervals.toList(); else { region = new ArrayList<GenomeLoc>(); for(SAMSequenceRecord sequenceRecord: drivingDataSource.getSequenceDictionary().getSequences()) region.add(getGenomeLocParser().createGenomeLoc(sequenceRecord.getSequenceName(),1,sequenceRecord.getSequenceLength())); } return new MonolithicShardStrategy(readsDataSource,shardType,region); } ShardStrategy shardStrategy = null; ShardStrategyFactory.SHATTER_STRATEGY shardType; long SHARD_SIZE = 100000L; if (walker instanceof LocusWalker) { if (walker instanceof RodWalker) SHARD_SIZE *= 1000; if (intervals != null && !intervals.isEmpty()) { if(!readsDataSource.isEmpty() && readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate) throw new 
UserException.MissortedBAM(SAMFileHeader.SortOrder.coordinate, "Locus walkers can only traverse coordinate-sorted data. Please resort your input BAM file(s) or set the Sort Order tag in the header appropriately."); shardStrategy = ShardStrategyFactory.shatter(readsDataSource, referenceDataSource.getReference(), ShardStrategyFactory.SHATTER_STRATEGY.LOCUS_EXPERIMENTAL, drivingDataSource.getSequenceDictionary(), SHARD_SIZE, getGenomeLocParser(), intervals); } else shardStrategy = ShardStrategyFactory.shatter(readsDataSource, referenceDataSource.getReference(), ShardStrategyFactory.SHATTER_STRATEGY.LOCUS_EXPERIMENTAL, drivingDataSource.getSequenceDictionary(), SHARD_SIZE,getGenomeLocParser()); } else if (walker instanceof ReadWalker || walker instanceof DuplicateWalker) { shardType = ShardStrategyFactory.SHATTER_STRATEGY.READS_EXPERIMENTAL; if (intervals != null && !intervals.isEmpty()) { shardStrategy = ShardStrategyFactory.shatter(readsDataSource, referenceDataSource.getReference(), shardType, drivingDataSource.getSequenceDictionary(), SHARD_SIZE, getGenomeLocParser(), intervals); } else { shardStrategy = ShardStrategyFactory.shatter(readsDataSource, referenceDataSource.getReference(), shardType, drivingDataSource.getSequenceDictionary(), SHARD_SIZE, getGenomeLocParser()); } } else if (walker instanceof ReadPairWalker) { if(readsDataSource != null && readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.queryname) throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.queryname, "Read pair walkers can only walk over query name-sorted data. 
Please resort your input BAM file."); if(intervals != null && !intervals.isEmpty()) throw new UserException.CommandLineException("Pairs traversal cannot be used in conjunction with intervals."); shardStrategy = ShardStrategyFactory.shatter(readsDataSource, referenceDataSource.getReference(), ShardStrategyFactory.SHATTER_STRATEGY.READS_EXPERIMENTAL, drivingDataSource.getSequenceDictionary(), SHARD_SIZE, getGenomeLocParser()); } else throw new ReviewedStingException("Unable to support walker of type" + walker.getClass().getName()); return shardStrategy; } protected boolean flashbackData() { return walker instanceof ReadWalker; } /** * Create the temp directory if it doesn't exist. */ private void initializeTempDirectory() { File tempDir = new File(System.getProperty("java.io.tmpdir")); tempDir.mkdirs(); } /** * Initialize the output streams as specified by the user. * * @param outputTracker the tracker supplying the initialization data. */ private void initializeOutputStreams(OutputTracker outputTracker) { for (Map.Entry<ArgumentSource, Object> input : getInputs().entrySet()) outputTracker.addInput(input.getKey(), input.getValue()); for (Stub<?> stub : getOutputs()) outputTracker.addOutput(stub); outputTracker.prepareWalker(walker, getArguments().strictnessLevel); } public ReferenceDataSource getReferenceDataSource() { return referenceDataSource; } public GenomeLocParser getGenomeLocParser() { return genomeLocParser; } /** * Manage lists of filters. */ private final FilterManager filterManager = new FilterManager(); private Date startTime = null; // the start time for execution public void setParser(ParsingEngine parsingEngine) { this.parsingEngine = parsingEngine; } /** * Explicitly set the GenomeLocParser, for unit testing. * @param genomeLocParser GenomeLocParser to use. 
*/ public void setGenomeLocParser(GenomeLocParser genomeLocParser) { this.genomeLocParser = genomeLocParser; } /** * Sets the start time when the execute() function was last called * @param startTime the start time when the execute() function was last called */ protected void setStartTime(Date startTime) { this.startTime = startTime; } /** * @return the start time when the execute() function was last called */ public Date getStartTime() { return startTime; } /** * Setup the intervals to be processed */ protected void initializeIntervals() { // return if no interval arguments at all if ((argCollection.intervals == null) && (argCollection.excludeIntervals == null) && (argCollection.RODToInterval == null)) return; // if '-L all' was specified, verify that it was the only -L specified and return if so. if(argCollection.intervals != null) { for(String interval: argCollection.intervals) { if(interval.trim().equals("all")) { if(argCollection.intervals.size() > 1) throw new UserException("'-L all' was specified along with other intervals or interval lists; the GATK cannot combine '-L all' with other intervals."); // '-L all' was specified and seems valid. Return. return; } } } // if include argument isn't given, create new set of all possible intervals GenomeLocSortedSet includeSortedSet = (argCollection.intervals == null && argCollection.RODToInterval == null ? 
GenomeLocSortedSet.createSetFromSequenceDictionary(this.referenceDataSource.getReference().getSequenceDictionary()) : loadIntervals(argCollection.intervals, argCollection.intervalMerging, genomeLocParser.mergeIntervalLocations(checkRODToIntervalArgument(),argCollection.intervalMerging))); // if no exclude arguments, can return parseIntervalArguments directly if (argCollection.excludeIntervals == null) intervals = includeSortedSet; // otherwise there are exclude arguments => must merge include and exclude GenomeLocSortedSets else { GenomeLocSortedSet excludeSortedSet = loadIntervals(argCollection.excludeIntervals, argCollection.intervalMerging, null); intervals = includeSortedSet.subtractRegions(excludeSortedSet); // logging messages only printed when exclude (-XL) arguments are given long toPruneSize = includeSortedSet.coveredSize(); long toExcludeSize = excludeSortedSet.coveredSize(); long intervalSize = intervals.coveredSize(); logger.info(String.format("Initial include intervals span %d loci; exclude intervals span %d loci", toPruneSize, toExcludeSize)); logger.info(String.format("Excluding %d loci from original intervals (%.2f%% reduction)", toPruneSize - intervalSize, (toPruneSize - intervalSize) / (0.01 * toPruneSize))); } } /** * Loads the intervals relevant to the current execution * @param argList String representation of arguments; might include 'all', filenames, intervals in samtools * notation, or a combination of the * @param mergingRule Technique to use when merging interval data. * @param additionalIntervals a list of additional intervals to add to the returned set. Can be null. * @return A sorted, merged list of all intervals specified in this arg list. 
*/ private GenomeLocSortedSet loadIntervals(List<String> argList, IntervalMergingRule mergingRule, List<GenomeLoc> additionalIntervals) { return IntervalUtils.sortAndMergeIntervals(genomeLocParser,IntervalUtils.mergeListsBySetOperator(additionalIntervals, IntervalUtils.parseIntervalArguments(genomeLocParser,argList, this.getArguments().unsafe != ValidationExclusion.TYPE.ALLOW_EMPTY_INTERVAL_LIST), argCollection.BTIMergeRule), mergingRule); } /** * if we have a ROD specified as a 'rodToIntervalTrackName', convert its records to RODs * @return ROD intervals as GenomeLocs */ private List<GenomeLoc> checkRODToIntervalArgument() { Map<String, ReferenceOrderedDataSource> rodNames = RMDIntervalGenerator.getRMDTrackNames(rodDataSources); // Do we have any RODs that overloaded as interval lists with the 'rodToIntervalTrackName' flag? List<GenomeLoc> ret = new ArrayList<GenomeLoc>(); if (rodNames != null && argCollection.RODToInterval != null) { String rodName = argCollection.RODToInterval; // check to make sure we have a rod of that name if (!rodNames.containsKey(rodName)) throw new UserException.CommandLineException("--rodToIntervalTrackName (-BTI) was passed the name '"+rodName+"', which wasn't given as a ROD name in the -B option"); for (String str : rodNames.keySet()) if (str.equals(rodName)) { logger.info("Adding interval list from track (ROD) named " + rodName); RMDIntervalGenerator intervalGenerator = new RMDIntervalGenerator(rodNames.get(str)); ret.addAll(intervalGenerator.toGenomeLocList()); } } return ret; } /** * Add additional, externally managed IO streams for inputs. * * @param argumentSource Field into which to inject the value. * @param value Instance to inject. */ public void addInput(ArgumentSource argumentSource, Object value) { inputs.put(argumentSource, value); } /** * Add additional, externally managed IO streams for output. * * @param stub Instance to inject. 
*/ public void addOutput(Stub<?> stub) { outputs.add(stub); } protected void initializeDataSources() { logger.info("Strictness is " + argCollection.strictnessLevel); // TODO -- REMOVE ME BAQ.DEFAULT_GOP = argCollection.BAQGOP; validateSuppliedReference(); referenceDataSource = openReferenceSequenceFile(argCollection.referenceFile); validateSuppliedReads(); readsDataSource = createReadsDataSource(genomeLocParser, referenceDataSource.getReference()); sampleDataSource = new SampleDataSource(getSAMFileHeader(), argCollection.sampleFiles); for (SamRecordFilter filter : filters) if (filter instanceof SamRecordHeaderFilter) ((SamRecordHeaderFilter)filter).setHeader(this.getSAMFileHeader()); sampleDataSource = new SampleDataSource(getSAMFileHeader(), argCollection.sampleFiles); // set the sequence dictionary of all of Tribble tracks to the sequence dictionary of our reference rodDataSources = getReferenceOrderedDataSources(referenceMetaDataFiles,referenceDataSource.getReference().getSequenceDictionary(),genomeLocParser,argCollection.unsafe); } /** * Gets a unique identifier for the reader sourcing this read. * @param read Read to examine. * @return A unique identifier for the source file of this read. Exception if not found. */ public SAMReaderID getReaderIDForRead(final SAMRecord read) { return getReadsDataSource().getReaderID(read); } /** * Gets the source file for this read. * @param id Unique identifier determining which input file to use. * @return The source filename for this read. */ public File getSourceFileForReaderID(final SAMReaderID id) { return getReadsDataSource().getSAMFile(id); } /** * Returns sets of samples present in the (merged) input SAM stream, grouped by readers (i.e. underlying * individual bam files). For instance: if GATK is run with three input bam files (three -I arguments), then the list * returned by this method will contain 3 elements (one for each reader), with each element being a set of sample names * found in the corresponding bam file. 
* * @return Sets of samples in the merged input SAM stream, grouped by readers */ public List<Set<String>> getSamplesByReaders() { Collection<SAMReaderID> readers = getReadsDataSource().getReaderIDs(); List<Set<String>> sample_sets = new ArrayList<Set<String>>(readers.size()); for (SAMReaderID r : readers) { Set<String> samples = new HashSet<String>(1); sample_sets.add(samples); for (SAMReadGroupRecord g : getReadsDataSource().getHeader(r).getReadGroups()) { samples.add(g.getSample()); } } return sample_sets; } /** * Returns sets of libraries present in the (merged) input SAM stream, grouped by readers (i.e. underlying * individual bam files). For instance: if GATK is run with three input bam files (three -I arguments), then the list * returned by this method will contain 3 elements (one for each reader), with each element being a set of library names * found in the corresponding bam file. * * @return Sets of libraries present in the (merged) input SAM stream, grouped by readers */ public List<Set<String>> getLibrariesByReaders() { Collection<SAMReaderID> readers = getReadsDataSource().getReaderIDs(); List<Set<String>> lib_sets = new ArrayList<Set<String>>(readers.size()); for (SAMReaderID r : readers) { Set<String> libs = new HashSet<String>(2); lib_sets.add(libs); for (SAMReadGroupRecord g : getReadsDataSource().getHeader(r).getReadGroups()) { libs.add(g.getLibrary()); } } return lib_sets; } /** * **** UNLESS YOU HAVE GOOD REASON TO, DO NOT USE THIS METHOD; USE getFileToReadGroupIdMapping() INSTEAD **** * * Returns sets of (remapped) read groups in input SAM stream, grouped by readers (i.e. underlying * individual bam files). For instance: if GATK is run with three input bam files (three -I arguments), then the list * returned by this method will contain 3 elements (one for each reader), with each element being a set of remapped read groups * (i.e. as seen by read.getReadGroup().getReadGroupId() in the merged stream) that come from the corresponding bam file. 
* * @return sets of (merged) read group ids in order of input bams */ public List<Set<String>> getMergedReadGroupsByReaders() { Collection<SAMReaderID> readers = getReadsDataSource().getReaderIDs(); List<Set<String>> rg_sets = new ArrayList<Set<String>>(readers.size()); for (SAMReaderID r : readers) { Set<String> groups = new HashSet<String>(5); rg_sets.add(groups); for (SAMReadGroupRecord g : getReadsDataSource().getHeader(r).getReadGroups()) { if (getReadsDataSource().hasReadGroupCollisions()) { // Check if there were read group clashes with hasGroupIdDuplicates and if so: // use HeaderMerger to translate original read group id from the reader into the read group id in the // merged stream, and save that remapped read group id to associate it with specific reader groups.add(getReadsDataSource().getReadGroupId(r, g.getReadGroupId())); } else { // otherwise, pass through the unmapped read groups since this is what Picard does as well groups.add(g.getReadGroupId()); } } } return rg_sets; } /** * Now that all files are open, validate the sequence dictionaries of the reads vs. the reference vrs the reference ordered data (if available). * * @param reads Reads data source. * @param reference Reference data source. * @param rods a collection of the reference ordered data tracks */ private void validateSourcesAgainstReference(SAMDataSource reads, ReferenceSequenceFile reference, Collection<ReferenceOrderedDataSource> rods, RMDTrackBuilder manager) { if ((reads.isEmpty() && (rods == null || rods.isEmpty())) || reference == null ) return; // Compile a set of sequence names that exist in the reference file. SAMSequenceDictionary referenceDictionary = reference.getSequenceDictionary(); if (!reads.isEmpty()) { // Compile a set of sequence names that exist in the BAM files. 
SAMSequenceDictionary readsDictionary = reads.getHeader().getSequenceDictionary(); Set<String> readsSequenceNames = new TreeSet<String>(); for (SAMSequenceRecord dictionaryEntry : readsDictionary.getSequences()) readsSequenceNames.add(dictionaryEntry.getSequenceName()); if (readsSequenceNames.size() == 0) { logger.info("Reads file is unmapped. Skipping validation against reference."); return; } // compare the reads to the reference SequenceDictionaryUtils.validateDictionaries(logger, getArguments().unsafe, "reads", readsDictionary, "reference", referenceDictionary); } for (ReferenceOrderedDataSource rod : rods) manager.validateTrackSequenceDictionary(rod.getName(),rod.getSequenceDictionary(),referenceDictionary); } /** * Gets a data source for the given set of reads. * * @return A data source for the given set of reads. */ private SAMDataSource createReadsDataSource(GenomeLocParser genomeLocParser, IndexedFastaSequenceFile refReader) { DownsamplingMethod method = getDownsamplingMethod(); if ( getWalkerBAQApplicationTime() == BAQ.ApplicationTime.FORBIDDEN && argCollection.BAQMode != BAQ.CalculationMode.OFF) throw new UserException.BadArgumentValue("baq", "Walker cannot accept BAQ'd base qualities, and yet BAQ mode " + argCollection.BAQMode + " was requested."); return new SAMDataSource( samReaderIDs, genomeLocParser, argCollection.useOriginalBaseQualities, argCollection.strictnessLevel, argCollection.readBufferSize, method, new ValidationExclusion(Arrays.asList(argCollection.unsafe)), filters, includeReadsWithDeletionAtLoci(), generateExtendedEvents(), getWalkerBAQApplicationTime() == BAQ.ApplicationTime.ON_INPUT ? argCollection.BAQMode : BAQ.CalculationMode.OFF, getWalkerBAQQualityMode(), refReader); } /** * Opens a reference sequence file paired with an index. * * @param refFile Handle to a reference sequence file. Non-null. * @return A thread-safe file wrapper. 
*/ private ReferenceDataSource openReferenceSequenceFile(File refFile) { ReferenceDataSource ref = new ReferenceDataSource(refFile); genomeLocParser = new GenomeLocParser(ref.getReference()); return ref; } /** * Open the reference-ordered data sources. * * @param referenceMetaDataFiles collection of RMD descriptors to load and validate. * @param sequenceDictionary GATK-wide sequnce dictionary to use for validation. * @param genomeLocParser to use when creating and validating GenomeLocs. * @param validationExclusionType potentially indicate which validations to include / exclude. * * @return A list of reference-ordered data sources. */ private List<ReferenceOrderedDataSource> getReferenceOrderedDataSources(Collection<RMDTriplet> referenceMetaDataFiles, SAMSequenceDictionary sequenceDictionary, GenomeLocParser genomeLocParser, ValidationExclusion.TYPE validationExclusionType) { RMDTrackBuilder builder = new RMDTrackBuilder(referenceMetaDataFiles,sequenceDictionary,genomeLocParser,validationExclusionType); // try and make the tracks given their requests // create of live instances of the tracks List<RMDTrack> tracks = new ArrayList<RMDTrack>(); List<ReferenceOrderedDataSource> dataSources = new ArrayList<ReferenceOrderedDataSource>(); for (RMDTriplet fileDescriptor : referenceMetaDataFiles) dataSources.add(new ReferenceOrderedDataSource(fileDescriptor, builder, sequenceDictionary, genomeLocParser, flashbackData())); // validation: check to make sure everything the walker needs is present, and that all sequence dictionaries match. 
validateSuppliedReferenceOrderedData(dataSources); validateSourcesAgainstReference(readsDataSource, referenceDataSource.getReference(), dataSources, builder); return dataSources; } /** * Returns the SAM File Header from the input reads' data source file * @return the SAM File Header from the input reads' data source file */ public SAMFileHeader getSAMFileHeader() { return readsDataSource.getHeader(); } /** * Returns the unmerged SAM file header for an individual reader. * @param reader The reader. * @return Header for that reader. */ public SAMFileHeader getSAMFileHeader(SAMReaderID reader) { return readsDataSource.getHeader(reader); } /** * Returns data source object encapsulating all essential info and handlers used to traverse * reads; header merger, individual file readers etc can be accessed through the returned data source object. * * @return the reads data source */ public SAMDataSource getReadsDataSource() { return this.readsDataSource; } /** * Sets the collection of GATK main application arguments. * * @param argCollection the GATK argument collection */ public void setArguments(GATKArgumentCollection argCollection) { this.argCollection = argCollection; } /** * Gets the collection of GATK main application arguments. * * @return the GATK argument collection */ public GATKArgumentCollection getArguments() { return this.argCollection; } /** * Get the list of intervals passed to the engine. * @return List of intervals. */ public GenomeLocSortedSet getIntervals() { return this.intervals; } /** * Gets the list of filters employed by this engine. * @return Collection of filters (actual instances) used by this engine. */ public Collection<SamRecordFilter> getFilters() { return this.filters; } /** * Sets the list of filters employed by this engine. * @param filters Collection of filters (actual instances) used by this engine. */ public void setFilters(Collection<SamRecordFilter> filters) { this.filters = filters; } /** * Gets the filter manager for this engine. 
* @return filter manager for this engine. */ protected FilterManager getFilterManager() { return filterManager; } /** * Gets the input sources for this engine. * @return input sources for this engine. */ protected Map<ArgumentSource, Object> getInputs() { return inputs; } /** * Gets the output stubs for this engine. * @return output stubs for this engine. */ protected Collection<Stub<?>> getOutputs() { return outputs; } /** * Returns data source objects encapsulating all rod data; * individual rods can be accessed through the returned data source objects. * * @return the rods data sources */ public List<ReferenceOrderedDataSource> getRodDataSources() { return this.rodDataSources; } /** * Gets cumulative metrics about the entire run to this point. * @return cumulative metrics about the entire run. */ public ReadMetrics getCumulativeMetrics() { return readsDataSource == null ? null : readsDataSource.getCumulativeReadMetrics(); } public SampleDataSource getSampleMetadata() { return this.sampleDataSource; } /** * Get a sample by its ID * If an alias is passed in, return the main sample object * @param id sample id * @return sample Object with this ID */ public Sample getSampleById(String id) { return sampleDataSource.getSampleById(id); } /** * Get the sample for a given read group * Must first look up ID for read group * @param readGroup of sample * @return sample object with ID from the read group */ public Sample getSampleByReadGroup(SAMReadGroupRecord readGroup) { return sampleDataSource.getSampleByReadGroup(readGroup); } /** * Get a sample for a given read * Must first look up read group, and then sample ID for that read group * @param read of sample * @return sample object of this read */ public Sample getSampleByRead(SAMRecord read) { return getSampleByReadGroup(read.getReadGroup()); } /** * Get number of sample objects * @return size of samples map */ public int sampleCount() { return sampleDataSource.sampleCount(); } /** * Return all samples with a given family 
ID * Note that this isn't terribly efficient (linear) - it may be worth adding a new family ID data structure for this * @param familyId family ID * @return Samples with the given family ID */ public Set<Sample> getFamily(String familyId) { return sampleDataSource.getFamily(familyId); } /** * Returns all children of a given sample * See note on the efficiency of getFamily() - since this depends on getFamily() it's also not efficient * @param sample parent sample * @return children of the given sample */ public Set<Sample> getChildren(Sample sample) { return sampleDataSource.getChildren(sample); } /** * Gets all the samples * @return */ public Collection<Sample> getSamples() { return sampleDataSource.getSamples(); } /** * Takes a list of sample names and returns their corresponding sample objects * * @param sampleNameList List of sample names * @return Corresponding set of samples */ public Set<Sample> getSamples(Collection<String> sampleNameList) { return sampleDataSource.getSamples(sampleNameList); } /** * Returns a set of samples that have any value (which could be null) for a given property * @param key Property key * @return Set of samples with the property */ public Set<Sample> getSamplesWithProperty(String key) { return sampleDataSource.getSamplesWithProperty(key); } /** * Returns a set of samples that have a property with a certain value * Value must be a string for now - could add a similar method for matching any objects in the future * * @param key Property key * @param value String property value * @return Set of samples that match key and value */ public Set<Sample> getSamplesWithProperty(String key, String value) { return sampleDataSource.getSamplesWithProperty(key, value); } /** * Returns a set of sample objects for the sample names in a variant context * * @param context Any variant context * @return a set of the sample objects */ public Set<Sample> getSamplesByVariantContext(VariantContext context) { Set<Sample> samples = new HashSet<Sample>(); for 
(String sampleName : context.getSampleNames()) { samples.add(sampleDataSource.getOrCreateSample(sampleName)); } return samples; } /** * Returns all samples that were referenced in the SAM file */ public Set<Sample> getSAMFileSamples() { return sampleDataSource.getSAMFileSamples(); } /** * Return a subcontext restricted to samples with a given property key/value * Gets the sample names from key/value and relies on VariantContext.subContextFromGenotypes for the filtering * @param context VariantContext to filter * @param key property key * @param value property value (must be string) * @return subcontext */ public VariantContext subContextFromSampleProperty(VariantContext context, String key, String value) { return sampleDataSource.subContextFromSampleProperty(context, key, value); } public Map<String,String> getApproximateCommandLineArguments(Object... argumentProviders) { return CommandLineUtils.getApproximateCommandLineArguments(parsingEngine,argumentProviders); } public String createApproximateCommandLineArgumentString(Object... argumentProviders) { return CommandLineUtils.createApproximateCommandLineArgumentString(parsingEngine,argumentProviders); } }
java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java
/* * Copyright (c) 2010, The Broad Institute * * Permission is hereby granted, free of charge, to any person * obtaining a copy of this software and associated documentation * files (the "Software"), to deal in the Software without * restriction, including without limitation the rights to use, * copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following * conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. 
*/ package org.broadinstitute.sting.gatk; import net.sf.picard.filter.SamRecordFilter; import net.sf.picard.reference.IndexedFastaSequenceFile; import net.sf.picard.reference.ReferenceSequenceFile; import net.sf.samtools.*; import org.apache.log4j.Logger; import org.broadinstitute.sting.commandline.ArgumentException; import org.broadinstitute.sting.commandline.ArgumentSource; import org.broad.tribble.util.variantcontext.VariantContext; import org.broadinstitute.sting.commandline.CommandLineUtils; import org.broadinstitute.sting.commandline.ParsingEngine; import org.broadinstitute.sting.gatk.arguments.GATKArgumentCollection; import org.broadinstitute.sting.gatk.arguments.ValidationExclusion; import org.broadinstitute.sting.gatk.datasources.sample.Sample; import org.broadinstitute.sting.gatk.datasources.sample.SampleDataSource; import org.broadinstitute.sting.gatk.datasources.shards.MonolithicShardStrategy; import org.broadinstitute.sting.gatk.datasources.shards.Shard; import org.broadinstitute.sting.gatk.datasources.shards.ShardStrategy; import org.broadinstitute.sting.gatk.datasources.shards.ShardStrategyFactory; import org.broadinstitute.sting.gatk.datasources.simpleDataSources.ReferenceDataSource; import org.broadinstitute.sting.gatk.datasources.simpleDataSources.ReferenceOrderedDataSource; import org.broadinstitute.sting.gatk.datasources.simpleDataSources.SAMDataSource; import org.broadinstitute.sting.gatk.datasources.simpleDataSources.SAMReaderID; import org.broadinstitute.sting.gatk.executive.MicroScheduler; import org.broadinstitute.sting.gatk.filters.FilterManager; import org.broadinstitute.sting.gatk.filters.ReadGroupBlackListFilter; import org.broadinstitute.sting.gatk.filters.SamRecordHeaderFilter; import org.broadinstitute.sting.gatk.io.OutputTracker; import org.broadinstitute.sting.gatk.io.stubs.Stub; import org.broadinstitute.sting.gatk.refdata.tracks.RMDTrack; import org.broadinstitute.sting.gatk.refdata.tracks.builders.RMDTrackBuilder; import 
org.broadinstitute.sting.gatk.refdata.utils.RMDIntervalGenerator; import org.broadinstitute.sting.gatk.refdata.utils.RMDTriplet; import org.broadinstitute.sting.gatk.walkers.*; import org.broadinstitute.sting.utils.GenomeLoc; import org.broadinstitute.sting.utils.GenomeLocParser; import org.broadinstitute.sting.utils.GenomeLocSortedSet; import org.broadinstitute.sting.utils.SequenceDictionaryUtils; import org.broadinstitute.sting.utils.baq.BAQ; import org.broadinstitute.sting.utils.exceptions.ReviewedStingException; import org.broadinstitute.sting.utils.exceptions.UserException; import org.broadinstitute.sting.utils.interval.IntervalMergingRule; import org.broadinstitute.sting.utils.interval.IntervalUtils; import org.broadinstitute.sting.utils.text.XReadLines; import java.io.File; import java.io.FileNotFoundException; import java.util.*; /** * A GenomeAnalysisEngine that runs a specified walker. */ public class GenomeAnalysisEngine { /** * our log, which we want to capture anything from this class */ private static Logger logger = Logger.getLogger(GenomeAnalysisEngine.class); /** * The GATK command-line argument parsing code. */ private ParsingEngine parsingEngine; /** * The genomeLocParser can create and parse GenomeLocs. */ private GenomeLocParser genomeLocParser; /** * Accessor for sharded read data. */ private SAMDataSource readsDataSource = null; /** * Accessor for sharded reference data. */ private ReferenceDataSource referenceDataSource = null; /** * Accessor for sample metadata */ private SampleDataSource sampleDataSource = null; /** * Accessor for sharded reference-ordered data. */ private List<ReferenceOrderedDataSource> rodDataSources; // our argument collection private GATKArgumentCollection argCollection; /** * Collection of intervals used by the engine. */ private GenomeLocSortedSet intervals = null; /** * Collection of inputs used by the engine. 
*/ private Map<ArgumentSource, Object> inputs = new HashMap<ArgumentSource, Object>(); /** * Collection of outputs used by the engine. */ private Collection<Stub<?>> outputs = new ArrayList<Stub<?>>(); /** * Collection of the filters applied to the input data. */ private Collection<SamRecordFilter> filters; /** * our walker manager */ private final WalkerManager walkerManager = new WalkerManager(); private Walker<?, ?> walker; public void setWalker(Walker<?, ?> walker) { this.walker = walker; } /** * A processed collection of SAM reader identifiers. */ private Collection<SAMReaderID> samReaderIDs = Collections.emptyList(); /** * Set the SAM/BAM files over which to traverse. * @param samReaderIDs Collection of ids to use during this traversal. */ public void setSAMFileIDs(Collection<SAMReaderID> samReaderIDs) { this.samReaderIDs = samReaderIDs; } /** * Collection of reference metadata files over which to traverse. */ private Collection<RMDTriplet> referenceMetaDataFiles; /** * Set the reference metadata files to use for this traversal. * @param referenceMetaDataFiles Collection of files and descriptors over which to traverse. */ public void setReferenceMetaDataFiles(Collection<RMDTriplet> referenceMetaDataFiles) { this.referenceMetaDataFiles = referenceMetaDataFiles; } /** * Actually run the GATK with the specified walker. * * @return the value of this traversal. */ public Object execute() { //HeapSizeMonitor monitor = new HeapSizeMonitor(); //monitor.start(); setStartTime(new java.util.Date()); // validate our parameters if (this.getArguments() == null) { throw new ReviewedStingException("The GATKArgumentCollection passed to GenomeAnalysisEngine can not be null."); } // validate our parameters if (this.walker == null) throw new ReviewedStingException("The walker passed to GenomeAnalysisEngine can not be null."); // Prepare the data for traversal. 
initializeDataSources();

        // our microscheduler, which is in charge of running everything
        MicroScheduler microScheduler = createMicroscheduler();

        // create the output streams
        initializeOutputStreams(microScheduler.getOutputTracker());

        // initialize and validate the interval list
        initializeIntervals();
        validateSuppliedIntervals();

        // choose the sharding strategy appropriate to the walker and reference
        ShardStrategy shardStrategy = getShardStrategy(microScheduler.getReference());

        // execute the microscheduler, storing the results
        Object result = microScheduler.execute(this.walker, shardStrategy);

        //monitor.stop();
        //logger.info(String.format("Maximum heap size consumed: %d",monitor.getMaxMemoryUsed()));

        return result;
    }

    /**
     * Retrieves an instance of the walker based on the walker name.
     *
     * @param walkerName Name of the walker. Must not be null. If the walker cannot be instantiated, an exception will be thrown.
     * @return An instance of the walker.
     */
    public Walker<?, ?> getWalkerByName(String walkerName) {
        return walkerManager.createByName(walkerName);
    }

    /**
     * Gets the name of a given walker type.
     * @param walkerType Type of walker.
     * @return Name of the walker.
     */
    public String getWalkerName(Class<? extends Walker> walkerType) {
        return walkerManager.getName(walkerType);
    }

    /**
     * Gets a list of the filters to associate with the given walker. Will NOT initialize the engine with this filters;
     * the caller must handle that directly.
     * @return A collection of available filters.
*/ public Collection<SamRecordFilter> createFilters() { Set<SamRecordFilter> filters = new HashSet<SamRecordFilter>(); filters.addAll(WalkerManager.getReadFilters(walker,this.getFilterManager())); if (this.getArguments().readGroupBlackList != null && this.getArguments().readGroupBlackList.size() > 0) filters.add(new ReadGroupBlackListFilter(this.getArguments().readGroupBlackList)); for(String filterName: this.getArguments().readFilters) filters.add(this.getFilterManager().createByName(filterName)); return Collections.unmodifiableSet(filters); } /** * Allow subclasses and others within this package direct access to the walker manager. * @return The walker manager used by this package. */ protected WalkerManager getWalkerManager() { return walkerManager; } /** * setup a microscheduler * * @return a new microscheduler */ private MicroScheduler createMicroscheduler() { Walker my_walker = this.walker; // the mircoscheduler to return MicroScheduler microScheduler = null; // Temporarily require all walkers to have a reference, even if that reference is not conceptually necessary. 
if ((my_walker instanceof ReadWalker || my_walker instanceof DuplicateWalker || my_walker instanceof ReadPairWalker) && this.getArguments().referenceFile == null) { throw new UserException.CommandLineException("Read-based traversals require a reference file but none was given"); } return MicroScheduler.create(this,my_walker,this.getReadsDataSource(),this.getReferenceDataSource().getReference(),this.getRodDataSources(),this.getArguments().numberOfThreads); } protected DownsamplingMethod getDownsamplingMethod() { GATKArgumentCollection argCollection = this.getArguments(); DownsamplingMethod method; if(argCollection.getDownsamplingMethod() != null) method = argCollection.getDownsamplingMethod(); else if(WalkerManager.getDownsamplingMethod(walker) != null) method = WalkerManager.getDownsamplingMethod(walker); else method = argCollection.getDefaultDownsamplingMethod(); return method; } public BAQ.QualityMode getWalkerBAQQualityMode() { return WalkerManager.getBAQQualityMode(walker); } public BAQ.ApplicationTime getWalkerBAQApplicationTime() { return WalkerManager.getBAQApplicationTime(walker); } protected boolean generateExtendedEvents() { return walker.generateExtendedEvents(); } protected boolean includeReadsWithDeletionAtLoci() { return walker.includeReadsWithDeletionAtLoci(); } /** * Verifies that the supplied set of reads files mesh with what the walker says it requires. */ protected void validateSuppliedReads() { GATKArgumentCollection arguments = this.getArguments(); // Check what the walker says is required against what was provided on the command line. if (WalkerManager.isRequired(walker, DataSource.READS) && (arguments.samFiles == null || arguments.samFiles.size() == 0)) throw new ArgumentException("Walker requires reads but none were provided."); // Check what the walker says is allowed against what was provided on the command line. 
if ((arguments.samFiles != null && arguments.samFiles.size() > 0) && !WalkerManager.isAllowed(walker, DataSource.READS)) throw new ArgumentException("Walker does not allow reads but reads were provided."); } /** * Verifies that the supplied reference file mesh with what the walker says it requires. */ protected void validateSuppliedReference() { GATKArgumentCollection arguments = this.getArguments(); // Check what the walker says is required against what was provided on the command line. // TODO: Temporarily disabling WalkerManager.isRequired check on the reference because the reference is always required. if (/*WalkerManager.isRequired(walker, DataSource.REFERENCE) &&*/ arguments.referenceFile == null) throw new ArgumentException("Walker requires a reference but none was provided."); // Check what the walker says is allowed against what was provided on the command line. if (arguments.referenceFile != null && !WalkerManager.isAllowed(walker, DataSource.REFERENCE)) throw new ArgumentException("Walker does not allow a reference but one was provided."); } /** * Verifies that all required reference-ordered data has been supplied, and any reference-ordered data that was not * 'allowed' is still present. * * @param rods Reference-ordered data to load. */ protected void validateSuppliedReferenceOrderedData(List<ReferenceOrderedDataSource> rods) { // Check to make sure that all required metadata is present. List<RMD> allRequired = WalkerManager.getRequiredMetaData(walker); for (RMD required : allRequired) { boolean found = false; for (ReferenceOrderedDataSource rod : rods) { if (rod.matchesNameAndRecordType(required.name(), required.type())) found = true; } if (!found) throw new ArgumentException(String.format("Walker requires reference metadata to be supplied named '%s' of type '%s', but this metadata was not provided. " + "Please supply the specified metadata file.", required.name(), required.type().getSimpleName())); } // Check to see that no forbidden rods are present. 
for (ReferenceOrderedDataSource rod : rods) { if (!WalkerManager.isAllowed(walker, rod)) throw new ArgumentException(String.format("Walker of type %s does not allow access to metadata: %s", walker.getClass(), rod.getName())); } } protected void validateSuppliedIntervals() { // Only read walkers support '-L unmapped' intervals. Trap and validate any other instances of -L unmapped. if(!(walker instanceof ReadWalker)) { GenomeLocSortedSet intervals = getIntervals(); if(intervals != null && getIntervals().contains(GenomeLoc.UNMAPPED)) throw new ArgumentException("Interval list specifies unmapped region. Only read walkers may include the unmapped region."); } } /** * Get the sharding strategy given a driving data source. * * @param drivingDataSource Data on which to shard. * @return the sharding strategy */ protected ShardStrategy getShardStrategy(ReferenceSequenceFile drivingDataSource) { GenomeLocSortedSet intervals = this.getIntervals(); SAMDataSource readsDataSource = this.getReadsDataSource(); ValidationExclusion exclusions = (readsDataSource != null ? readsDataSource.getReadsInfo().getValidationExclusionList() : null); ReferenceDataSource referenceDataSource = this.getReferenceDataSource(); // Use monolithic sharding if no index is present. Monolithic sharding is always required for the original // sharding system; it's required with the new sharding system only for locus walkers. 
if(readsDataSource != null && !readsDataSource.hasIndex() ) { if(!exclusions.contains(ValidationExclusion.TYPE.ALLOW_UNINDEXED_BAM)) throw new UserException.CommandLineException("The GATK cannot currently process unindexed BAM files without the -U ALLOW_UNINDEXED_BAM"); if(intervals != null && WalkerManager.getWalkerDataSource(walker) != DataSource.REFERENCE) throw new UserException.CommandLineException("Cannot perform interval processing when walker is not driven by reference and no index is available."); Shard.ShardType shardType; if(walker instanceof LocusWalker) { if (readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate) throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.coordinate, "Locus walkers can only traverse coordinate-sorted data. Please resort your input BAM file(s) or set the Sort Order tag in the header appropriately."); shardType = Shard.ShardType.LOCUS; } else if(walker instanceof ReadWalker || walker instanceof DuplicateWalker || walker instanceof ReadPairWalker) shardType = Shard.ShardType.READ; else throw new UserException.CommandLineException("The GATK cannot currently process unindexed BAM files"); List<GenomeLoc> region; if(intervals != null) region = intervals.toList(); else { region = new ArrayList<GenomeLoc>(); for(SAMSequenceRecord sequenceRecord: drivingDataSource.getSequenceDictionary().getSequences()) region.add(getGenomeLocParser().createGenomeLoc(sequenceRecord.getSequenceName(),1,sequenceRecord.getSequenceLength())); } return new MonolithicShardStrategy(readsDataSource,shardType,region); } ShardStrategy shardStrategy = null; ShardStrategyFactory.SHATTER_STRATEGY shardType; long SHARD_SIZE = 100000L; if (walker instanceof LocusWalker) { if (walker instanceof RodWalker) SHARD_SIZE *= 1000; if (intervals != null && !intervals.isEmpty()) { if(!readsDataSource.isEmpty() && readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.coordinate) throw new 
UserException.MissortedBAM(SAMFileHeader.SortOrder.coordinate, "Locus walkers can only traverse coordinate-sorted data. Please resort your input BAM file(s) or set the Sort Order tag in the header appropriately."); shardStrategy = ShardStrategyFactory.shatter(readsDataSource, referenceDataSource.getReference(), ShardStrategyFactory.SHATTER_STRATEGY.LOCUS_EXPERIMENTAL, drivingDataSource.getSequenceDictionary(), SHARD_SIZE, getGenomeLocParser(), intervals); } else shardStrategy = ShardStrategyFactory.shatter(readsDataSource, referenceDataSource.getReference(), ShardStrategyFactory.SHATTER_STRATEGY.LOCUS_EXPERIMENTAL, drivingDataSource.getSequenceDictionary(), SHARD_SIZE,getGenomeLocParser()); } else if (walker instanceof ReadWalker || walker instanceof DuplicateWalker) { shardType = ShardStrategyFactory.SHATTER_STRATEGY.READS_EXPERIMENTAL; if (intervals != null && !intervals.isEmpty()) { shardStrategy = ShardStrategyFactory.shatter(readsDataSource, referenceDataSource.getReference(), shardType, drivingDataSource.getSequenceDictionary(), SHARD_SIZE, getGenomeLocParser(), intervals); } else { shardStrategy = ShardStrategyFactory.shatter(readsDataSource, referenceDataSource.getReference(), shardType, drivingDataSource.getSequenceDictionary(), SHARD_SIZE, getGenomeLocParser()); } } else if (walker instanceof ReadPairWalker) { if(readsDataSource != null && readsDataSource.getSortOrder() != SAMFileHeader.SortOrder.queryname) throw new UserException.MissortedBAM(SAMFileHeader.SortOrder.queryname, "Read pair walkers can only walk over query name-sorted data. 
Please resort your input BAM file."); if(intervals != null && !intervals.isEmpty()) throw new UserException.CommandLineException("Pairs traversal cannot be used in conjunction with intervals."); shardStrategy = ShardStrategyFactory.shatter(readsDataSource, referenceDataSource.getReference(), ShardStrategyFactory.SHATTER_STRATEGY.READS_EXPERIMENTAL, drivingDataSource.getSequenceDictionary(), SHARD_SIZE, getGenomeLocParser()); } else throw new ReviewedStingException("Unable to support walker of type" + walker.getClass().getName()); return shardStrategy; } protected boolean flashbackData() { return walker instanceof ReadWalker; } /** * Initialize the output streams as specified by the user. * * @param outputTracker the tracker supplying the initialization data. */ private void initializeOutputStreams(OutputTracker outputTracker) { for (Map.Entry<ArgumentSource, Object> input : getInputs().entrySet()) outputTracker.addInput(input.getKey(), input.getValue()); for (Stub<?> stub : getOutputs()) outputTracker.addOutput(stub); outputTracker.prepareWalker(walker, getArguments().strictnessLevel); } public ReferenceDataSource getReferenceDataSource() { return referenceDataSource; } public GenomeLocParser getGenomeLocParser() { return genomeLocParser; } /** * Manage lists of filters. */ private final FilterManager filterManager = new FilterManager(); private Date startTime = null; // the start time for execution public void setParser(ParsingEngine parsingEngine) { this.parsingEngine = parsingEngine; } /** * Explicitly set the GenomeLocParser, for unit testing. * @param genomeLocParser GenomeLocParser to use. 
*/ public void setGenomeLocParser(GenomeLocParser genomeLocParser) { this.genomeLocParser = genomeLocParser; } /** * Sets the start time when the execute() function was last called * @param startTime the start time when the execute() function was last called */ protected void setStartTime(Date startTime) { this.startTime = startTime; } /** * @return the start time when the execute() function was last called */ public Date getStartTime() { return startTime; } /** * Setup the intervals to be processed */ protected void initializeIntervals() { // return if no interval arguments at all if ((argCollection.intervals == null) && (argCollection.excludeIntervals == null) && (argCollection.RODToInterval == null)) return; // if '-L all' was specified, verify that it was the only -L specified and return if so. if(argCollection.intervals != null) { for(String interval: argCollection.intervals) { if(interval.trim().equals("all")) { if(argCollection.intervals.size() > 1) throw new UserException("'-L all' was specified along with other intervals or interval lists; the GATK cannot combine '-L all' with other intervals."); // '-L all' was specified and seems valid. Return. return; } } } // if include argument isn't given, create new set of all possible intervals GenomeLocSortedSet includeSortedSet = (argCollection.intervals == null && argCollection.RODToInterval == null ? 
GenomeLocSortedSet.createSetFromSequenceDictionary(this.referenceDataSource.getReference().getSequenceDictionary()) : loadIntervals(argCollection.intervals, argCollection.intervalMerging, genomeLocParser.mergeIntervalLocations(checkRODToIntervalArgument(),argCollection.intervalMerging))); // if no exclude arguments, can return parseIntervalArguments directly if (argCollection.excludeIntervals == null) intervals = includeSortedSet; // otherwise there are exclude arguments => must merge include and exclude GenomeLocSortedSets else { GenomeLocSortedSet excludeSortedSet = loadIntervals(argCollection.excludeIntervals, argCollection.intervalMerging, null); intervals = includeSortedSet.subtractRegions(excludeSortedSet); // logging messages only printed when exclude (-XL) arguments are given long toPruneSize = includeSortedSet.coveredSize(); long toExcludeSize = excludeSortedSet.coveredSize(); long intervalSize = intervals.coveredSize(); logger.info(String.format("Initial include intervals span %d loci; exclude intervals span %d loci", toPruneSize, toExcludeSize)); logger.info(String.format("Excluding %d loci from original intervals (%.2f%% reduction)", toPruneSize - intervalSize, (toPruneSize - intervalSize) / (0.01 * toPruneSize))); } } /** * Loads the intervals relevant to the current execution * @param argList String representation of arguments; might include 'all', filenames, intervals in samtools * notation, or a combination of the * @param mergingRule Technique to use when merging interval data. * @param additionalIntervals a list of additional intervals to add to the returned set. Can be null. * @return A sorted, merged list of all intervals specified in this arg list. 
*/ private GenomeLocSortedSet loadIntervals(List<String> argList, IntervalMergingRule mergingRule, List<GenomeLoc> additionalIntervals) { return IntervalUtils.sortAndMergeIntervals(genomeLocParser,IntervalUtils.mergeListsBySetOperator(additionalIntervals, IntervalUtils.parseIntervalArguments(genomeLocParser,argList, this.getArguments().unsafe != ValidationExclusion.TYPE.ALLOW_EMPTY_INTERVAL_LIST), argCollection.BTIMergeRule), mergingRule); } /** * if we have a ROD specified as a 'rodToIntervalTrackName', convert its records to RODs * @return ROD intervals as GenomeLocs */ private List<GenomeLoc> checkRODToIntervalArgument() { Map<String, ReferenceOrderedDataSource> rodNames = RMDIntervalGenerator.getRMDTrackNames(rodDataSources); // Do we have any RODs that overloaded as interval lists with the 'rodToIntervalTrackName' flag? List<GenomeLoc> ret = new ArrayList<GenomeLoc>(); if (rodNames != null && argCollection.RODToInterval != null) { String rodName = argCollection.RODToInterval; // check to make sure we have a rod of that name if (!rodNames.containsKey(rodName)) throw new UserException.CommandLineException("--rodToIntervalTrackName (-BTI) was passed the name '"+rodName+"', which wasn't given as a ROD name in the -B option"); for (String str : rodNames.keySet()) if (str.equals(rodName)) { logger.info("Adding interval list from track (ROD) named " + rodName); RMDIntervalGenerator intervalGenerator = new RMDIntervalGenerator(rodNames.get(str)); ret.addAll(intervalGenerator.toGenomeLocList()); } } return ret; } /** * Add additional, externally managed IO streams for inputs. * * @param argumentSource Field into which to inject the value. * @param value Instance to inject. */ public void addInput(ArgumentSource argumentSource, Object value) { inputs.put(argumentSource, value); } /** * Add additional, externally managed IO streams for output. * * @param stub Instance to inject. 
*/ public void addOutput(Stub<?> stub) { outputs.add(stub); } protected void initializeDataSources() { logger.info("Strictness is " + argCollection.strictnessLevel); // TODO -- REMOVE ME BAQ.DEFAULT_GOP = argCollection.BAQGOP; validateSuppliedReference(); referenceDataSource = openReferenceSequenceFile(argCollection.referenceFile); validateSuppliedReads(); readsDataSource = createReadsDataSource(genomeLocParser, referenceDataSource.getReference()); sampleDataSource = new SampleDataSource(getSAMFileHeader(), argCollection.sampleFiles); for (SamRecordFilter filter : filters) if (filter instanceof SamRecordHeaderFilter) ((SamRecordHeaderFilter)filter).setHeader(this.getSAMFileHeader()); sampleDataSource = new SampleDataSource(getSAMFileHeader(), argCollection.sampleFiles); // set the sequence dictionary of all of Tribble tracks to the sequence dictionary of our reference rodDataSources = getReferenceOrderedDataSources(referenceMetaDataFiles,referenceDataSource.getReference().getSequenceDictionary(),genomeLocParser,argCollection.unsafe); } /** * Gets a unique identifier for the reader sourcing this read. * @param read Read to examine. * @return A unique identifier for the source file of this read. Exception if not found. */ public SAMReaderID getReaderIDForRead(final SAMRecord read) { return getReadsDataSource().getReaderID(read); } /** * Gets the source file for this read. * @param id Unique identifier determining which input file to use. * @return The source filename for this read. */ public File getSourceFileForReaderID(final SAMReaderID id) { return getReadsDataSource().getSAMFile(id); } /** * Returns sets of samples present in the (merged) input SAM stream, grouped by readers (i.e. underlying * individual bam files). For instance: if GATK is run with three input bam files (three -I arguments), then the list * returned by this method will contain 3 elements (one for each reader), with each element being a set of sample names * found in the corresponding bam file. 
* * @return Sets of samples in the merged input SAM stream, grouped by readers */ public List<Set<String>> getSamplesByReaders() { Collection<SAMReaderID> readers = getReadsDataSource().getReaderIDs(); List<Set<String>> sample_sets = new ArrayList<Set<String>>(readers.size()); for (SAMReaderID r : readers) { Set<String> samples = new HashSet<String>(1); sample_sets.add(samples); for (SAMReadGroupRecord g : getReadsDataSource().getHeader(r).getReadGroups()) { samples.add(g.getSample()); } } return sample_sets; } /** * Returns sets of libraries present in the (merged) input SAM stream, grouped by readers (i.e. underlying * individual bam files). For instance: if GATK is run with three input bam files (three -I arguments), then the list * returned by this method will contain 3 elements (one for each reader), with each element being a set of library names * found in the corresponding bam file. * * @return Sets of libraries present in the (merged) input SAM stream, grouped by readers */ public List<Set<String>> getLibrariesByReaders() { Collection<SAMReaderID> readers = getReadsDataSource().getReaderIDs(); List<Set<String>> lib_sets = new ArrayList<Set<String>>(readers.size()); for (SAMReaderID r : readers) { Set<String> libs = new HashSet<String>(2); lib_sets.add(libs); for (SAMReadGroupRecord g : getReadsDataSource().getHeader(r).getReadGroups()) { libs.add(g.getLibrary()); } } return lib_sets; } /** * **** UNLESS YOU HAVE GOOD REASON TO, DO NOT USE THIS METHOD; USE getFileToReadGroupIdMapping() INSTEAD **** * * Returns sets of (remapped) read groups in input SAM stream, grouped by readers (i.e. underlying * individual bam files). For instance: if GATK is run with three input bam files (three -I arguments), then the list * returned by this method will contain 3 elements (one for each reader), with each element being a set of remapped read groups * (i.e. as seen by read.getReadGroup().getReadGroupId() in the merged stream) that come from the corresponding bam file. 
* * @return sets of (merged) read group ids in order of input bams */ public List<Set<String>> getMergedReadGroupsByReaders() { Collection<SAMReaderID> readers = getReadsDataSource().getReaderIDs(); List<Set<String>> rg_sets = new ArrayList<Set<String>>(readers.size()); for (SAMReaderID r : readers) { Set<String> groups = new HashSet<String>(5); rg_sets.add(groups); for (SAMReadGroupRecord g : getReadsDataSource().getHeader(r).getReadGroups()) { if (getReadsDataSource().hasReadGroupCollisions()) { // Check if there were read group clashes with hasGroupIdDuplicates and if so: // use HeaderMerger to translate original read group id from the reader into the read group id in the // merged stream, and save that remapped read group id to associate it with specific reader groups.add(getReadsDataSource().getReadGroupId(r, g.getReadGroupId())); } else { // otherwise, pass through the unmapped read groups since this is what Picard does as well groups.add(g.getReadGroupId()); } } } return rg_sets; } /** * Now that all files are open, validate the sequence dictionaries of the reads vs. the reference vrs the reference ordered data (if available). * * @param reads Reads data source. * @param reference Reference data source. * @param rods a collection of the reference ordered data tracks */ private void validateSourcesAgainstReference(SAMDataSource reads, ReferenceSequenceFile reference, Collection<ReferenceOrderedDataSource> rods, RMDTrackBuilder manager) { if ((reads.isEmpty() && (rods == null || rods.isEmpty())) || reference == null ) return; // Compile a set of sequence names that exist in the reference file. SAMSequenceDictionary referenceDictionary = reference.getSequenceDictionary(); if (!reads.isEmpty()) { // Compile a set of sequence names that exist in the BAM files. 
SAMSequenceDictionary readsDictionary = reads.getHeader().getSequenceDictionary(); Set<String> readsSequenceNames = new TreeSet<String>(); for (SAMSequenceRecord dictionaryEntry : readsDictionary.getSequences()) readsSequenceNames.add(dictionaryEntry.getSequenceName()); if (readsSequenceNames.size() == 0) { logger.info("Reads file is unmapped. Skipping validation against reference."); return; } // compare the reads to the reference SequenceDictionaryUtils.validateDictionaries(logger, getArguments().unsafe, "reads", readsDictionary, "reference", referenceDictionary); } for (ReferenceOrderedDataSource rod : rods) manager.validateTrackSequenceDictionary(rod.getName(),rod.getSequenceDictionary(),referenceDictionary); } /** * Gets a data source for the given set of reads. * * @return A data source for the given set of reads. */ private SAMDataSource createReadsDataSource(GenomeLocParser genomeLocParser, IndexedFastaSequenceFile refReader) { DownsamplingMethod method = getDownsamplingMethod(); if ( getWalkerBAQApplicationTime() == BAQ.ApplicationTime.FORBIDDEN && argCollection.BAQMode != BAQ.CalculationMode.OFF) throw new UserException.BadArgumentValue("baq", "Walker cannot accept BAQ'd base qualities, and yet BAQ mode " + argCollection.BAQMode + " was requested."); return new SAMDataSource( samReaderIDs, genomeLocParser, argCollection.useOriginalBaseQualities, argCollection.strictnessLevel, argCollection.readBufferSize, method, new ValidationExclusion(Arrays.asList(argCollection.unsafe)), filters, includeReadsWithDeletionAtLoci(), generateExtendedEvents(), getWalkerBAQApplicationTime() == BAQ.ApplicationTime.ON_INPUT ? argCollection.BAQMode : BAQ.CalculationMode.OFF, getWalkerBAQQualityMode(), refReader); } /** * Opens a reference sequence file paired with an index. * * @param refFile Handle to a reference sequence file. Non-null. * @return A thread-safe file wrapper. 
*/ private ReferenceDataSource openReferenceSequenceFile(File refFile) { ReferenceDataSource ref = new ReferenceDataSource(refFile); genomeLocParser = new GenomeLocParser(ref.getReference()); return ref; } /** * Open the reference-ordered data sources. * * @param referenceMetaDataFiles collection of RMD descriptors to load and validate. * @param sequenceDictionary GATK-wide sequnce dictionary to use for validation. * @param genomeLocParser to use when creating and validating GenomeLocs. * @param validationExclusionType potentially indicate which validations to include / exclude. * * @return A list of reference-ordered data sources. */ private List<ReferenceOrderedDataSource> getReferenceOrderedDataSources(Collection<RMDTriplet> referenceMetaDataFiles, SAMSequenceDictionary sequenceDictionary, GenomeLocParser genomeLocParser, ValidationExclusion.TYPE validationExclusionType) { RMDTrackBuilder builder = new RMDTrackBuilder(referenceMetaDataFiles,sequenceDictionary,genomeLocParser,validationExclusionType); // try and make the tracks given their requests // create of live instances of the tracks List<RMDTrack> tracks = new ArrayList<RMDTrack>(); List<ReferenceOrderedDataSource> dataSources = new ArrayList<ReferenceOrderedDataSource>(); for (RMDTriplet fileDescriptor : referenceMetaDataFiles) dataSources.add(new ReferenceOrderedDataSource(fileDescriptor, builder, sequenceDictionary, genomeLocParser, flashbackData())); // validation: check to make sure everything the walker needs is present, and that all sequence dictionaries match. 
validateSuppliedReferenceOrderedData(dataSources); validateSourcesAgainstReference(readsDataSource, referenceDataSource.getReference(), dataSources, builder); return dataSources; } /** * Returns the SAM File Header from the input reads' data source file * @return the SAM File Header from the input reads' data source file */ public SAMFileHeader getSAMFileHeader() { return readsDataSource.getHeader(); } /** * Returns the unmerged SAM file header for an individual reader. * @param reader The reader. * @return Header for that reader. */ public SAMFileHeader getSAMFileHeader(SAMReaderID reader) { return readsDataSource.getHeader(reader); } /** * Returns data source object encapsulating all essential info and handlers used to traverse * reads; header merger, individual file readers etc can be accessed through the returned data source object. * * @return the reads data source */ public SAMDataSource getReadsDataSource() { return this.readsDataSource; } /** * Sets the collection of GATK main application arguments. * * @param argCollection the GATK argument collection */ public void setArguments(GATKArgumentCollection argCollection) { this.argCollection = argCollection; } /** * Gets the collection of GATK main application arguments. * * @return the GATK argument collection */ public GATKArgumentCollection getArguments() { return this.argCollection; } /** * Get the list of intervals passed to the engine. * @return List of intervals. */ public GenomeLocSortedSet getIntervals() { return this.intervals; } /** * Gets the list of filters employed by this engine. * @return Collection of filters (actual instances) used by this engine. */ public Collection<SamRecordFilter> getFilters() { return this.filters; } /** * Sets the list of filters employed by this engine. * @param filters Collection of filters (actual instances) used by this engine. */ public void setFilters(Collection<SamRecordFilter> filters) { this.filters = filters; } /** * Gets the filter manager for this engine. 
* @return filter manager for this engine. */ protected FilterManager getFilterManager() { return filterManager; } /** * Gets the input sources for this engine. * @return input sources for this engine. */ protected Map<ArgumentSource, Object> getInputs() { return inputs; } /** * Gets the output stubs for this engine. * @return output stubs for this engine. */ protected Collection<Stub<?>> getOutputs() { return outputs; } /** * Returns data source objects encapsulating all rod data; * individual rods can be accessed through the returned data source objects. * * @return the rods data sources */ public List<ReferenceOrderedDataSource> getRodDataSources() { return this.rodDataSources; } /** * Gets cumulative metrics about the entire run to this point. * @return cumulative metrics about the entire run. */ public ReadMetrics getCumulativeMetrics() { return readsDataSource == null ? null : readsDataSource.getCumulativeReadMetrics(); } public SampleDataSource getSampleMetadata() { return this.sampleDataSource; } /** * Get a sample by its ID * If an alias is passed in, return the main sample object * @param id sample id * @return sample Object with this ID */ public Sample getSampleById(String id) { return sampleDataSource.getSampleById(id); } /** * Get the sample for a given read group * Must first look up ID for read group * @param readGroup of sample * @return sample object with ID from the read group */ public Sample getSampleByReadGroup(SAMReadGroupRecord readGroup) { return sampleDataSource.getSampleByReadGroup(readGroup); } /** * Get a sample for a given read * Must first look up read group, and then sample ID for that read group * @param read of sample * @return sample object of this read */ public Sample getSampleByRead(SAMRecord read) { return getSampleByReadGroup(read.getReadGroup()); } /** * Get number of sample objects * @return size of samples map */ public int sampleCount() { return sampleDataSource.sampleCount(); } /** * Return all samples with a given family 
ID * Note that this isn't terribly efficient (linear) - it may be worth adding a new family ID data structure for this * @param familyId family ID * @return Samples with the given family ID */ public Set<Sample> getFamily(String familyId) { return sampleDataSource.getFamily(familyId); } /** * Returns all children of a given sample * See note on the efficiency of getFamily() - since this depends on getFamily() it's also not efficient * @param sample parent sample * @return children of the given sample */ public Set<Sample> getChildren(Sample sample) { return sampleDataSource.getChildren(sample); } /** * Gets all the samples * @return */ public Collection<Sample> getSamples() { return sampleDataSource.getSamples(); } /** * Takes a list of sample names and returns their corresponding sample objects * * @param sampleNameList List of sample names * @return Corresponding set of samples */ public Set<Sample> getSamples(Collection<String> sampleNameList) { return sampleDataSource.getSamples(sampleNameList); } /** * Returns a set of samples that have any value (which could be null) for a given property * @param key Property key * @return Set of samples with the property */ public Set<Sample> getSamplesWithProperty(String key) { return sampleDataSource.getSamplesWithProperty(key); } /** * Returns a set of samples that have a property with a certain value * Value must be a string for now - could add a similar method for matching any objects in the future * * @param key Property key * @param value String property value * @return Set of samples that match key and value */ public Set<Sample> getSamplesWithProperty(String key, String value) { return sampleDataSource.getSamplesWithProperty(key, value); } /** * Returns a set of sample objects for the sample names in a variant context * * @param context Any variant context * @return a set of the sample objects */ public Set<Sample> getSamplesByVariantContext(VariantContext context) { Set<Sample> samples = new HashSet<Sample>(); for 
(String sampleName : context.getSampleNames()) { samples.add(sampleDataSource.getOrCreateSample(sampleName)); } return samples; } /** * Returns all samples that were referenced in the SAM file */ public Set<Sample> getSAMFileSamples() { return sampleDataSource.getSAMFileSamples(); } /** * Return a subcontext restricted to samples with a given property key/value * Gets the sample names from key/value and relies on VariantContext.subContextFromGenotypes for the filtering * @param context VariantContext to filter * @param key property key * @param value property value (must be string) * @return subcontext */ public VariantContext subContextFromSampleProperty(VariantContext context, String key, String value) { return sampleDataSource.subContextFromSampleProperty(context, key, value); } public Map<String,String> getApproximateCommandLineArguments(Object... argumentProviders) { return CommandLineUtils.getApproximateCommandLineArguments(parsingEngine,argumentProviders); } public String createApproximateCommandLineArgumentString(Object... argumentProviders) { return CommandLineUtils.createApproximateCommandLineArgumentString(parsingEngine,argumentProviders); } }
Create tmpdir if it doesn't exist. git-svn-id: 4561c0a8f080806b19201efb9525134c00b76d40@4936 348d0f76-0448-11de-a6fe-93d51630548a
java/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java
Create tmpdir if it doesn't exist.
<ide><path>ava/src/org/broadinstitute/sting/gatk/GenomeAnalysisEngine.java <ide> // our microscheduler, which is in charge of running everything <ide> MicroScheduler microScheduler = createMicroscheduler(); <ide> <add> // create temp directories as necessary <add> initializeTempDirectory(); <add> <ide> // create the output streams " <ide> initializeOutputStreams(microScheduler.getOutputTracker()); <ide> <ide> } <ide> <ide> /** <add> * Create the temp directory if it doesn't exist. <add> */ <add> private void initializeTempDirectory() { <add> File tempDir = new File(System.getProperty("java.io.tmpdir")); <add> tempDir.mkdirs(); <add> } <add> <add> /** <ide> * Initialize the output streams as specified by the user. <ide> * <ide> * @param outputTracker the tracker supplying the initialization data.
Java
mit
2a5e365f25ebc239418ee583ea1557754371e28a
0
fvasquezjatar/fermat-unused,fvasquezjatar/fermat-unused
/* * @#WalletPublisherModulePluginRoot.java - 2015 * Copyright bitDubai.com., All rights reserved.  * You may not modify, use, reproduce or distribute this software. * BITDUBAI/CONFIDENTIAL */ package com.bitdubai.fermat_dmp_plugin.layer.module.wallet_publisher.developer.bitdubai.version_1; import com.bitdubai.fermat_api.Service; import com.bitdubai.fermat_api.Plugin; import com.bitdubai.fermat_api.layer.all_definition.developer.LogManagerForDevelopers; import com.bitdubai.fermat_api.layer.all_definition.enums.ServiceStatus; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_factory.interfaces.WalletFactoryProjectProposal; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.exceptions.CantCheckPublicationException; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.exceptions.CantGetPublishedWalletsInformationException; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.exceptions.CantPublishWalletException; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.DealsWithWalletPublisher; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.DealsWithWalletPublisherMiddleware; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.WalletPublishedInformation; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.WalletPublisherManager; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.WalletPublisherMiddlewareManager; import com.bitdubai.fermat_api.layer.osa_android.logger_system.DealsWithLogger; import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogLevel; import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogManager; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.DealsWithErrors; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.ErrorManager; import 
com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.EventManager; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.DealsWithEvents; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.EventHandler; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.EventListener; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; /** * The Class <code>com.bitdubai.fermat_dmp_plugin.layer.middleware.wallet_publisher.developer.bitdubai.version_1.WalletPublisherMiddlewarePluginRoot</code> is * the responsible to communicate the user interface whit the middleware layer. * <p/> * * Created by loui on 05/02/15. * Update by Roberto Requena - ([email protected]) on 04/08/2015 * * @version 1.0 * @since Java JDK 1.7 */ public class WalletPublisherModulePluginRoot implements Service, DealsWithWalletPublisherMiddleware, DealsWithEvents, DealsWithErrors, DealsWithLogger, LogManagerForDevelopers, Plugin, WalletPublisherManager { /** * Represent the logManager */ private LogManager logManager; /** * Represent the newLoggingLevel */ static Map<String, LogLevel> newLoggingLevel = new HashMap<>(); /** * Represent the errorManager */ private ErrorManager errorManager; /** * DealWithEvents Interface member variables. 
*/ private EventManager eventManager; /** * Represent the plugin id */ private UUID pluginId; /** * Represent the status of the service */ private ServiceStatus serviceStatus; /** * Represent the listenersAdded */ private List<EventListener> listenersAdded; /** * Represent the walletPublisherMiddlewareManager */ private WalletPublisherMiddlewareManager walletPublisherMiddlewareManager; /** * Constructor */ public WalletPublisherModulePluginRoot() { serviceStatus = ServiceStatus.CREATED; listenersAdded = new ArrayList<>(); } /** * (non-Javadoc) * @see Service#start() */ @Override public void start() { /** * I will initialize the handling of com.bitdubai.platform events. */ EventListener eventListener; EventHandler eventHandler; this.serviceStatus = ServiceStatus.STARTED; } /** * (non-Javadoc) * @see Service#pause() */ @Override public void pause() { this.serviceStatus = ServiceStatus.PAUSED; } /** * (non-Javadoc) * @see Service#resume() */ @Override public void resume() { this.serviceStatus = ServiceStatus.STARTED; } /** * (non-Javadoc) * @see Service#stop() */ @Override public void stop() { /** * I will remove all the event listeners registered with the event manager. */ for (EventListener eventListener : listenersAdded) { eventManager.removeListener(eventListener); } listenersAdded.clear(); this.serviceStatus = ServiceStatus.STOPPED; } /** * (non-Javadoc) * @see Service#getStatus() */ @Override public ServiceStatus getStatus() { return this.serviceStatus; } /** * DealWithEvents Interface implementation. 
*/ @Override public void setEventManager(EventManager eventManager) { this.eventManager = eventManager; } /** * (non-Javadoc) * @see DealsWithErrors#setErrorManager(ErrorManager) */ @Override public void setErrorManager(ErrorManager errorManager) { } /** * (non-Javadoc) * @see Plugin#setId(UUID) */ @Override public void setId(UUID pluginId) { this.pluginId = pluginId; } /** * (non-Javadoc) * @see DealsWithLogger#setLogManager(LogManager) */ @Override public void setLogManager(LogManager logManager) { this.logManager = logManager; } /** * (non-Javadoc) * @see LogManagerForDevelopers#getClassesFullPath() */ @Override public List<String> getClassesFullPath() { List<String> returnedClasses = new ArrayList<String>(); returnedClasses.add("com.bitdubai.fermat_dmp_plugin.layer.module.wallet_publisher.developer.bitdubai.version_1.WalletPublisherModulePluginRoot"); /** * I return the values. */ return returnedClasses; } /** * (non-Javadoc) * @see LogManagerForDevelopers#setLoggingLevelPerClass(Map<String, LogLevel>) */ @Override public void setLoggingLevelPerClass(Map<String, LogLevel> newLoggingLevel) { /** * I will check the current values and update the LogLevel in those which is different */ for (Map.Entry<String, LogLevel> pluginPair : newLoggingLevel.entrySet()) { /** * if this path already exists in the Root.bewLoggingLevel I'll update the value, else, I will put as new */ if (WalletPublisherModulePluginRoot.newLoggingLevel.containsKey(pluginPair.getKey())) { WalletPublisherModulePluginRoot.newLoggingLevel.remove(pluginPair.getKey()); WalletPublisherModulePluginRoot.newLoggingLevel.put(pluginPair.getKey(), pluginPair.getValue()); } else { WalletPublisherModulePluginRoot.newLoggingLevel.put(pluginPair.getKey(), pluginPair.getValue()); } } } /** * (non-Javadoc) * @see WalletPublisherMiddlewareManager#showPublishedWallets() */ @Override public Map<String, List<WalletPublishedInformation>> showPublishedWallets() throws CantGetPublishedWalletsInformationException { return 
walletPublisherMiddlewareManager.showPublishedWallets(); } /** * (non-Javadoc) * @see WalletPublisherMiddlewareManager#canBePublished(WalletFactoryProjectProposal) */ @Override public boolean canBePublished(WalletFactoryProjectProposal walletFactoryProjectProposal) throws CantCheckPublicationException { return walletPublisherMiddlewareManager.canBePublished(walletFactoryProjectProposal); } /** * (non-Javadoc) * @see WalletPublisherMiddlewareManager#publishWallet(WalletFactoryProjectProposal) */ @Override public void publishWallet(WalletFactoryProjectProposal walletFactoryProjectProposal) throws CantPublishWalletException { } /** * (non-Javadoc) * @see WalletPublisherMiddlewareManager#publishSkin(WalletFactoryProjectProposal) */ @Override public void publishSkin(WalletFactoryProjectProposal walletFactoryProjectProposal) throws CantPublishWalletException { } /** * (non-Javadoc) * @see WalletPublisherMiddlewareManager#publishLanguage(WalletFactoryProjectProposal) */ @Override public void publishLanguage(WalletFactoryProjectProposal walletFactoryProjectProposal) throws CantPublishWalletException { } /** * (non-Javadoc) * @see DealsWithWalletPublisherMiddleware#setWalletPublisherMiddlewareManager(WalletPublisherMiddlewareManager) */ @Override public void setWalletPublisherMiddlewareManager(WalletPublisherMiddlewareManager walletPublisherMiddlewareManager) { this.walletPublisherMiddlewareManager = walletPublisherMiddlewareManager; } }
DMP/plugin/module/fermat-dmp-plugin-module-wallet-publisher-bitdubai/src/main/java/com/bitdubai/fermat_dmp_plugin/layer/module/wallet_publisher/developer/bitdubai/version_1/WalletPublisherModulePluginRoot.java
/* * @#WalletPublisherModulePluginRoot.java - 2015 * Copyright bitDubai.com., All rights reserved.  * You may not modify, use, reproduce or distribute this software. * BITDUBAI/CONFIDENTIAL */ package com.bitdubai.fermat_dmp_plugin.layer.module.wallet_publisher.developer.bitdubai.version_1; import com.bitdubai.fermat_api.Service; import com.bitdubai.fermat_api.Plugin; import com.bitdubai.fermat_api.layer.all_definition.developer.LogManagerForDevelopers; import com.bitdubai.fermat_api.layer.all_definition.enums.ServiceStatus; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_factory.interfaces.WalletFactoryProjectProposal; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.exceptions.CantCheckPublicationException; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.exceptions.CantGetPublishedWalletsInformationException; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.exceptions.CantPublishWalletException; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.DealsWithWalletPublisher; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.WalletPublishedInformation; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.WalletPublisherManager; import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.WalletPublisherMiddlewareManager; import com.bitdubai.fermat_api.layer.osa_android.logger_system.DealsWithLogger; import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogLevel; import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogManager; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.DealsWithErrors; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.ErrorManager; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.EventManager; import 
com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.DealsWithEvents; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.EventHandler; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.event_manager.EventListener; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; /** * The Class <code>com.bitdubai.fermat_dmp_plugin.layer.middleware.wallet_publisher.developer.bitdubai.version_1.WalletPublisherMiddlewarePluginRoot</code> is * the responsible to communicate the user interface whit the middleware layer. * <p/> * * Created by loui on 05/02/15. * Update by Roberto Requena - ([email protected]) on 04/08/2015 * * @version 1.0 * @since Java JDK 1.7 */ public class WalletPublisherModulePluginRoot implements Service, DealsWithWalletPublisher, DealsWithEvents, DealsWithErrors, DealsWithLogger, LogManagerForDevelopers, Plugin, WalletPublisherManager { /** * Represent the logManager */ private LogManager logManager; /** * Represent the newLoggingLevel */ static Map<String, LogLevel> newLoggingLevel = new HashMap<>(); /** * Represent the errorManager */ private ErrorManager errorManager; /** * DealWithEvents Interface member variables. */ private EventManager eventManager; /** * Represent the plugin id */ private UUID pluginId; /** * Represent the status of the service */ private ServiceStatus serviceStatus; /** * Represent the listenersAdded */ private List<EventListener> listenersAdded; /** * Represent the walletPublisherManager */ private WalletPublisherManager walletPublisherManager; /** * Constructor */ public WalletPublisherModulePluginRoot() { serviceStatus = ServiceStatus.CREATED; listenersAdded = new ArrayList<>(); } /** * (non-Javadoc) * @see Service#start() */ @Override public void start() { /** * I will initialize the handling of com.bitdubai.platform events. 
*/ EventListener eventListener; EventHandler eventHandler; this.serviceStatus = ServiceStatus.STARTED; } /** * (non-Javadoc) * @see Service#pause() */ @Override public void pause() { this.serviceStatus = ServiceStatus.PAUSED; } /** * (non-Javadoc) * @see Service#resume() */ @Override public void resume() { this.serviceStatus = ServiceStatus.STARTED; } /** * (non-Javadoc) * @see Service#stop() */ @Override public void stop() { /** * I will remove all the event listeners registered with the event manager. */ for (EventListener eventListener : listenersAdded) { eventManager.removeListener(eventListener); } listenersAdded.clear(); this.serviceStatus = ServiceStatus.STOPPED; } /** * (non-Javadoc) * @see Service#getStatus() */ @Override public ServiceStatus getStatus() { return this.serviceStatus; } /** * DealWithEvents Interface implementation. */ @Override public void setEventManager(EventManager eventManager) { this.eventManager = eventManager; } /** * (non-Javadoc) * @see DealsWithErrors#setErrorManager(ErrorManager) */ @Override public void setErrorManager(ErrorManager errorManager) { } /** * (non-Javadoc) * @see Plugin#setId(UUID) */ @Override public void setId(UUID pluginId) { this.pluginId = pluginId; } /** * (non-Javadoc) * @see DealsWithLogger#setLogManager(LogManager) */ @Override public void setLogManager(LogManager logManager) { this.logManager = logManager; } /** * (non-Javadoc) * @see LogManagerForDevelopers#getClassesFullPath() */ @Override public List<String> getClassesFullPath() { List<String> returnedClasses = new ArrayList<String>(); returnedClasses.add("com.bitdubai.fermat_dmp_plugin.layer.module.wallet_publisher.developer.bitdubai.version_1.WalletPublisherModulePluginRoot"); /** * I return the values. 
*/ return returnedClasses; } /** * (non-Javadoc) * @see LogManagerForDevelopers#setLoggingLevelPerClass(Map<String, LogLevel>) */ @Override public void setLoggingLevelPerClass(Map<String, LogLevel> newLoggingLevel) { /** * I will check the current values and update the LogLevel in those which is different */ for (Map.Entry<String, LogLevel> pluginPair : newLoggingLevel.entrySet()) { /** * if this path already exists in the Root.bewLoggingLevel I'll update the value, else, I will put as new */ if (WalletPublisherModulePluginRoot.newLoggingLevel.containsKey(pluginPair.getKey())) { WalletPublisherModulePluginRoot.newLoggingLevel.remove(pluginPair.getKey()); WalletPublisherModulePluginRoot.newLoggingLevel.put(pluginPair.getKey(), pluginPair.getValue()); } else { WalletPublisherModulePluginRoot.newLoggingLevel.put(pluginPair.getKey(), pluginPair.getValue()); } } } /** * (non-Javadoc) * @see WalletPublisherMiddlewareManager#showPublishedWallets() */ @Override public Map<String, List<WalletPublishedInformation>> showPublishedWallets() throws CantGetPublishedWalletsInformationException { return null; } /** * (non-Javadoc) * @see WalletPublisherMiddlewareManager#canBePublished(WalletFactoryProjectProposal) */ @Override public boolean canBePublished(WalletFactoryProjectProposal walletFactoryProjectProposal) throws CantCheckPublicationException { return false; } /** * (non-Javadoc) * @see WalletPublisherMiddlewareManager#publishWallet(WalletFactoryProjectProposal) */ @Override public void publishWallet(WalletFactoryProjectProposal walletFactoryProjectProposal) throws CantPublishWalletException { } /** * (non-Javadoc) * @see WalletPublisherMiddlewareManager#publishSkin(WalletFactoryProjectProposal) */ @Override public void publishSkin(WalletFactoryProjectProposal walletFactoryProjectProposal) throws CantPublishWalletException { } /** * (non-Javadoc) * @see WalletPublisherMiddlewareManager#publishLanguage(WalletFactoryProjectProposal) */ @Override public void 
publishLanguage(WalletFactoryProjectProposal walletFactoryProjectProposal) throws CantPublishWalletException { } /** * (non-Javadoc) * @see DealsWithWalletPublisher#setWalletPublisherManager(WalletPublisherManager) */ @Override public void setWalletPublisherManager(WalletPublisherManager walletPublisherManager) { this.walletPublisherManager = walletPublisherManager; } }
Add DealsWithWalletPublisherMiddleware
DMP/plugin/module/fermat-dmp-plugin-module-wallet-publisher-bitdubai/src/main/java/com/bitdubai/fermat_dmp_plugin/layer/module/wallet_publisher/developer/bitdubai/version_1/WalletPublisherModulePluginRoot.java
Add DealsWithWalletPublisherMiddleware
<ide><path>MP/plugin/module/fermat-dmp-plugin-module-wallet-publisher-bitdubai/src/main/java/com/bitdubai/fermat_dmp_plugin/layer/module/wallet_publisher/developer/bitdubai/version_1/WalletPublisherModulePluginRoot.java <ide> import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.exceptions.CantGetPublishedWalletsInformationException; <ide> import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.exceptions.CantPublishWalletException; <ide> import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.DealsWithWalletPublisher; <add>import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.DealsWithWalletPublisherMiddleware; <ide> import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.WalletPublishedInformation; <ide> import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.WalletPublisherManager; <ide> import com.bitdubai.fermat_api.layer.dmp_middleware.wallet_publisher.interfaces.WalletPublisherMiddlewareManager; <ide> * @version 1.0 <ide> * @since Java JDK 1.7 <ide> */ <del>public class WalletPublisherModulePluginRoot implements Service, DealsWithWalletPublisher, DealsWithEvents, DealsWithErrors, DealsWithLogger, LogManagerForDevelopers, Plugin, WalletPublisherManager { <add>public class WalletPublisherModulePluginRoot implements Service, DealsWithWalletPublisherMiddleware, DealsWithEvents, DealsWithErrors, DealsWithLogger, LogManagerForDevelopers, Plugin, WalletPublisherManager { <ide> <ide> /** <ide> * Represent the logManager <ide> private List<EventListener> listenersAdded; <ide> <ide> /** <del> * Represent the walletPublisherManager <del> */ <del> private WalletPublisherManager walletPublisherManager; <add> * Represent the walletPublisherMiddlewareManager <add> */ <add> private WalletPublisherMiddlewareManager walletPublisherMiddlewareManager; <ide> <ide> /** <ide> * Constructor <ide> */ <ide> @Override <ide> public Map<String, 
List<WalletPublishedInformation>> showPublishedWallets() throws CantGetPublishedWalletsInformationException { <del> return null; <add> return walletPublisherMiddlewareManager.showPublishedWallets(); <ide> } <ide> <ide> /** <ide> */ <ide> @Override <ide> public boolean canBePublished(WalletFactoryProjectProposal walletFactoryProjectProposal) throws CantCheckPublicationException { <del> return false; <add> return walletPublisherMiddlewareManager.canBePublished(walletFactoryProjectProposal); <ide> } <ide> <ide> /** <ide> <ide> /** <ide> * (non-Javadoc) <del> * @see DealsWithWalletPublisher#setWalletPublisherManager(WalletPublisherManager) <del> */ <del> @Override <del> public void setWalletPublisherManager(WalletPublisherManager walletPublisherManager) { <del> this.walletPublisherManager = walletPublisherManager; <add> * @see DealsWithWalletPublisherMiddleware#setWalletPublisherMiddlewareManager(WalletPublisherMiddlewareManager) <add> */ <add> @Override <add> public void setWalletPublisherMiddlewareManager(WalletPublisherMiddlewareManager walletPublisherMiddlewareManager) { <add> this.walletPublisherMiddlewareManager = walletPublisherMiddlewareManager; <ide> } <ide> } <ide>
JavaScript
mit
58f6c0cb51b1484824225571c0408445a4f35a29
0
KCreate/leonardschuetz.ch,KCreate/leonardschuetz.ch,KCreate/leonardschuetz.ch
module.exports = (context) => { // Import libraries and other routes const express = context.express; const path = context.path; const auth = context.auth; const fs = context.fs; const production = context.production; const morgan = context.morgan; const compression = context.compression; const bodyParser = context.bodyParser; const app = context.app; const loggingStream = context.loggingStream; const vhost = context.vhost; const expressWs = context.expressWs; const webpackDevMiddleware = context.webpackDevMiddleware; const webpackHotMiddleware = context.webpackHotMiddleware; // Configuration app.enable("strict routing"); app.disable("x-powered-by"); app.use(compression()); // Middlewares app.use(morgan("combined", { stream: loggingStream, })); app.use(bodyParser.json()); app.use(bodyParser.urlencoded({ extended: false })); app.use(auth.router); // Domain specific routes app.use(vhost("todos.leonardschuetz.ch", (req, res, next) => res.redirect("https://leonardschuetz.ch/todos"))) app.use(vhost("livechat.leonardschuetz.ch", (req, res, next) => res.redirect("https://leonardschuetz.ch/livechat"))) app.use(vhost("www.bagbags.ch", (req, res) => res.redirect("https://bagbags.ch"))) app.use(vhost("bagbags.ch", (req, res, next) => { const router = express.Router() router.use("/instagram", (req, res) => res.redirect("https://instagram.com/bagbags.ch")) router.use("/facebook", (req, res) => res.redirect("https://www.facebook.com/BAGbags.ch")) router.use(express.static(path.resolve(__dirname, "./resources/documents/bagbags/"))) router.use((req, res) => { res.status(404) res.send("Error 404: Could not find " + req.path) }) router(req, res, next) })) // Content routes app.use("/charly", (req, res) => res.redirect("https://github.com/charly-lang")); app.use("/charly-lang", (req, res) => res.redirect("https://github.com/charly-lang")); app.use("/resources", require("./resources.js")); app.use("/d/:file", (req, res) => res.redirect("/resources/documents/" + req.params.file)); 
app.use("/apps", require("./apps.js")); app.use("/todosapi", auth.requiresAuthentication, require("./todos/index.js")); app.use("/documents", auth.requiresAuthentication, require("./documents.js")); app.use("/livechatapi", (req, res, next) => { req.expressWs = expressWs; next(); }, require("./livechat/route.js")); app.use("/tbz-va-2016", express.static(path.resolve(__dirname, "./resources/documents/tbz-va-2016/"))); app.use("/bagbags", express.static(path.resolve(__dirname, "./resources/documents/bagbags/"))); // If in development, include webpack middlewares if (!production) { app.use(webpackDevMiddleware); app.use(webpackHotMiddleware); } // All other routes are being handled by react-router app.use(express.static("./dist")); app.use("/", (req, res) => { if (production) { res.sendFile(path.resolve("./dist/index.html")); } else { res.sendFile(path.resolve("./client/app/index.html")); } }); return app; };
server/routes.js
module.exports = (context) => { // Import libraries and other routes const express = context.express; const path = context.path; const auth = context.auth; const fs = context.fs; const production = context.production; const morgan = context.morgan; const compression = context.compression; const bodyParser = context.bodyParser; const app = context.app; const loggingStream = context.loggingStream; const vhost = context.vhost; const expressWs = context.expressWs; const webpackDevMiddleware = context.webpackDevMiddleware; const webpackHotMiddleware = context.webpackHotMiddleware; // Configuration app.enable("strict routing"); app.disable("x-powered-by"); app.use(compression()); // Middlewares app.use(morgan("combined", { stream: loggingStream, })); app.use(bodyParser.json()); app.use(bodyParser.urlencoded({ extended: false })); app.use(auth.router); // Domain specific routes app.use(vhost("todos.leonardschuetz.ch", (req, res, next) => res.redirect("https://leonardschuetz.ch/todos"))) app.use(vhost("livechat.leonardschuetz.ch", (req, res, next) => res.redirect("https://leonardschuetz.ch/todos"))) app.use(vhost("www.bagbags.ch", (req, res) => res.redirect("https://bagbags.ch"))) app.use(vhost("bagbags.ch", (req, res, next) => { const router = express.Router() router.use("/instagram", (req, res) => res.redirect("https://instagram.com/bagbags.ch")) router.use("/facebook", (req, res) => res.redirect("https://www.facebook.com/BAGbags.ch")) router.use(express.static(path.resolve(__dirname, "./resources/documents/bagbags/"))) router.use((req, res) => { res.status(404) res.send("Error 404: Could not find " + req.path) }) router(req, res, next) })) // Content routes app.use("/charly", (req, res) => res.redirect("https://github.com/charly-lang")); app.use("/charly-lang", (req, res) => res.redirect("https://github.com/charly-lang")); app.use("/resources", require("./resources.js")); app.use("/d/:file", (req, res) => res.redirect("/resources/documents/" + req.params.file)); 
app.use("/apps", require("./apps.js")); app.use("/todosapi", auth.requiresAuthentication, require("./todos/index.js")); app.use("/documents", auth.requiresAuthentication, require("./documents.js")); app.use("/livechatapi", (req, res, next) => { req.expressWs = expressWs; next(); }, require("./livechat/route.js")); app.use("/tbz-va-2016", express.static(path.resolve(__dirname, "./resources/documents/tbz-va-2016/"))); app.use("/bagbags", express.static(path.resolve(__dirname, "./resources/documents/bagbags/"))); // If in development, include webpack middlewares if (!production) { app.use(webpackDevMiddleware); app.use(webpackHotMiddleware); } // All other routes are being handled by react-router app.use(express.static("./dist")); app.use("/", (req, res) => { if (production) { res.sendFile(path.resolve("./dist/index.html")); } else { res.sendFile(path.resolve("./client/app/index.html")); } }); return app; };
Redirect livechat.leonardschuetz.ch to leonardschuetz.ch/livechat
server/routes.js
Redirect livechat.leonardschuetz.ch to leonardschuetz.ch/livechat
<ide><path>erver/routes.js <ide> <ide> // Domain specific routes <ide> app.use(vhost("todos.leonardschuetz.ch", (req, res, next) => res.redirect("https://leonardschuetz.ch/todos"))) <del> app.use(vhost("livechat.leonardschuetz.ch", (req, res, next) => res.redirect("https://leonardschuetz.ch/todos"))) <add> app.use(vhost("livechat.leonardschuetz.ch", (req, res, next) => res.redirect("https://leonardschuetz.ch/livechat"))) <ide> app.use(vhost("www.bagbags.ch", (req, res) => res.redirect("https://bagbags.ch"))) <ide> app.use(vhost("bagbags.ch", (req, res, next) => { <ide> const router = express.Router()
Java
apache-2.0
594b3fda87ac2153c51e647527235983607f700a
0
philchand/mpdroid-2014,jcnoir/dmix,joansmith/dmix,joansmith/dmix,0359xiaodong/dmix,eisnerd/mupeace,abarisain/dmix,hurzl/dmix,abarisain/dmix,eisnerd/mupeace,0359xiaodong/dmix,philchand/mpdroid-2014,philchand/mpdroid-2014,hurzl/dmix,philchand/mpdroid-2014,eisnerd/mupeace,jcnoir/dmix
package com.namelessdev.mpdroid; import org.a0z.mpd.MPD; import org.a0z.mpd.MPDStatus; import org.a0z.mpd.exception.MPDServerException; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.content.Intent; import android.content.SharedPreferences; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.StrictMode; import android.preference.PreferenceManager; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import android.support.v4.view.ViewPager; import android.view.KeyEvent; import android.view.View; import android.widget.ArrayAdapter; import com.actionbarsherlock.app.ActionBar; import com.actionbarsherlock.app.ActionBar.OnNavigationListener; import com.actionbarsherlock.view.Menu; import com.actionbarsherlock.view.MenuItem; import com.namelessdev.mpdroid.MPDroidActivities.MPDroidFragmentActivity; import com.namelessdev.mpdroid.fragments.NowPlayingFragment; import com.namelessdev.mpdroid.fragments.PlaylistFragment; import com.namelessdev.mpdroid.fragments.PlaylistFragmentCompat; import com.namelessdev.mpdroid.library.LibraryTabActivity; import com.namelessdev.mpdroid.tools.Tools; public class MainMenuActivity extends MPDroidFragmentActivity implements OnNavigationListener { public static final int PLAYLIST = 1; public static final int ARTISTS = 2; public static final int SETTINGS = 5; public static final int STREAM = 6; public static final int LIBRARY = 7; public static final int CONNECT = 8; /** * The {@link android.support.v4.view.PagerAdapter} that will provide fragments for each of the * sections. We use a {@link android.support.v4.app.FragmentPagerAdapter} derivative, which will * keep every loaded fragment in memory. If this becomes too memory intensive, it may be best * to switch to a {@link android.support.v4.app.FragmentStatePagerAdapter}. 
*/ SectionsPagerAdapter mSectionsPagerAdapter; /** * The {@link ViewPager} that will host the section contents. */ ViewPager mViewPager; private int backPressExitCount; private Handler exitCounterReset; private boolean isDualPaneMode; @SuppressLint("NewApi") @TargetApi(11) @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main_activity); isDualPaneMode = (findViewById(R.id.playlist_fragment) != null); exitCounterReset = new Handler(); if (android.os.Build.VERSION.SDK_INT >= 9) { StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build(); StrictMode.setThreadPolicy(policy); } // Create the adapter that will return a fragment for each of the three primary sections // of the app. mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager()); // Set up the action bar. final ActionBar actionBar = getSupportActionBar(); if (!isDualPaneMode) { actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_LIST); actionBar.setDisplayShowTitleEnabled(false); actionBar.setDisplayShowHomeEnabled(true); } else { actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); actionBar.setDisplayShowTitleEnabled(true); actionBar.setDisplayShowHomeEnabled(true); setTitle(R.string.nowPlaying); } ArrayAdapter<CharSequence> actionBarAdapter = new ArrayAdapter<CharSequence>(getSupportActionBar().getThemedContext(), R.layout.sherlock_spinner_item); actionBarAdapter.add(getString(R.string.nowPlaying)); actionBarAdapter.add(getString(R.string.playQueue)); if(Build.VERSION.SDK_INT >= 14) { //Bug on ICS with sherlock's layout actionBarAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); } else { actionBarAdapter.setDropDownViewResource(R.layout.sherlock_spinner_dropdown_item); } actionBar.setListNavigationCallbacks(actionBarAdapter, this); // Set up the ViewPager with the sections adapter. 
mViewPager = (ViewPager) findViewById(R.id.pager); mViewPager.setAdapter(mSectionsPagerAdapter); if (android.os.Build.VERSION.SDK_INT >= 9) mViewPager.setOverScrollMode(View.OVER_SCROLL_NEVER); // When swiping between different sections, select the corresponding tab. // We can also use ActionBar.Tab#select() to do this if we have a reference to the // Tab. mViewPager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() { @Override public void onPageSelected(int position) { actionBar.setSelectedNavigationItem(position); } }); } @Override public void onStart() { super.onStart(); MPDApplication app = (MPDApplication) getApplicationContext(); app.setActivity(this); } @Override public void onStop() { super.onStop(); MPDApplication app = (MPDApplication) getApplicationContext(); app.unsetActivity(this); } @Override protected void onResume() { super.onResume(); backPressExitCount = 0; } /** * Called when Back button is pressed, displays message to user indicating the if back button is pressed again the application will exit. We keep a count of how many time back * button is pressed within 5 seconds. 
If the count is greater than 1 then call system.exit(0) * * Starts a post delay handler to reset the back press count to zero after 5 seconds * * @return None */ @Override public void onBackPressed() { final SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(this); final boolean exitConfirmationRequired = settings.getBoolean("enableExitConfirmation", false); if (exitConfirmationRequired && backPressExitCount < 1) { Tools.notifyUser(String.format(getResources().getString(R.string.backpressToQuit)), this); backPressExitCount += 1; exitCounterReset.postDelayed(new Runnable() { @Override public void run() { backPressExitCount = 0; } }, 5000); } else { /* * Nasty force quit, should shutdown everything nicely but there just too many async tasks maybe I'll correctly implement app.terminateApplication(); */ System.exit(0); } return; } @Override public boolean onNavigationItemSelected(int itemPosition, long itemId) { mViewPager.setCurrentItem(itemPosition); return true; } /** * A {@link FragmentPagerAdapter} that returns a fragment corresponding to one of the primary * sections of the app. */ public class SectionsPagerAdapter extends FragmentPagerAdapter { public SectionsPagerAdapter(FragmentManager fm) { super(fm); } @Override public Fragment getItem(int i) { Fragment fragment = null; switch (i) { case 0: fragment = new NowPlayingFragment(); break; case 1: if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { fragment = new PlaylistFragment(); } else { fragment = new PlaylistFragmentCompat(); } break; } return fragment; } @Override public int getCount() { return isDualPaneMode ? 
1 : 2; } @Override public CharSequence getPageTitle(int position) { switch (position) { case 0: return getString(R.string.nowPlaying); case 1: return getString(R.string.playQueue); } return null; } } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); getSupportMenuInflater().inflate(R.menu.mpd_mainmenu, menu); return true; } @Override public boolean onPrepareOptionsMenu(Menu menu) { //Reminder : never disable buttons that are shown as actionbar actions here super.onPrepareOptionsMenu(menu); MPDApplication app = (MPDApplication) this.getApplication(); MPD mpd = app.oMPDAsyncHelper.oMPD; if (!mpd.isConnected()) { if (menu.findItem(CONNECT) == null) { menu.add(0, CONNECT, 0, R.string.connect); } } else { if (menu.findItem(CONNECT) != null) { menu.removeItem(CONNECT); } } setMenuChecked(menu.findItem(R.id.GMM_Stream), app.getApplicationState().streamingMode); final MPDStatus mpdStatus = app.getApplicationState().currentMpdStatus; if (mpdStatus != null) { setMenuChecked(menu.findItem(R.id.GMM_Single), mpdStatus.isSingle()); setMenuChecked(menu.findItem(R.id.GMM_Consume), mpdStatus.isConsume()); } return true; } private void setMenuChecked(MenuItem item, boolean checked) { // Set the icon to a checkbox so 2.x users also get one item.setChecked(checked); item.setIcon(checked ? 
R.drawable.btn_check_buttonless_on : R.drawable.btn_check_buttonless_off); } private void openLibrary() { final Intent i = new Intent(this, LibraryTabActivity.class); startActivity(i); } @Override public boolean onOptionsItemSelected(MenuItem item) { Intent i = null; final MPDApplication app = (MPDApplication) this.getApplication(); final MPD mpd = app.oMPDAsyncHelper.oMPD; // Handle item selection switch (item.getItemId()) { case R.id.menu_search: this.onSearchRequested(); return true; case R.id.GMM_LibTab: openLibrary(); return true; case R.id.GMM_Settings: i = new Intent(this, SettingsActivity.class); startActivityForResult(i, SETTINGS); return true; case R.id.GMM_Outputs: i = new Intent(this, SettingsActivity.class); i.putExtra(SettingsActivity.OPEN_OUTPUT, true); startActivityForResult(i, SETTINGS); return true; case CONNECT: ((MPDApplication) this.getApplication()).connect(); return true; case R.id.GMM_Stream: if (app.getApplicationState().streamingMode) { i = new Intent(this, StreamingService.class); i.setAction("com.namelessdev.mpdroid.DIE"); this.startService(i); ((MPDApplication) this.getApplication()).getApplicationState().streamingMode = false; // Toast.makeText(this, "MPD Streaming Stopped", Toast.LENGTH_SHORT).show(); } else { if (app.oMPDAsyncHelper.oMPD.isConnected()) { i = new Intent(this, StreamingService.class); i.setAction("com.namelessdev.mpdroid.START_STREAMING"); this.startService(i); ((MPDApplication) this.getApplication()).getApplicationState().streamingMode = true; // Toast.makeText(this, "MPD Streaming Started", Toast.LENGTH_SHORT).show(); } } return true; case R.id.GMM_bonjour: startActivity(new Intent(this, ServerListActivity.class)); return true; case R.id.GMM_Consume: try { mpd.setConsume(!mpd.getStatus().isConsume()); } catch (MPDServerException e) { } return true; case R.id.GMM_Single: try { mpd.setSingle(!mpd.getStatus().isSingle()); } catch (MPDServerException e) { } return true; default: return super.onOptionsItemSelected(item); 
} } @Override public boolean onKeyLongPress(int keyCode, KeyEvent event) { final MPDApplication app = (MPDApplication) getApplicationContext(); switch (event.getKeyCode()) { case KeyEvent.KEYCODE_VOLUME_UP: new Thread(new Runnable() { @Override public void run() { try { app.oMPDAsyncHelper.oMPD.next(); } catch (MPDServerException e) { e.printStackTrace(); } } }).start(); return true; case KeyEvent.KEYCODE_VOLUME_DOWN: new Thread(new Runnable() { @Override public void run() { try { app.oMPDAsyncHelper.oMPD.previous(); } catch (MPDServerException e) { e.printStackTrace(); } } }).start(); return true; } return super.onKeyLongPress(keyCode, event); } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN || keyCode == KeyEvent.KEYCODE_VOLUME_UP) { // For onKeyLongPress to work event.startTracking(); return true; } return super.onKeyDown(keyCode, event); } @Override public boolean onKeyUp(int keyCode, final KeyEvent event) { final MPDApplication app = (MPDApplication) getApplicationContext(); switch (event.getKeyCode()) { case KeyEvent.KEYCODE_VOLUME_UP: case KeyEvent.KEYCODE_VOLUME_DOWN: if (event.isTracking() && !event.isCanceled() && !app.getApplicationState().streamingMode) { new Thread(new Runnable() { @Override public void run() { try { app.oMPDAsyncHelper.oMPD.adjustVolume(event.getKeyCode() == KeyEvent.KEYCODE_VOLUME_UP ? NowPlayingFragment.VOLUME_STEP : -NowPlayingFragment.VOLUME_STEP); } catch (MPDServerException e) { e.printStackTrace(); } } }).start(); } return true; } return super.onKeyUp(keyCode, event); } }
MPDroid/src/com/namelessdev/mpdroid/MainMenuActivity.java
package com.namelessdev.mpdroid; import org.a0z.mpd.MPD; import org.a0z.mpd.MPDStatus; import org.a0z.mpd.exception.MPDServerException; import android.annotation.SuppressLint; import android.annotation.TargetApi; import android.content.Intent; import android.content.SharedPreferences; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.StrictMode; import android.preference.PreferenceManager; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentPagerAdapter; import android.support.v4.view.ViewPager; import android.view.KeyEvent; import android.view.View; import android.widget.ArrayAdapter; import com.actionbarsherlock.app.ActionBar; import com.actionbarsherlock.app.ActionBar.OnNavigationListener; import com.actionbarsherlock.view.Menu; import com.actionbarsherlock.view.MenuItem; import com.namelessdev.mpdroid.MPDroidActivities.MPDroidFragmentActivity; import com.namelessdev.mpdroid.fragments.NowPlayingFragment; import com.namelessdev.mpdroid.fragments.PlaylistFragment; import com.namelessdev.mpdroid.fragments.PlaylistFragmentCompat; import com.namelessdev.mpdroid.library.LibraryTabActivity; import com.namelessdev.mpdroid.tools.Tools; public class MainMenuActivity extends MPDroidFragmentActivity implements OnNavigationListener { public static final int PLAYLIST = 1; public static final int ARTISTS = 2; public static final int SETTINGS = 5; public static final int STREAM = 6; public static final int LIBRARY = 7; public static final int CONNECT = 8; /** * The {@link android.support.v4.view.PagerAdapter} that will provide fragments for each of the * sections. We use a {@link android.support.v4.app.FragmentPagerAdapter} derivative, which will * keep every loaded fragment in memory. If this becomes too memory intensive, it may be best * to switch to a {@link android.support.v4.app.FragmentStatePagerAdapter}. 
*/ SectionsPagerAdapter mSectionsPagerAdapter; /** * The {@link ViewPager} that will host the section contents. */ ViewPager mViewPager; private int backPressExitCount; private Handler exitCounterReset; @SuppressLint("NewApi") @TargetApi(11) @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main_activity); exitCounterReset = new Handler(); if (android.os.Build.VERSION.SDK_INT >= 9) { StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build(); StrictMode.setThreadPolicy(policy); } // Create the adapter that will return a fragment for each of the three primary sections // of the app. mSectionsPagerAdapter = new SectionsPagerAdapter(getSupportFragmentManager()); // Set up the action bar. final ActionBar actionBar = getSupportActionBar(); actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_LIST); actionBar.setDisplayShowTitleEnabled(false); actionBar.setDisplayShowHomeEnabled(true); ArrayAdapter<CharSequence> actionBarAdapter = new ArrayAdapter<CharSequence>(getSupportActionBar().getThemedContext(), R.layout.sherlock_spinner_item); actionBarAdapter.add(getString(R.string.nowPlaying)); actionBarAdapter.add(getString(R.string.playQueue)); if(Build.VERSION.SDK_INT >= 14) { //Bug on ICS with sherlock's layout actionBarAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); } else { actionBarAdapter.setDropDownViewResource(R.layout.sherlock_spinner_dropdown_item); } actionBar.setListNavigationCallbacks(actionBarAdapter, this); // Set up the ViewPager with the sections adapter. mViewPager = (ViewPager) findViewById(R.id.pager); mViewPager.setAdapter(mSectionsPagerAdapter); if (android.os.Build.VERSION.SDK_INT >= 9) mViewPager.setOverScrollMode(View.OVER_SCROLL_NEVER); // When swiping between different sections, select the corresponding tab. // We can also use ActionBar.Tab#select() to do this if we have a reference to the // Tab. 
mViewPager.setOnPageChangeListener(new ViewPager.SimpleOnPageChangeListener() { @Override public void onPageSelected(int position) { actionBar.setSelectedNavigationItem(position); } }); } @Override public void onStart() { super.onStart(); MPDApplication app = (MPDApplication) getApplicationContext(); app.setActivity(this); } @Override public void onStop() { super.onStop(); MPDApplication app = (MPDApplication) getApplicationContext(); app.unsetActivity(this); } @Override protected void onResume() { super.onResume(); backPressExitCount = 0; } /** * Called when Back button is pressed, displays message to user indicating the if back button is pressed again the application will exit. We keep a count of how many time back * button is pressed within 5 seconds. If the count is greater than 1 then call system.exit(0) * * Starts a post delay handler to reset the back press count to zero after 5 seconds * * @return None */ @Override public void onBackPressed() { final SharedPreferences settings = PreferenceManager.getDefaultSharedPreferences(this); final boolean exitConfirmationRequired = settings.getBoolean("enableExitConfirmation", false); if (exitConfirmationRequired && backPressExitCount < 1) { Tools.notifyUser(String.format(getResources().getString(R.string.backpressToQuit)), this); backPressExitCount += 1; exitCounterReset.postDelayed(new Runnable() { @Override public void run() { backPressExitCount = 0; } }, 5000); } else { /* * Nasty force quit, should shutdown everything nicely but there just too many async tasks maybe I'll correctly implement app.terminateApplication(); */ System.exit(0); } return; } @Override public boolean onNavigationItemSelected(int itemPosition, long itemId) { mViewPager.setCurrentItem(itemPosition); return true; } /** * A {@link FragmentPagerAdapter} that returns a fragment corresponding to one of the primary * sections of the app. 
*/ public class SectionsPagerAdapter extends FragmentPagerAdapter { public SectionsPagerAdapter(FragmentManager fm) { super(fm); } @Override public Fragment getItem(int i) { Fragment fragment = null; switch (i) { case 0: fragment = new NowPlayingFragment(); break; case 1: if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) { fragment = new PlaylistFragment(); } else { fragment = new PlaylistFragmentCompat(); } break; } return fragment; } @Override public int getCount() { return 2; } @Override public CharSequence getPageTitle(int position) { switch (position) { case 0: return getString(R.string.nowPlaying); case 1: return getString(R.string.playQueue); } return null; } } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); getSupportMenuInflater().inflate(R.menu.mpd_mainmenu, menu); return true; } @Override public boolean onPrepareOptionsMenu(Menu menu) { //Reminder : never disable buttons that are shown as actionbar actions here super.onPrepareOptionsMenu(menu); MPDApplication app = (MPDApplication) this.getApplication(); MPD mpd = app.oMPDAsyncHelper.oMPD; if (!mpd.isConnected()) { if (menu.findItem(CONNECT) == null) { menu.add(0, CONNECT, 0, R.string.connect); } } else { if (menu.findItem(CONNECT) != null) { menu.removeItem(CONNECT); } } setMenuChecked(menu.findItem(R.id.GMM_Stream), app.getApplicationState().streamingMode); final MPDStatus mpdStatus = app.getApplicationState().currentMpdStatus; if (mpdStatus != null) { setMenuChecked(menu.findItem(R.id.GMM_Single), mpdStatus.isSingle()); setMenuChecked(menu.findItem(R.id.GMM_Consume), mpdStatus.isConsume()); } return true; } private void setMenuChecked(MenuItem item, boolean checked) { // Set the icon to a checkbox so 2.x users also get one item.setChecked(checked); item.setIcon(checked ? 
R.drawable.btn_check_buttonless_on : R.drawable.btn_check_buttonless_off); } private void openLibrary() { final Intent i = new Intent(this, LibraryTabActivity.class); startActivity(i); } @Override public boolean onOptionsItemSelected(MenuItem item) { Intent i = null; final MPDApplication app = (MPDApplication) this.getApplication(); final MPD mpd = app.oMPDAsyncHelper.oMPD; // Handle item selection switch (item.getItemId()) { case R.id.menu_search: this.onSearchRequested(); return true; case R.id.GMM_LibTab: openLibrary(); return true; case R.id.GMM_Settings: i = new Intent(this, SettingsActivity.class); startActivityForResult(i, SETTINGS); return true; case R.id.GMM_Outputs: i = new Intent(this, SettingsActivity.class); i.putExtra(SettingsActivity.OPEN_OUTPUT, true); startActivityForResult(i, SETTINGS); return true; case CONNECT: ((MPDApplication) this.getApplication()).connect(); return true; case R.id.GMM_Stream: if (app.getApplicationState().streamingMode) { i = new Intent(this, StreamingService.class); i.setAction("com.namelessdev.mpdroid.DIE"); this.startService(i); ((MPDApplication) this.getApplication()).getApplicationState().streamingMode = false; // Toast.makeText(this, "MPD Streaming Stopped", Toast.LENGTH_SHORT).show(); } else { if (app.oMPDAsyncHelper.oMPD.isConnected()) { i = new Intent(this, StreamingService.class); i.setAction("com.namelessdev.mpdroid.START_STREAMING"); this.startService(i); ((MPDApplication) this.getApplication()).getApplicationState().streamingMode = true; // Toast.makeText(this, "MPD Streaming Started", Toast.LENGTH_SHORT).show(); } } return true; case R.id.GMM_bonjour: startActivity(new Intent(this, ServerListActivity.class)); return true; case R.id.GMM_Consume: try { mpd.setConsume(!mpd.getStatus().isConsume()); } catch (MPDServerException e) { } return true; case R.id.GMM_Single: try { mpd.setSingle(!mpd.getStatus().isSingle()); } catch (MPDServerException e) { } return true; default: return super.onOptionsItemSelected(item); 
} } @Override public boolean onKeyLongPress(int keyCode, KeyEvent event) { final MPDApplication app = (MPDApplication) getApplicationContext(); switch (event.getKeyCode()) { case KeyEvent.KEYCODE_VOLUME_UP: new Thread(new Runnable() { @Override public void run() { try { app.oMPDAsyncHelper.oMPD.next(); } catch (MPDServerException e) { e.printStackTrace(); } } }).start(); return true; case KeyEvent.KEYCODE_VOLUME_DOWN: new Thread(new Runnable() { @Override public void run() { try { app.oMPDAsyncHelper.oMPD.previous(); } catch (MPDServerException e) { e.printStackTrace(); } } }).start(); return true; } return super.onKeyLongPress(keyCode, event); } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { if (keyCode == KeyEvent.KEYCODE_VOLUME_DOWN || keyCode == KeyEvent.KEYCODE_VOLUME_UP) { // For onKeyLongPress to work event.startTracking(); return true; } return super.onKeyDown(keyCode, event); } @Override public boolean onKeyUp(int keyCode, final KeyEvent event) { final MPDApplication app = (MPDApplication) getApplicationContext(); switch (event.getKeyCode()) { case KeyEvent.KEYCODE_VOLUME_UP: case KeyEvent.KEYCODE_VOLUME_DOWN: if (event.isTracking() && !event.isCanceled() && !app.getApplicationState().streamingMode) { new Thread(new Runnable() { @Override public void run() { try { app.oMPDAsyncHelper.oMPD.adjustVolume(event.getKeyCode() == KeyEvent.KEYCODE_VOLUME_UP ? NowPlayingFragment.VOLUME_STEP : -NowPlayingFragment.VOLUME_STEP); } catch (MPDServerException e) { e.printStackTrace(); } } }).start(); } return true; } return super.onKeyUp(keyCode, event); } }
Tablet layout code
MPDroid/src/com/namelessdev/mpdroid/MainMenuActivity.java
Tablet layout code
<ide><path>PDroid/src/com/namelessdev/mpdroid/MainMenuActivity.java <ide> ViewPager mViewPager; <ide> private int backPressExitCount; <ide> private Handler exitCounterReset; <add> private boolean isDualPaneMode; <ide> <ide> @SuppressLint("NewApi") <ide> @TargetApi(11) <ide> super.onCreate(savedInstanceState); <ide> setContentView(R.layout.main_activity); <ide> <add> isDualPaneMode = (findViewById(R.id.playlist_fragment) != null); <add> <ide> exitCounterReset = new Handler(); <ide> <ide> if (android.os.Build.VERSION.SDK_INT >= 9) { <ide> <ide> // Set up the action bar. <ide> final ActionBar actionBar = getSupportActionBar(); <del> actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_LIST); <del> actionBar.setDisplayShowTitleEnabled(false); <del> actionBar.setDisplayShowHomeEnabled(true); <add> if (!isDualPaneMode) { <add> actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_LIST); <add> actionBar.setDisplayShowTitleEnabled(false); <add> actionBar.setDisplayShowHomeEnabled(true); <add> } else { <add> actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); <add> actionBar.setDisplayShowTitleEnabled(true); <add> actionBar.setDisplayShowHomeEnabled(true); <add> setTitle(R.string.nowPlaying); <add> } <ide> <ide> ArrayAdapter<CharSequence> actionBarAdapter = new ArrayAdapter<CharSequence>(getSupportActionBar().getThemedContext(), <ide> R.layout.sherlock_spinner_item); <ide> <ide> @Override <ide> public int getCount() { <del> return 2; <add> return isDualPaneMode ? 1 : 2; <ide> } <ide> <ide> @Override
Java
mit
474a7893e95054292a8cebc31fd71d6688158f91
0
streamreasoning/heaven,streamreasoning/heaven,streamreasoning/heaven
package it.polimi.heaven.core.ts.streamer.flowrateprofiler; import it.polimi.heaven.core.enums.FlowRateProfile; import it.polimi.heaven.core.ts.events.Stimulus; import it.polimi.heaven.core.ts.events.TripleContainer; import java.util.HashSet; import java.util.Set; import lombok.Getter; import lombok.Setter; import lombok.extern.log4j.Log4j; @Setter @Getter @Log4j public abstract class TripleSetFlowRateProfiler implements FlowRateProfiler<Stimulus, TripleContainer> { protected Stimulus e; protected FlowRateProfile mode; protected int initSize, roundSize, eventNumber; protected int x, y; protected boolean sizeReached; protected int experimentNumber; private String id; protected long currentTimestamp; protected int timing; public TripleSetFlowRateProfiler(FlowRateProfile mode, int x, int y, int initSize, int experimentNumber, int timing) { this.x = x; this.y = y; this.initSize = roundSize = initSize; this.eventNumber = 1; this.mode = mode; this.sizeReached = false; this.currentTimestamp = 0L; this.timing = timing; id = "<http://example.org/" + experimentNumber + "/"; e = new Stimulus(id, new HashSet<TripleContainer>(), eventNumber, experimentNumber, currentTimestamp); } @Override public Stimulus getEvent() { currentTimestamp += timing; e.setTimestamp(currentTimestamp); return sizeReached ? e : null; } @Override public boolean isReady() { return sizeReached; } @Override public boolean append(TripleContainer triple) { if (sizeReached) { updateSize(); Set<TripleContainer> set = new HashSet<TripleContainer>(); if (roundSize > 0) { eventNumber++; set.add(triple); } e = e.rebuild(id, set, eventNumber, experimentNumber, currentTimestamp); log.debug("is Full Event Size [" + e.size() + "] roundSize [" + roundSize + "]"); } else { e.getEventTriples().add(triple); log.debug("NotFull Event Size [" + e.size() + "] roundSize [" + roundSize + "]"); } sizeReached = (e.size() == roundSize); return sizeReached; } public abstract void updateSize(); }
core/src/main/java/it/polimi/heaven/core/ts/streamer/flowrateprofiler/TripleSetFlowRateProfiler.java
package it.polimi.heaven.core.ts.streamer.flowrateprofiler; import it.polimi.heaven.core.enums.FlowRateProfile; import it.polimi.heaven.core.ts.events.Stimulus; import it.polimi.heaven.core.ts.events.TripleContainer; import java.util.HashSet; import java.util.Set; import lombok.Getter; import lombok.Setter; import lombok.extern.log4j.Log4j; @Setter @Getter @Log4j public abstract class TripleSetFlowRateProfiler implements FlowRateProfiler<Stimulus, TripleContainer> { protected Stimulus e; protected FlowRateProfile mode; protected int initSize, roundSize, eventNumber; protected int x, y; protected boolean sizeReached; protected int experimentNumber; private String id; protected long currentTimestamp; protected int timing; public TripleSetFlowRateProfiler(FlowRateProfile mode, int x, int y, int initSize, int experimentNumber, int timing) { this.x = x; this.y = y; this.initSize = roundSize = initSize; this.eventNumber = 1; this.mode = mode; this.sizeReached = false; this.currentTimestamp = 0L; this.timing = timing; id = "<http://example.org/" + experimentNumber + "/"; e = new Stimulus(id, new HashSet<TripleContainer>(), eventNumber, experimentNumber, currentTimestamp); } @Override public Stimulus getEvent() { return sizeReached ? e : null; } @Override public boolean isReady() { return sizeReached; } @Override public boolean append(TripleContainer triple) { if (sizeReached) { updateSize(); Set<TripleContainer> set = new HashSet<TripleContainer>(); if (roundSize > 0) { eventNumber++; set.add(triple); } e = e.rebuild(id, set, eventNumber, experimentNumber, currentTimestamp + timing); log.debug("is Full Event Size [" + e.size() + "] roundSize [" + roundSize + "]"); } else { e.getEventTriples().add(triple); log.debug("NotFull Event Size [" + e.size() + "] roundSize [" + roundSize + "]"); } sizeReached = (e.size() == roundSize); return sizeReached; } public abstract void updateSize(); }
added timestamp assignment
core/src/main/java/it/polimi/heaven/core/ts/streamer/flowrateprofiler/TripleSetFlowRateProfiler.java
added timestamp assignment
<ide><path>ore/src/main/java/it/polimi/heaven/core/ts/streamer/flowrateprofiler/TripleSetFlowRateProfiler.java <ide> <ide> @Override <ide> public Stimulus getEvent() { <add> currentTimestamp += timing; <add> e.setTimestamp(currentTimestamp); <ide> return sizeReached ? e : null; <ide> } <ide> <ide> eventNumber++; <ide> set.add(triple); <ide> } <del> e = e.rebuild(id, set, eventNumber, experimentNumber, currentTimestamp + timing); <add> e = e.rebuild(id, set, eventNumber, experimentNumber, currentTimestamp); <ide> log.debug("is Full Event Size [" + e.size() + "] roundSize [" + roundSize + "]"); <ide> } else { <ide> e.getEventTriples().add(triple);
Java
mit
c31707a6bc8c955120bc70c9b6a99c96322f84dd
0
mikroskeem/Shuriken
package eu.mikroskeem.shuriken.instrumentation; import eu.mikroskeem.shuriken.common.Ensure; import org.jetbrains.annotations.NotNull; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.MethodVisitor; import static org.objectweb.asm.Opcodes.*; public final class ClassTools { /** * Unqualify class name <br> * In other words, <pre>foo.bar.baz</pre> -&gt; <pre>foo/bar/baz</pre> * * @param className Class name * @return Unqualified class name */ @NotNull public static String unqualifyName(String className) { return Ensure.notNull(className, "Class name shouldn't be null!").replace(".", "/"); } /** * Unqualify class name * * @see #unqualifyName(String) * @param clazz Class * @return Unqualified class name */ @NotNull public static String unqualifyName(Class<?> clazz) { return unqualifyName(Ensure.notNull(clazz, "Class shouldn't be null!").getName()); } /** * Get class signature for generic class implementing/extending * * @param genericClass Generic class * @param types Generic types * @return Signature string */ @NotNull public static String getGenericSignature(Class<?> genericClass, Class<?>... 
types) { String genericName = unqualifyName(Ensure.notNull(genericClass, "Class shouldn't be null!").getName()); StringBuilder sb = new StringBuilder(); for(Class<?> type: types) sb.append("L").append(type.getSimpleName()).append(";"); return "L" + genericName + "<" + sb.toString() + ">;"; } /** * Generate simple <pre>super()</pre> calling constructor * * @param classWriter ClassWriter instance * @param superClass Super class name (use {@link Object} for non-extending classes * (or explictly extending Object, which is redundant anyway) * @deprecated Using ClassVisitor is better idea */ @Deprecated public static void generateSimpleSuperConstructor(@NotNull ClassWriter classWriter, @NotNull String superClass) { generateSimpleSuperConstructor((ClassVisitor)classWriter, superClass); } /** * Generate simple <pre>super()</pre> calling constructor * * @param classVisitor ClassVisitor instance * @param superClass Super class name (use {@link Object} for non-extending classes * (or explictly extending Object, which is redundant anyway) */ public static void generateSimpleSuperConstructor(@NotNull ClassVisitor classVisitor, @NotNull String superClass) { MethodVisitor mv = Ensure.notNull(classVisitor, "ClassWriter shouldn't be null!") .visitMethod(ACC_PUBLIC, "<init>", "()V", null, null); mv.visitCode(); mv.visitVarInsn(ALOAD, 0); mv.visitMethodInsn(INVOKESPECIAL, unqualifyName(superClass), "<init>", "()V", false); mv.visitInsn(RETURN); mv.visitMaxs(1, 0); mv.visitEnd(); } /** * Generate simple <pre>super()</pre> calling constructor * * @param classWriter ClassWriter instance * @param superClass Super class object (use {@link Object} for non-extending classes * (or explictly extending Object, which is redundant anyway) * @deprecated Using ClassVisitor is better idea */ @Deprecated public static void generateSimpleSuperConstructor(@NotNull ClassWriter classWriter, @NotNull Class<?> superClass) { generateSimpleSuperConstructor((ClassVisitor)classWriter, 
Ensure.notNull(superClass, "Class shouldn't be null").getName()); } /** * Generate simple <pre>super()</pre> calling constructor * * @param classVisitor ClassWriter instance * @param superClass Super class object (use {@link Object} for non-extending classes * (or explictly extending Object, which is redundant anyway) */ public static void generateSimpleSuperConstructor(@NotNull ClassVisitor classVisitor, @NotNull Class<?> superClass) { generateSimpleSuperConstructor(classVisitor, Ensure.notNull(superClass, "Class shouldn't be null").getName()); } }
instrumentation/src/main/java/eu/mikroskeem/shuriken/instrumentation/ClassTools.java
package eu.mikroskeem.shuriken.instrumentation; import eu.mikroskeem.shuriken.common.Ensure; import org.jetbrains.annotations.NotNull; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.MethodVisitor; import static org.objectweb.asm.Opcodes.*; public final class ClassTools { /** * Unqualify class name <br> * In other words, <pre>foo.bar.baz</pre> -&gt; <pre>foo/bar/baz</pre> * * @param className Class name * @return Unqualified class name */ @NotNull public static String unqualifyName(String className) { return Ensure.notNull(className, "Class name shouldn't be null!").replace(".", "/"); } /** * Unqualify class name * * @see #unqualifyName(String) * @param clazz Class * @return Unqualified class name */ @NotNull public static String unqualifyName(Class<?> clazz) { return unqualifyName(Ensure.notNull(clazz, "Class shouldn't be null!").getName()); } /** * Get class signature for generic class implementing/extending * * @param genericClass Generic class * @param types Generic types * @return Signature string */ @NotNull public static String getGenericSignature(Class<?> genericClass, Class<?>... 
types) { String genericName = unqualifyName(Ensure.notNull(genericClass, "Class shouldn't be null!").getName()); StringBuilder sb = new StringBuilder(); for(Class<?> type: types) sb.append("L").append(type.getSimpleName()).append(";"); return "L" + genericName + "<" + sb.toString() + ">;"; } /** * Generate simple <pre>super()</pre> calling constructor * * @param classWriter ClassWriter instance * @param superClass Super class name (use {@link Object} for non-extending classes * (or explictly extending Object, which is redundant anyway) */ public static void generateSimpleSuperConstructor(@NotNull ClassWriter classWriter, @NotNull String superClass) { MethodVisitor mv = Ensure.notNull(classWriter, "ClassWriter shouldn't be null!") .visitMethod(ACC_PUBLIC, "<init>", "()V", null, null); mv.visitVarInsn(ALOAD, 0); mv.visitMethodInsn(INVOKESPECIAL, unqualifyName(superClass), "<init>", "()V", false); mv.visitInsn(RETURN); mv.visitMaxs(1, 0); mv.visitEnd(); } /** * Generate simple <pre>super()</pre> calling constructor * * @param classWriter ClassWriter instance * @param superClass Super class object (use {@link Object} for non-extending classes * (or explictly extending Object, which is redundant anyway) */ public static void generateSimpleSuperConstructor(@NotNull ClassWriter classWriter, @NotNull Class<?> superClass) { generateSimpleSuperConstructor(classWriter, Ensure.notNull(superClass, "Class shouldn't be null").getName()); } }
Deprecate ctor gens using ClassWriter And add visitCode() to make CheckClassAdapter stop bitching
instrumentation/src/main/java/eu/mikroskeem/shuriken/instrumentation/ClassTools.java
Deprecate ctor gens using ClassWriter
<ide><path>nstrumentation/src/main/java/eu/mikroskeem/shuriken/instrumentation/ClassTools.java <ide> <ide> import eu.mikroskeem.shuriken.common.Ensure; <ide> import org.jetbrains.annotations.NotNull; <add>import org.objectweb.asm.ClassVisitor; <ide> import org.objectweb.asm.ClassWriter; <ide> import org.objectweb.asm.MethodVisitor; <ide> <ide> * @param classWriter ClassWriter instance <ide> * @param superClass Super class name (use {@link Object} for non-extending classes <ide> * (or explictly extending Object, which is redundant anyway) <add> * @deprecated Using ClassVisitor is better idea <ide> */ <add> @Deprecated <ide> public static void generateSimpleSuperConstructor(@NotNull ClassWriter classWriter, @NotNull String superClass) { <del> MethodVisitor mv = Ensure.notNull(classWriter, "ClassWriter shouldn't be null!") <add> generateSimpleSuperConstructor((ClassVisitor)classWriter, superClass); <add> } <add> <add> /** <add> * Generate simple <pre>super()</pre> calling constructor <add> * <add> * @param classVisitor ClassVisitor instance <add> * @param superClass Super class name (use {@link Object} for non-extending classes <add> * (or explictly extending Object, which is redundant anyway) <add> */ <add> public static void generateSimpleSuperConstructor(@NotNull ClassVisitor classVisitor, @NotNull String superClass) { <add> MethodVisitor mv = Ensure.notNull(classVisitor, "ClassWriter shouldn't be null!") <ide> .visitMethod(ACC_PUBLIC, "<init>", "()V", null, null); <add> mv.visitCode(); <ide> mv.visitVarInsn(ALOAD, 0); <ide> mv.visitMethodInsn(INVOKESPECIAL, unqualifyName(superClass), "<init>", "()V", false); <ide> mv.visitInsn(RETURN); <ide> * @param classWriter ClassWriter instance <ide> * @param superClass Super class object (use {@link Object} for non-extending classes <ide> * (or explictly extending Object, which is redundant anyway) <add> * @deprecated Using ClassVisitor is better idea <ide> */ <add> @Deprecated <ide> public static void 
generateSimpleSuperConstructor(@NotNull ClassWriter classWriter, @NotNull Class<?> superClass) { <del> generateSimpleSuperConstructor(classWriter, Ensure.notNull(superClass, "Class shouldn't be null").getName()); <add> generateSimpleSuperConstructor((ClassVisitor)classWriter, Ensure.notNull(superClass, "Class shouldn't be null").getName()); <add> } <add> <add> /** <add> * Generate simple <pre>super()</pre> calling constructor <add> * <add> * @param classVisitor ClassWriter instance <add> * @param superClass Super class object (use {@link Object} for non-extending classes <add> * (or explictly extending Object, which is redundant anyway) <add> */ <add> public static void generateSimpleSuperConstructor(@NotNull ClassVisitor classVisitor, @NotNull Class<?> superClass) { <add> generateSimpleSuperConstructor(classVisitor, Ensure.notNull(superClass, "Class shouldn't be null").getName()); <ide> } <ide> }
Java
mit
58843251588fcb80d481cd582796e491e5112436
0
Zegis/Mavis,Zegis/Mavis
package pl.kofun.mavis; import java.io.File; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Calendar; import org.jfree.chart.ChartFactory; import org.jfree.chart.ChartUtilities; import org.jfree.chart.JFreeChart; import org.jfree.chart.axis.DateAxis; import org.jfree.chart.axis.DateTickUnit; import org.jfree.chart.axis.DateTickUnitType; import org.jfree.chart.axis.NumberAxis; import org.jfree.chart.axis.NumberTickUnit; import org.jfree.chart.plot.XYPlot; import org.jfree.data.time.Month; import org.jfree.data.time.TimeSeries; import org.jfree.data.time.TimeSeriesCollection; import org.jfree.data.time.TimeSeriesDataItem; import pl.kofun.mavis.Interfaces.MainTask; import pl.kofun.mavis.counters.BlogCounter; import pl.kofun.mavis.counters.Counter; import pl.kofun.mavis.counters.LinesCounter; import pl.kofun.mavis.counters.PeriodToCount; import pl.kofun.mavis.counters.TrelloCounter; import pl.kofun.mavis.utils.FileNameCreator; import pl.kofun.mavis.utils.FilterBuilder; import pl.kofun.mavis.utils.yearFileNameCreator; public class YearPlotter implements MainTask{ private LinesCounter booksFileCounter; private LinesCounter gamesFileCounter; private BlogCounter blogCounter; private BlogCounter devCounter; private TrelloCounter tasksCounter; private int yearToPlot; private FileNameCreator fileNameCreator; public YearPlotter(Options options) { if(options.validForPlot()) { booksFileCounter = new LinesCounter(options.get("booksfileName")); gamesFileCounter = new LinesCounter(options.get("gamesfileName")); blogCounter = new BlogCounter(options.get("blogUrl")); devCounter = new BlogCounter(options.get("devUrl")); tasksCounter = new TrelloCounter(options.get("apiKey"), options.get("apiToken")); } if(options.containsKey("yeartoPlot")) { yearToPlot = Integer.parseInt(options.get("yeartoPlot")); } else { yearToPlot = Calendar.getInstance().get(Calendar.YEAR); } fileNameCreator = new yearFileNameCreator(); } @Override public void execute() { 
if(booksFileCounter != null || gamesFileCounter != null) { try { TimeSeries books = new TimeSeries("Books"); TimeSeries games = new TimeSeries("Games"); TimeSeries posts = new TimeSeries("Posts"); TimeSeries devposts = new TimeSeries("Dev posts"); TimeSeries tasks = new TimeSeries("Tasks"); FilterBuilder filterMaker = new FilterBuilder(); Month currentMonth; PeriodToCount currentPeriod = new PeriodToCount(); currentPeriod.Year = yearToPlot; for(int i=1; i<13; ++i) { currentMonth = new Month(i,yearToPlot); currentPeriod.Filter = filterMaker.makeFilter(i-1,yearToPlot); currentPeriod.Month = i-1; books.add(createFileSeriesDataItem(currentMonth,currentPeriod, booksFileCounter)); games.add(createFileSeriesDataItem(currentMonth, currentPeriod, gamesFileCounter)); posts.add(createFileSeriesDataItem(currentMonth,currentPeriod,blogCounter)); devposts.add(createFileSeriesDataItem(currentMonth,currentPeriod, devCounter)); tasks.add(createFileSeriesDataItem(currentMonth,currentPeriod,tasksCounter)); } TimeSeriesCollection dataset = new TimeSeriesCollection(); dataset.addSeries(books); dataset.addSeries(games); dataset.addSeries(posts); dataset.addSeries(devposts); dataset.addSeries(tasks); JFreeChart chart = ChartFactory.createTimeSeriesChart( String.valueOf(yearToPlot), "Month", "Quantity", dataset, true, true, false ); String chartFilename = fileNameCreator.createName(yearToPlot); XYPlot plot = chart.getXYPlot(); NumberAxis yaxis = (NumberAxis) plot.getRangeAxis(); yaxis.setTickUnit(new NumberTickUnit(1)); DateAxis xaxis = (DateAxis) plot.getDomainAxis(); xaxis.setTickUnit(new DateTickUnit(DateTickUnitType.MONTH, 1)); xaxis.setDateFormatOverride(new SimpleDateFormat("MMM")); ChartUtilities.saveChartAsJPEG(new File(chartFilename), chart, 500, 300); }catch(IOException e) { System.out.println(e); } System.out.println("All green"); } else { this.usage(); } } private TimeSeriesDataItem createFileSeriesDataItem(Month currentMonth, PeriodToCount currentPeriod, Counter counter) { 
counter.setPeriodToCount(currentPeriod); return new TimeSeriesDataItem(currentMonth, counter.count()); } @Override public void usage() { System.out.println("For Year Plotter you must define:"); System.out.println("books filename as -b (argument) or booksfileName : (argument) inside txt file"); System.out.println("games filename as -g (argument) or gamesfileName : (argument) inside txt file"); System.out.println("You may want to specify"); System.out.println("Year to plot by -y (year) in command line"); } }
src/pl/kofun/mavis/YearPlotter.java
package pl.kofun.mavis; import java.io.File; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Calendar; import org.jfree.chart.ChartFactory; import org.jfree.chart.ChartUtilities; import org.jfree.chart.JFreeChart; import org.jfree.chart.axis.DateAxis; import org.jfree.chart.axis.DateTickUnit; import org.jfree.chart.axis.DateTickUnitType; import org.jfree.chart.axis.NumberAxis; import org.jfree.chart.axis.NumberTickUnit; import org.jfree.chart.plot.XYPlot; import org.jfree.data.time.Month; import org.jfree.data.time.TimeSeries; import org.jfree.data.time.TimeSeriesCollection; import org.jfree.data.time.TimeSeriesDataItem; import pl.kofun.mavis.Interfaces.MainTask; import pl.kofun.mavis.counters.BlogCounter; import pl.kofun.mavis.counters.Counter; import pl.kofun.mavis.counters.LinesCounter; import pl.kofun.mavis.counters.PeriodToCount; import pl.kofun.mavis.counters.TrelloCounter; import pl.kofun.mavis.utils.FileNameCreator; import pl.kofun.mavis.utils.FilterBuilder; import pl.kofun.mavis.utils.yearFileNameCreator; public class YearPlotter implements MainTask{ private LinesCounter booksFileCounter; private LinesCounter gamesFileCounter; private BlogCounter blogCounter; private BlogCounter devCounter; private TrelloCounter tasksCounter; private int yearToPlot; private FileNameCreator fileNameCreator; public YearPlotter(Options options) { if(options.validForPlot()) { booksFileCounter = new LinesCounter(options.get("booksfileName")); gamesFileCounter = new LinesCounter(options.get("gamesfileName")); blogCounter = new BlogCounter(options.get("blogUrl")); devCounter = new BlogCounter(options.get("devUrl")); tasksCounter = new TrelloCounter(options.get("apiKey"), options.get("apiToken")); } if(options.containsKey("yeartoPlot")) { yearToPlot = Integer.parseInt(options.get("yeartoPlot")); } else { yearToPlot = Calendar.getInstance().get(Calendar.YEAR); } fileNameCreator = new yearFileNameCreator(); } @Override public void execute() { 
if(booksFileCounter != null || gamesFileCounter != null) { try { TimeSeries books = new TimeSeries("Books"); TimeSeries games = new TimeSeries("Games"); TimeSeries posts = new TimeSeries("Posts"); TimeSeries devposts = new TimeSeries("Dev posts"); TimeSeries tasks = new TimeSeries("Tasks"); FilterBuilder filterMaker = new FilterBuilder(); Month currentMonth; PeriodToCount currentPeriod = new PeriodToCount(); currentPeriod.Year = yearToPlot; for(int i=1; i<13; ++i) { currentMonth = new Month(i,yearToPlot); currentPeriod.Filter = filterMaker.makeFilter(i-1,yearToPlot); currentPeriod.Month = i-1; books.add(createFileSeriesDataItem(currentMonth,currentPeriod, booksFileCounter)); games.add(createFileSeriesDataItem(currentMonth, currentPeriod, gamesFileCounter)); posts.add(createSeriesDataItem(currentMonth,currentPeriod,blogCounter)); devposts.add(createSeriesDataItem(currentMonth,currentPeriod, devCounter)); tasks.add(createFileSeriesDataItem(currentMonth,currentPeriod,tasksCounter)); } TimeSeriesCollection dataset = new TimeSeriesCollection(); dataset.addSeries(books); dataset.addSeries(games); dataset.addSeries(posts); dataset.addSeries(devposts); dataset.addSeries(tasks); JFreeChart chart = ChartFactory.createTimeSeriesChart( String.valueOf(yearToPlot), "Month", "Quantity", dataset, true, true, false ); String chartFilename = fileNameCreator.createName(yearToPlot); XYPlot plot = chart.getXYPlot(); NumberAxis yaxis = (NumberAxis) plot.getRangeAxis(); yaxis.setTickUnit(new NumberTickUnit(1)); DateAxis xaxis = (DateAxis) plot.getDomainAxis(); xaxis.setTickUnit(new DateTickUnit(DateTickUnitType.MONTH, 1)); xaxis.setDateFormatOverride(new SimpleDateFormat("MMM")); ChartUtilities.saveChartAsJPEG(new File(chartFilename), chart, 500, 300); }catch(IOException e) { System.out.println(e); } System.out.println("All green"); } else { this.usage(); } } private TimeSeriesDataItem createFileSeriesDataItem(Month currentMonth, PeriodToCount currentPeriod, Counter counter) { 
counter.setPeriodToCount(currentPeriod); return new TimeSeriesDataItem(currentMonth, counter.count()); } private TimeSeriesDataItem createSeriesDataItem(Month currentMonth, PeriodToCount currentPeriod, BlogCounter counter) { counter.setPeriodToCount(currentPeriod); return new TimeSeriesDataItem(currentMonth, counter.count()); } @Override public void usage() { System.out.println("For Year Plotter you must define:"); System.out.println("books filename as -b (argument) or booksfileName : (argument) inside txt file"); System.out.println("games filename as -g (argument) or gamesfileName : (argument) inside txt file"); System.out.println("You may want to specify"); System.out.println("Year to plot by -y (year) in command line"); } }
Removed unecessary method
src/pl/kofun/mavis/YearPlotter.java
Removed unecessary method
<ide><path>rc/pl/kofun/mavis/YearPlotter.java <ide> <ide> books.add(createFileSeriesDataItem(currentMonth,currentPeriod, booksFileCounter)); <ide> games.add(createFileSeriesDataItem(currentMonth, currentPeriod, gamesFileCounter)); <del> posts.add(createSeriesDataItem(currentMonth,currentPeriod,blogCounter)); <del> devposts.add(createSeriesDataItem(currentMonth,currentPeriod, devCounter)); <add> posts.add(createFileSeriesDataItem(currentMonth,currentPeriod,blogCounter)); <add> devposts.add(createFileSeriesDataItem(currentMonth,currentPeriod, devCounter)); <ide> tasks.add(createFileSeriesDataItem(currentMonth,currentPeriod,tasksCounter)); <ide> } <ide> <ide> counter.setPeriodToCount(currentPeriod); <ide> return new TimeSeriesDataItem(currentMonth, counter.count()); <ide> } <del> <del> private TimeSeriesDataItem createSeriesDataItem(Month currentMonth, PeriodToCount currentPeriod, BlogCounter counter) <del> { <del> counter.setPeriodToCount(currentPeriod); <del> return new TimeSeriesDataItem(currentMonth, counter.count()); <del> } <ide> <ide> @Override <ide> public void usage() {
Java
apache-2.0
014ee72126693f0d3b2cc5feac8e3b6ddf672aad
0
NitorCreations/willow,NitorCreations/willow,NitorCreations/willow,NitorCreations/willow,NitorCreations/willow
package com.nitorcreations.willow.auth; import java.util.TreeSet; import javax.inject.Singleton; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import org.apache.shiro.authc.AuthenticationToken; import org.apache.shiro.codec.Base64; import org.apache.shiro.web.filter.authc.BasicHttpAuthenticationFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Singleton public class PublicKeyAuthenticationFilter extends BasicHttpAuthenticationFilter { private static final Logger log = LoggerFactory.getLogger(PublicKeyAuthenticationFilter.class); private final TreeSet<Long> timeStamps = new TreeSet<>(); private static final String scheme = "PUBLICKEY"; public PublicKeyAuthenticationFilter() { this.setAuthcScheme(scheme); this.setAuthzScheme(scheme); } @Override protected AuthenticationToken createToken(ServletRequest request, ServletResponse response) { String authorizationHeader = getAuthzHeader(request); if (authorizationHeader == null || authorizationHeader.length() == 0) { return createToken("", null, null, request); } if (log.isDebugEnabled()) { log.debug("Attempting to execute login with headers [" + authorizationHeader + "]"); } String[] header = authorizationHeader.split("\\s+"); if (header.length < 3 || !scheme.equals(header[0].toUpperCase())) { return createToken("", null, null, request); } String[] unameNow = Base64.decodeToString(header[1]).split(":"); String username = unameNow[0]; long timestamp = Long.parseLong(unameNow[1]); long currentTime = System.currentTimeMillis(); long wStart = currentTime - 15000; long wEnd = currentTime + 15000; if (!((timestamp > wStart) && (timestamp < wEnd)) || timeStamps.contains(timestamp)) { return createToken("", null, null, request); } timeStamps.add(timestamp); while (timeStamps.first() < wStart) { timeStamps.pollFirst(); } PublicKeyAuthenticationToken token = createToken(username, Base64.decode(header[1]), Base64.decode(header[2]), request); for (int i=3; i<header.length;i++) { 
token.addSignature(Base64.decode(header[i])); } return token; } protected PublicKeyAuthenticationToken createToken(String username, byte[] sign, byte[] signature, ServletRequest request) { return new PublicKeyAuthenticationToken(username, sign, signature, getHost(request)); } }
willow-servers/src/main/java/com/nitorcreations/willow/auth/PublicKeyAuthenticationFilter.java
package com.nitorcreations.willow.auth; import javax.inject.Singleton; import javax.servlet.ServletRequest; import javax.servlet.ServletResponse; import org.apache.shiro.authc.AuthenticationToken; import org.apache.shiro.codec.Base64; import org.apache.shiro.web.filter.authc.BasicHttpAuthenticationFilter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @Singleton public class PublicKeyAuthenticationFilter extends BasicHttpAuthenticationFilter { private static final Logger log = LoggerFactory.getLogger(PublicKeyAuthenticationFilter.class); private static final String scheme = "PUBLICKEY"; public PublicKeyAuthenticationFilter() { this.setAuthcScheme(scheme); this.setAuthzScheme(scheme); } @Override protected AuthenticationToken createToken(ServletRequest request, ServletResponse response) { String authorizationHeader = getAuthzHeader(request); if (authorizationHeader == null || authorizationHeader.length() == 0) { return createToken("", null, null, request); } if (log.isDebugEnabled()) { log.debug("Attempting to execute login with headers [" + authorizationHeader + "]"); } String[] header = authorizationHeader.split("\\s+"); if (header.length < 3 || !scheme.equals(header[0].toUpperCase())) { return createToken("", null, null, request); } String unameNow = Base64.decodeToString(header[1]); String username = unameNow.split(":")[0]; PublicKeyAuthenticationToken token = createToken(username, Base64.decode(header[1]), Base64.decode(header[2]), request); for (int i=3; i<header.length;i++) { token.addSignature(Base64.decode(header[i])); } return token; } protected PublicKeyAuthenticationToken createToken(String username, byte[] sign, byte[] signature, ServletRequest request) { return new PublicKeyAuthenticationToken(username, sign, signature, getHost(request)); } }
Add check for timestamp - needs to be in 30s window from now (+-15s) and needs to not have seen before
willow-servers/src/main/java/com/nitorcreations/willow/auth/PublicKeyAuthenticationFilter.java
Add check for timestamp - needs to be in 30s window from now (+-15s) and needs to not have seen before
<ide><path>illow-servers/src/main/java/com/nitorcreations/willow/auth/PublicKeyAuthenticationFilter.java <ide> package com.nitorcreations.willow.auth; <add> <add>import java.util.TreeSet; <ide> <ide> import javax.inject.Singleton; <ide> import javax.servlet.ServletRequest; <ide> @Singleton <ide> public class PublicKeyAuthenticationFilter extends BasicHttpAuthenticationFilter { <ide> private static final Logger log = LoggerFactory.getLogger(PublicKeyAuthenticationFilter.class); <del> <add> private final TreeSet<Long> timeStamps = new TreeSet<>(); <ide> private static final String scheme = "PUBLICKEY"; <ide> public PublicKeyAuthenticationFilter() { <ide> this.setAuthcScheme(scheme); <ide> if (header.length < 3 || !scheme.equals(header[0].toUpperCase())) { <ide> return createToken("", null, null, request); <ide> } <del> String unameNow = Base64.decodeToString(header[1]); <del> String username = unameNow.split(":")[0]; <add> String[] unameNow = Base64.decodeToString(header[1]).split(":"); <add> String username = unameNow[0]; <add> long timestamp = Long.parseLong(unameNow[1]); <add> long currentTime = System.currentTimeMillis(); <add> long wStart = currentTime - 15000; <add> long wEnd = currentTime + 15000; <add> if (!((timestamp > wStart) && (timestamp < wEnd)) || timeStamps.contains(timestamp)) { <add> return createToken("", null, null, request); <add> } <add> timeStamps.add(timestamp); <add> while (timeStamps.first() < wStart) { <add> timeStamps.pollFirst(); <add> } <ide> PublicKeyAuthenticationToken token = createToken(username, Base64.decode(header[1]), Base64.decode(header[2]), request); <ide> for (int i=3; i<header.length;i++) { <ide> token.addSignature(Base64.decode(header[i]));
Java
agpl-3.0
6e9bbbffcdcc69b7ea467da84493b0fc182e6a36
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
3f5b6d18-2e61-11e5-9284-b827eb9e62be
hello.java
3f55dc0e-2e61-11e5-9284-b827eb9e62be
3f5b6d18-2e61-11e5-9284-b827eb9e62be
hello.java
3f5b6d18-2e61-11e5-9284-b827eb9e62be
<ide><path>ello.java <del>3f55dc0e-2e61-11e5-9284-b827eb9e62be <add>3f5b6d18-2e61-11e5-9284-b827eb9e62be
Java
agpl-3.0
7f65e1dcef8f9f11365cde54317f5fc57c4107ad
0
paulmartel/voltdb,kobronson/cs-voltdb,VoltDB/voltdb,paulmartel/voltdb,kobronson/cs-voltdb,kumarrus/voltdb,flybird119/voltdb,ingted/voltdb,ingted/voltdb,zuowang/voltdb,kumarrus/voltdb,migue/voltdb,zuowang/voltdb,ingted/voltdb,paulmartel/voltdb,migue/voltdb,paulmartel/voltdb,kumarrus/voltdb,flybird119/voltdb,creative-quant/voltdb,wolffcm/voltdb,zuowang/voltdb,simonzhangsm/voltdb,simonzhangsm/voltdb,VoltDB/voltdb,VoltDB/voltdb,paulmartel/voltdb,migue/voltdb,migue/voltdb,zuowang/voltdb,zuowang/voltdb,flybird119/voltdb,flybird119/voltdb,wolffcm/voltdb,flybird119/voltdb,creative-quant/voltdb,creative-quant/voltdb,ingted/voltdb,deerwalk/voltdb,migue/voltdb,wolffcm/voltdb,paulmartel/voltdb,zuowang/voltdb,ingted/voltdb,flybird119/voltdb,flybird119/voltdb,wolffcm/voltdb,kobronson/cs-voltdb,kumarrus/voltdb,wolffcm/voltdb,migue/voltdb,kumarrus/voltdb,kobronson/cs-voltdb,kobronson/cs-voltdb,creative-quant/voltdb,deerwalk/voltdb,VoltDB/voltdb,ingted/voltdb,wolffcm/voltdb,deerwalk/voltdb,simonzhangsm/voltdb,creative-quant/voltdb,simonzhangsm/voltdb,simonzhangsm/voltdb,zuowang/voltdb,kumarrus/voltdb,deerwalk/voltdb,deerwalk/voltdb,wolffcm/voltdb,paulmartel/voltdb,kumarrus/voltdb,ingted/voltdb,flybird119/voltdb,simonzhangsm/voltdb,ingted/voltdb,creative-quant/voltdb,paulmartel/voltdb,simonzhangsm/voltdb,kobronson/cs-voltdb,deerwalk/voltdb,kumarrus/voltdb,kobronson/cs-voltdb,migue/voltdb,deerwalk/voltdb,creative-quant/voltdb,deerwalk/voltdb,VoltDB/voltdb,wolffcm/voltdb,VoltDB/voltdb,zuowang/voltdb,migue/voltdb,creative-quant/voltdb,simonzhangsm/voltdb,kobronson/cs-voltdb,VoltDB/voltdb
/* This file is part of VoltDB. * Copyright (C) 2008-2013 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb.iv2; import java.util.ArrayList; import java.util.concurrent.TimeUnit; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.LinkedBlockingDeque; import org.voltcore.logging.VoltLogger; import org.voltcore.messaging.Mailbox; import org.voltcore.messaging.TransactionInfoBaseMessage; import org.voltcore.utils.CoreUtils; import org.voltdb.messaging.DumpMessage; import org.voltdb.SiteProcedureConnection; import org.voltdb.StoredProcedureInvocation; import org.voltdb.VoltTable; import org.voltdb.client.ProcedureInvocationType; import org.voltdb.dtxn.TransactionState; import org.voltdb.messaging.BorrowTaskMessage; import org.voltdb.messaging.FragmentResponseMessage; import org.voltdb.messaging.FragmentTaskMessage; import org.voltdb.messaging.Iv2InitiateTaskMessage; public class MpTransactionState extends TransactionState { static VoltLogger tmLog = new VoltLogger("TM"); /** * This is thrown by the TransactionState instance when something * goes wrong mid-fragment, and execution needs to back all the way * out to the stored procedure call. 
*/ // IZZY Consolidate me with MultiPartitionParticipantTransactionState // and perhaps make me more descriptive public static class FragmentFailureException extends RuntimeException { private static final long serialVersionUID = 1L; } final Iv2InitiateTaskMessage m_task; LinkedBlockingDeque<FragmentResponseMessage> m_newDeps = new LinkedBlockingDeque<FragmentResponseMessage>(); Map<Integer, Set<Long>> m_remoteDeps; Map<Integer, List<VoltTable>> m_remoteDepTables = new HashMap<Integer, List<VoltTable>>(); final List<Long> m_useHSIds = new ArrayList<Long>(); long m_buddyHSId; FragmentTaskMessage m_remoteWork = null; FragmentTaskMessage m_localWork = null; boolean m_haveDistributedInitTask = false; boolean m_isRestart = false; MpTransactionState(Mailbox mailbox, TransactionInfoBaseMessage notice, List<Long> useHSIds, long buddyHSId, boolean isRestart) { super(mailbox, notice); m_task = (Iv2InitiateTaskMessage)notice; m_useHSIds.addAll(useHSIds); m_buddyHSId = buddyHSId; m_isRestart = isRestart; } public void updateMasters(List<Long> masters) { m_useHSIds.clear(); m_useHSIds.addAll(masters); } /** * Used to reset the internal state of this transaction so it can be successfully restarted */ void restart() { // The poisoning path will, unfortunately, set this to true. Need to undo that. m_needsRollback = false; // Also need to make sure that we get the original invocation in the first fragment // since some masters may not have seen it. m_haveDistributedInitTask = false; m_isRestart = true; } @Override public boolean isSinglePartition() { return false; } @Override public boolean isCoordinator() { return true; } @Override public boolean isBlocked() { // Not clear this method is useful in the new world? 
return false; } @Override public boolean hasTransactionalWork() { return false; } @Override public boolean doWork(boolean recovering) { return false; } @Override public StoredProcedureInvocation getInvocation() { return m_task.getStoredProcedureInvocation(); } @Override public void handleSiteFaults(HashSet<Long> failedSites) { } // Overrides needed by MpProcedureRunner @Override public void setupProcedureResume(boolean isFinal, int[] dependencies) { // Reset state so we can run this batch cleanly m_localWork = null; m_remoteWork = null; m_remoteDeps = null; m_remoteDepTables.clear(); } // I met this List at bandcamp... public void setupProcedureResume(boolean isFinal, List<Integer> deps) { setupProcedureResume(isFinal, com.google.common.primitives.Ints.toArray(deps)); } @Override public void createLocalFragmentWork(FragmentTaskMessage task, boolean nonTransactional) { m_localWork = task; m_localWork.setTruncationHandle(m_task.getTruncationHandle()); } @Override public void createAllParticipatingFragmentWork(FragmentTaskMessage task) { // Don't generate remote work or dependency tracking or anything if // there are no fragments to be done in this message // At some point maybe ProcedureRunner.slowPath() can get smarter if (task.getFragmentCount() > 0) { // Distribute the initiate task for command log replay. // Command log must log the initiate task; // Only send the fragment once. if (!m_haveDistributedInitTask && !isForReplay() && !isReadOnly()) { m_haveDistributedInitTask = true; task.setInitiateTask((Iv2InitiateTaskMessage)getNotice()); } if (m_task.getStoredProcedureInvocation().getType() == ProcedureInvocationType.REPLICATED) { task.setOriginalTxnId(m_task.getStoredProcedureInvocation().getOriginalTxnId()); } m_remoteWork = task; m_remoteWork.setTruncationHandle(m_task.getTruncationHandle()); // Distribute fragments to remote destinations. 
long[] non_local_hsids = new long[m_useHSIds.size()]; for (int i = 0; i < m_useHSIds.size(); i++) { non_local_hsids[i] = m_useHSIds.get(i); } // send to all non-local sites if (non_local_hsids.length > 0) { m_mbox.send(non_local_hsids, m_remoteWork); } } else { m_remoteWork = null; } } private Map<Integer, Set<Long>> createTrackedDependenciesFromTask(FragmentTaskMessage task, List<Long> expectedHSIds) { Map<Integer, Set<Long>> depMap = new HashMap<Integer, Set<Long>>(); for (int i = 0; i < task.getFragmentCount(); i++) { int dep = task.getOutputDepId(i); Set<Long> scoreboard = new HashSet<Long>(); depMap.put(dep, scoreboard); for (long hsid : expectedHSIds) { scoreboard.add(hsid); } } return depMap; } @Override public Map<Integer, List<VoltTable>> recursableRun(SiteProcedureConnection siteConnection) { // if we're restarting this transaction, and we only have local work, add some dummy // remote work so that we can avoid injecting a borrow task into the local buddy site // before the CompleteTransactionMessage with the restart flag reaches it. // Right now, any read on a replicated table which has no distributed work will // generate these null fragments in the restarted transaction. boolean usedNullFragment = false; if (m_isRestart && m_remoteWork == null) { usedNullFragment = true; m_remoteWork = new FragmentTaskMessage(m_localWork.getInitiatorHSId(), m_localWork.getCoordinatorHSId(), m_localWork.getTxnId(), m_localWork.getUniqueId(), m_localWork.isReadOnly(), false, false); m_remoteWork.setEmptyForRestart(getNextDependencyId()); if (!m_haveDistributedInitTask && !isForReplay() && !isReadOnly()) { m_haveDistributedInitTask = true; m_remoteWork.setInitiateTask((Iv2InitiateTaskMessage)getNotice()); } // Distribute fragments to remote destinations. 
long[] non_local_hsids = new long[m_useHSIds.size()]; for (int i = 0; i < m_useHSIds.size(); i++) { non_local_hsids[i] = m_useHSIds.get(i); } // send to all non-local sites if (non_local_hsids.length > 0) { m_mbox.send(non_local_hsids, m_remoteWork); } } // Do distributed fragments, if any if (m_remoteWork != null) { // Create some record of expected dependencies for tracking m_remoteDeps = createTrackedDependenciesFromTask(m_remoteWork, m_useHSIds); // if there are remote deps, block on them // FragmentResponses indicating failure will throw an exception // which will propagate out of handleReceivedFragResponse and // cause ProcedureRunner to do the right thing and cause rollback. while (!checkDoneReceivingFragResponses()) { FragmentResponseMessage msg = pollForResponses(); handleReceivedFragResponse(msg); } } // satisified. Clear this defensively. Procedure runner is sloppy with // cleaning up if it decides new work is necessary that is local-only. m_remoteWork = null; BorrowTaskMessage borrowmsg = new BorrowTaskMessage(m_localWork); m_localWork.m_sourceHSId = m_mbox.getHSId(); // if we created a bogus fragment to distribute to serialize restart and borrow tasks, // don't include the empty dependencies we got back in the borrow fragment. 
if (!usedNullFragment) { borrowmsg.addInputDepMap(m_remoteDepTables); } m_mbox.send(m_buddyHSId, borrowmsg); FragmentResponseMessage msg = pollForResponses(); m_localWork = null; // Build results from the FragmentResponseMessage // This is similar to dependency tracking...maybe some // sane way to merge it Map<Integer, List<VoltTable>> results = new HashMap<Integer, List<VoltTable>>(); for (int i = 0; i < msg.getTableCount(); i++) { int this_depId = msg.getTableDependencyIdAtIndex(i); VoltTable this_dep = msg.getTableAtIndex(i); List<VoltTable> tables = results.get(this_depId); if (tables == null) { tables = new ArrayList<VoltTable>(); results.put(this_depId, tables); } tables.add(this_dep); } // Need some sanity check that we got all of the expected output dependencies? return results; } private FragmentResponseMessage pollForResponses() { FragmentResponseMessage msg = null; try { while (msg == null) { msg = m_newDeps.poll(60L * 5, TimeUnit.SECONDS); if (msg == null) { tmLog.warn("Possible multipartition transaction deadlock detected for: " + m_task); if (m_remoteWork == null) { tmLog.warn("Waiting on local BorrowTask response from site: " + CoreUtils.hsIdToString(m_buddyHSId)); } else { tmLog.warn("Waiting on remote dependencies: "); for (Entry<Integer, Set<Long>> e : m_remoteDeps.entrySet()) { tmLog.warn("Dep ID: " + e.getKey() + " waiting on: " + CoreUtils.hsIdCollectionToString(e.getValue())); } } m_mbox.send(com.google.common.primitives.Longs.toArray(m_useHSIds), new DumpMessage()); } } } catch (InterruptedException e) { // can't leave yet - the transaction is inconsistent. // could retry; but this is unexpected. Crash. 
throw new RuntimeException(e); } if (msg.getStatusCode() != FragmentResponseMessage.SUCCESS) { m_needsRollback = true; if (msg.getException() != null) { throw msg.getException(); } else { throw new FragmentFailureException(); } } return msg; } private void trackDependency(long hsid, int depId, VoltTable table) { // Remove the distributed fragment for this site from remoteDeps // for the dependency Id depId. Set<Long> localRemotes = m_remoteDeps.get(depId); if (localRemotes == null && m_isRestart) { // Tolerate weird deps showing up on restart // After Ariel separates unique ID from transaction ID, rewrite restart to restart with // a new transaction ID and make this and the fake distributed fragment stuff go away. return; } Object needed = localRemotes.remove(hsid); if (needed != null) { // add table to storage List<VoltTable> tables = m_remoteDepTables.get(depId); if (tables == null) { tables = new ArrayList<VoltTable>(); m_remoteDepTables.put(depId, tables); } tables.add(table); } else { System.out.println("No remote dep for local site: " + hsid); } } private void handleReceivedFragResponse(FragmentResponseMessage msg) { for (int i = 0; i < msg.getTableCount(); i++) { int this_depId = msg.getTableDependencyIdAtIndex(i); VoltTable this_dep = msg.getTableAtIndex(i); long src_hsid = msg.getExecutorSiteId(); trackDependency(src_hsid, this_depId, this_dep); } } private boolean checkDoneReceivingFragResponses() { boolean done = true; for (Set<Long> depid : m_remoteDeps.values()) { if (depid.size() != 0) { done = false; } } return done; } // Runs from Mailbox's network thread public void offerReceivedFragmentResponse(FragmentResponseMessage message) { // push into threadsafe queue m_newDeps.offer(message); } /** * Kill a transaction - maybe shutdown mid-transaction? Or a timeout * collecting fragments? This is a don't-know-what-to-do-yet * stub. * TODO: fix this. 
*/ void terminateTransaction() { throw new RuntimeException("terminateTransaction is not yet implemented."); } }
src/frontend/org/voltdb/iv2/MpTransactionState.java
/* This file is part of VoltDB. * Copyright (C) 2008-2013 VoltDB Inc. * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with VoltDB. If not, see <http://www.gnu.org/licenses/>. */ package org.voltdb.iv2; import java.util.ArrayList; import java.util.concurrent.TimeUnit; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.LinkedBlockingDeque; import org.voltcore.logging.VoltLogger; import org.voltcore.messaging.Mailbox; import org.voltcore.messaging.TransactionInfoBaseMessage; import org.voltcore.utils.CoreUtils; import org.voltdb.messaging.DumpMessage; import org.voltdb.SiteProcedureConnection; import org.voltdb.StoredProcedureInvocation; import org.voltdb.VoltTable; import org.voltdb.client.ProcedureInvocationType; import org.voltdb.dtxn.TransactionState; import org.voltdb.messaging.BorrowTaskMessage; import org.voltdb.messaging.FragmentResponseMessage; import org.voltdb.messaging.FragmentTaskMessage; import org.voltdb.messaging.Iv2InitiateTaskMessage; public class MpTransactionState extends TransactionState { static VoltLogger tmLog = new VoltLogger("TM"); /** * This is thrown by the TransactionState instance when something * goes wrong mid-fragment, and execution needs to back all the way * out to the stored procedure call. 
*/ // IZZY Consolidate me with MultiPartitionParticipantTransactionState // and perhaps make me more descriptive public static class FragmentFailureException extends RuntimeException { private static final long serialVersionUID = 1L; } final Iv2InitiateTaskMessage m_task; LinkedBlockingDeque<FragmentResponseMessage> m_newDeps = new LinkedBlockingDeque<FragmentResponseMessage>(); Map<Integer, Set<Long>> m_remoteDeps; Map<Integer, List<VoltTable>> m_remoteDepTables = new HashMap<Integer, List<VoltTable>>(); final List<Long> m_useHSIds = new ArrayList<Long>(); long m_buddyHSId; FragmentTaskMessage m_remoteWork = null; FragmentTaskMessage m_localWork = null; boolean m_haveDistributedInitTask = false; boolean m_isRestart = false; MpTransactionState(Mailbox mailbox, TransactionInfoBaseMessage notice, List<Long> useHSIds, long buddyHSId, boolean isRestart) { super(mailbox, notice); m_task = (Iv2InitiateTaskMessage)notice; m_useHSIds.addAll(useHSIds); m_buddyHSId = buddyHSId; m_isRestart = isRestart; } public void updateMasters(List<Long> masters) { m_useHSIds.clear(); m_useHSIds.addAll(masters); } /** * Used to reset the internal state of this transaction so it can be successfully restarted */ void restart() { // The poisoning path will, unfortunately, set this to true. Need to undo that. m_needsRollback = false; // Also need to make sure that we get the original invocation in the first fragment // since some masters may not have seen it. m_haveDistributedInitTask = false; m_isRestart = true; } @Override public boolean isSinglePartition() { return false; } @Override public boolean isCoordinator() { return true; } @Override public boolean isBlocked() { // Not clear this method is useful in the new world? 
return false; } @Override public boolean hasTransactionalWork() { return false; } @Override public boolean doWork(boolean recovering) { return false; } @Override public StoredProcedureInvocation getInvocation() { return m_task.getStoredProcedureInvocation(); } @Override public void handleSiteFaults(HashSet<Long> failedSites) { } // Overrides needed by MpProcedureRunner @Override public void setupProcedureResume(boolean isFinal, int[] dependencies) { // Reset state so we can run this batch cleanly m_localWork = null; m_remoteWork = null; m_remoteDeps = null; m_remoteDepTables.clear(); } // I met this List at bandcamp... public void setupProcedureResume(boolean isFinal, List<Integer> deps) { setupProcedureResume(isFinal, com.google.common.primitives.Ints.toArray(deps)); } @Override public void createLocalFragmentWork(FragmentTaskMessage task, boolean nonTransactional) { m_localWork = task; m_localWork.setTruncationHandle(m_task.getTruncationHandle()); } @Override public void createAllParticipatingFragmentWork(FragmentTaskMessage task) { // Don't generate remote work or dependency tracking or anything if // there are no fragments to be done in this message // At some point maybe ProcedureRunner.slowPath() can get smarter if (task.getFragmentCount() > 0) { // Distribute the initiate task for command log replay. // Command log must log the initiate task; // Only send the fragment once. if (!m_haveDistributedInitTask && !isForReplay() && !isReadOnly()) { m_haveDistributedInitTask = true; task.setInitiateTask((Iv2InitiateTaskMessage)getNotice()); } if (m_task.getStoredProcedureInvocation().getType() == ProcedureInvocationType.REPLICATED) { task.setOriginalTxnId(m_task.getStoredProcedureInvocation().getOriginalTxnId()); } m_remoteWork = task; m_remoteWork.setTruncationHandle(m_task.getTruncationHandle()); // Distribute fragments to remote destinations. 
long[] non_local_hsids = new long[m_useHSIds.size()]; for (int i = 0; i < m_useHSIds.size(); i++) { non_local_hsids[i] = m_useHSIds.get(i); } // send to all non-local sites if (non_local_hsids.length > 0) { m_mbox.send(non_local_hsids, m_remoteWork); } } else { m_remoteWork = null; } } private Map<Integer, Set<Long>> createTrackedDependenciesFromTask(FragmentTaskMessage task, List<Long> expectedHSIds) { Map<Integer, Set<Long>> depMap = new HashMap<Integer, Set<Long>>(); for (int i = 0; i < task.getFragmentCount(); i++) { int dep = task.getOutputDepId(i); Set<Long> scoreboard = new HashSet<Long>(); depMap.put(dep, scoreboard); for (long hsid : expectedHSIds) { scoreboard.add(hsid); } } return depMap; } @Override public Map<Integer, List<VoltTable>> recursableRun(SiteProcedureConnection siteConnection) { // if we're restarting this transaction, and we only have local work, add some dummy // remote work so that we can avoid injecting a borrow task into the local buddy site // before the CompleteTransactionMessage with the restart flag reaches it. // Right now, any read on a replicated table which has no distributed work will // generate these null fragments in the restarted transaction. boolean usedNullFragment = false; if (m_isRestart && m_remoteWork == null) { usedNullFragment = true; m_remoteWork = new FragmentTaskMessage(m_localWork.getInitiatorHSId(), m_localWork.getCoordinatorHSId(), m_localWork.getTxnId(), m_localWork.getUniqueId(), m_localWork.isReadOnly(), false, false); m_remoteWork.setEmptyForRestart(getNextDependencyId()); if (!m_haveDistributedInitTask && !isForReplay() && !isReadOnly()) { m_haveDistributedInitTask = true; m_remoteWork.setInitiateTask((Iv2InitiateTaskMessage)getNotice()); } // Distribute fragments to remote destinations. 
long[] non_local_hsids = new long[m_useHSIds.size()]; for (int i = 0; i < m_useHSIds.size(); i++) { non_local_hsids[i] = m_useHSIds.get(i); } // send to all non-local sites if (non_local_hsids.length > 0) { m_mbox.send(non_local_hsids, m_remoteWork); } } // Do distributed fragments, if any if (m_remoteWork != null) { // Create some record of expected dependencies for tracking m_remoteDeps = createTrackedDependenciesFromTask(m_remoteWork, m_useHSIds); // if there are remote deps, block on them // FragmentResponses indicating failure will throw an exception // which will propagate out of handleReceivedFragResponse and // cause ProcedureRunner to do the right thing and cause rollback. while (!checkDoneReceivingFragResponses()) { FragmentResponseMessage msg = pollForResponses(); handleReceivedFragResponse(msg); } } // satisified. Clear this defensively. Procedure runner is sloppy with // cleaning up if it decides new work is necessary that is local-only. m_remoteWork = null; BorrowTaskMessage borrowmsg = new BorrowTaskMessage(m_localWork); m_localWork.m_sourceHSId = m_mbox.getHSId(); // if we created a bogus fragment to distribute to serialize restart and borrow tasks, // don't include the empty dependencies we got back in the borrow fragment. 
if (!usedNullFragment) { borrowmsg.addInputDepMap(m_remoteDepTables); } m_mbox.send(m_buddyHSId, borrowmsg); FragmentResponseMessage msg = pollForResponses(); m_localWork = null; // Build results from the FragmentResponseMessage // This is similar to dependency tracking...maybe some // sane way to merge it Map<Integer, List<VoltTable>> results = new HashMap<Integer, List<VoltTable>>(); for (int i = 0; i < msg.getTableCount(); i++) { int this_depId = msg.getTableDependencyIdAtIndex(i); VoltTable this_dep = msg.getTableAtIndex(i); List<VoltTable> tables = results.get(this_depId); if (tables == null) { tables = new ArrayList<VoltTable>(); results.put(this_depId, tables); } tables.add(this_dep); } // Need some sanity check that we got all of the expected output dependencies? return results; } private FragmentResponseMessage pollForResponses() { FragmentResponseMessage msg = null; try { while (msg == null) { msg = m_newDeps.poll(60L, TimeUnit.SECONDS); if (msg == null) { tmLog.warn("Possible multipartition transaction deadlock detected for: " + m_task); if (m_remoteWork == null) { tmLog.warn("Waiting on local BorrowTask response from site: " + CoreUtils.hsIdToString(m_buddyHSId)); } else { tmLog.warn("Waiting on remote dependencies: "); for (Entry<Integer, Set<Long>> e : m_remoteDeps.entrySet()) { tmLog.warn("Dep ID: " + e.getKey() + " waiting on: " + CoreUtils.hsIdCollectionToString(e.getValue())); } } m_mbox.send(com.google.common.primitives.Longs.toArray(m_useHSIds), new DumpMessage()); } } } catch (InterruptedException e) { // can't leave yet - the transaction is inconsistent. // could retry; but this is unexpected. Crash. 
throw new RuntimeException(e); } if (msg.getStatusCode() != FragmentResponseMessage.SUCCESS) { m_needsRollback = true; if (msg.getException() != null) { throw msg.getException(); } else { throw new FragmentFailureException(); } } return msg; } private void trackDependency(long hsid, int depId, VoltTable table) { // Remove the distributed fragment for this site from remoteDeps // for the dependency Id depId. Set<Long> localRemotes = m_remoteDeps.get(depId); if (localRemotes == null && m_isRestart) { // Tolerate weird deps showing up on restart // After Ariel separates unique ID from transaction ID, rewrite restart to restart with // a new transaction ID and make this and the fake distributed fragment stuff go away. return; } Object needed = localRemotes.remove(hsid); if (needed != null) { // add table to storage List<VoltTable> tables = m_remoteDepTables.get(depId); if (tables == null) { tables = new ArrayList<VoltTable>(); m_remoteDepTables.put(depId, tables); } tables.add(table); } else { System.out.println("No remote dep for local site: " + hsid); } } private void handleReceivedFragResponse(FragmentResponseMessage msg) { for (int i = 0; i < msg.getTableCount(); i++) { int this_depId = msg.getTableDependencyIdAtIndex(i); VoltTable this_dep = msg.getTableAtIndex(i); long src_hsid = msg.getExecutorSiteId(); trackDependency(src_hsid, this_depId, this_dep); } } private boolean checkDoneReceivingFragResponses() { boolean done = true; for (Set<Long> depid : m_remoteDeps.values()) { if (depid.size() != 0) { done = false; } } return done; } // Runs from Mailbox's network thread public void offerReceivedFragmentResponse(FragmentResponseMessage message) { // push into threadsafe queue m_newDeps.offer(message); } /** * Kill a transaction - maybe shutdown mid-transaction? Or a timeout * collecting fragments? This is a don't-know-what-to-do-yet * stub. * TODO: fix this. 
*/ void terminateTransaction() { throw new RuntimeException("terminateTransaction is not yet implemented."); } }
ENG-4185: Increase the MP deadlock notification timeout to 5 minutes.
src/frontend/org/voltdb/iv2/MpTransactionState.java
ENG-4185: Increase the MP deadlock notification timeout to 5 minutes.
<ide><path>src/frontend/org/voltdb/iv2/MpTransactionState.java <ide> FragmentResponseMessage msg = null; <ide> try { <ide> while (msg == null) { <del> msg = m_newDeps.poll(60L, TimeUnit.SECONDS); <add> msg = m_newDeps.poll(60L * 5, TimeUnit.SECONDS); <ide> if (msg == null) { <ide> tmLog.warn("Possible multipartition transaction deadlock detected for: " + m_task); <ide> if (m_remoteWork == null) {
Java
mit
9e75b56ff11eefa358b8cfee518289252b4e41ed
0
modwizcode/SpongeAPI,joshgarde/SpongeAPI,ryantheleach/SpongeAPI,gabizou/SpongeAPI,boomboompower/SpongeAPI,jamierocks/SpongeAPI,Kiskae/SpongeAPI,joshgarde/SpongeAPI,gabizou/SpongeAPI,modwizcode/SpongeAPI,jamierocks/SpongeAPI,ryantheleach/SpongeAPI,SpongeHistory/SpongeAPI-History,DDoS/SpongeAPI,SpongePowered/SpongeAPI,SpongeHistory/SpongeAPI-History,kashike/SpongeAPI,kenzierocks/SpongeAPI,caseif/SpongeAPI,frogocomics/SpongeAPI,jonk1993/SpongeAPI,AlphaModder/SpongeAPI,SpongePowered/SpongeAPI,Kiskae/SpongeAPI,JBYoshi/SpongeAPI,boomboompower/SpongeAPI,Lergin/SpongeAPI,kashike/SpongeAPI,DDoS/SpongeAPI,caseif/SpongeAPI,jonk1993/SpongeAPI,SpongePowered/SpongeAPI,frogocomics/SpongeAPI,kenzierocks/SpongeAPI,AlphaModder/SpongeAPI,Lergin/SpongeAPI,JBYoshi/SpongeAPI
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered.org <http://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.api.util.command; import org.spongepowered.api.text.message.Message; /** * Something that can execute commands. * * <p>Examples of potential implementations include players, the server console, * RCON clients, web-based clients, command blocks, and so on.</p> */ public interface CommandSource { /** * Sends the plain text message(s) to source when possible. * <p>Use {@link #sendMessage(Message...)} for a formatted message.</p> * * @param messages The message(s) */ void sendMessage(String... messages); /** * Sends the formatted text message(s) to source when possible. If text formatting * is not supported in the implementation it will be displayed as plain text. * * @param messages The message(s) */ void sendMessage(Message<?>... 
messages); /** * Sends the formatted text message(s) to source when possible. If text formatting * is not supported in the implementation it will be displayed as plain text. * * @param messages The messages */ void sendMessage(Iterable<Message<?>> messages); }
src/main/java/org/spongepowered/api/util/command/CommandSource.java
/* * This file is part of Sponge, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered.org <http://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.api.util.command; import org.spongepowered.api.text.message.Message; /** * Something that can execute commands. * * <p>Examples of potential implementations include players, the server console, * RCON clients, web-based clients, command blocks, and so on.</p> */ public interface CommandSource { /** * Sends the plain text message(s) to source when possible. * <p>Use {@link #sendMessage(Message...)} for a formatted message.</p> * * @param messages The message(s) */ void sendMessage(String... messages); /** * Sends the formatted text message(s) to source when possible. If text formatting * is not supported in the implementation it will be displayed as plain text. * * @param messages The message(s) */ void sendMessage(Message<?>... 
messages); /** * Sends the formatted text message(s) to source when possible. If text formatting * is not supported in the implementation it will be displayed as plain text. * * @param messages The messages */ void sendMessage(Iterable<Message<?>>... messages); }
Don't use var args for Iterable in sendMessage
src/main/java/org/spongepowered/api/util/command/CommandSource.java
Don't use var args for Iterable in sendMessage
<ide><path>src/main/java/org/spongepowered/api/util/command/CommandSource.java <ide> * <ide> * @param messages The messages <ide> */ <del> void sendMessage(Iterable<Message<?>>... messages); <add> void sendMessage(Iterable<Message<?>> messages); <ide> <ide> }
Java
apache-2.0
0d356a2ed63bac305df7cde4898f143e97a88dc6
0
gavanx/pdflearn,veraPDF/veraPDF-pdfbox,BezrukovM/veraPDF-pdfbox,joansmith/pdfbox,mdamt/pdfbox,mathieufortin01/pdfbox,gavanx/pdflearn,torakiki/sambox,ChunghwaTelecom/pdfbox,ChunghwaTelecom/pdfbox,BezrukovM/veraPDF-pdfbox,mathieufortin01/pdfbox,ZhenyaM/veraPDF-pdfbox,joansmith/pdfbox,benmccann/pdfbox,veraPDF/veraPDF-pdfbox,torakiki/sambox,benmccann/pdfbox,ZhenyaM/veraPDF-pdfbox,mdamt/pdfbox
/***************************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ****************************************************************************/ package org.apache.pdfbox.preflight.font.util; import java.awt.Image; import java.io.IOException; import java.util.List; import org.apache.fontbox.util.BoundingBox; import org.apache.pdfbox.cos.COSFloat; import org.apache.pdfbox.cos.COSInteger; import org.apache.pdfbox.cos.COSNumber; import org.apache.pdfbox.pdmodel.PDPage; import org.apache.pdfbox.pdmodel.font.PDType3CharProc; import org.apache.pdfbox.pdmodel.graphics.image.PDInlineImage; import org.apache.pdfbox.preflight.PreflightContext; import org.apache.pdfbox.preflight.content.PreflightStreamEngine; import org.apache.pdfbox.contentstream.operator.Operator; /** * This class is used to parse a glyph of a Type3 font program. If the glyph is parsed without error, the width of the * glyph is accessible through the getWidth method. 
*/ public class PreflightType3Stream extends PreflightStreamEngine { private final PDType3CharProc charProc; private boolean firstOperator = true; private float width = 0; private PDInlineImage image = null; private BoundingBox box = null; public PreflightType3Stream(PreflightContext context, PDPage page, PDType3CharProc charProc) { super(context, page); this.charProc = charProc; } public void showType3Character(PDType3CharProc charProc) throws IOException { processChildStream(charProc, new PDPage()); // dummy page (resource lookup may fail) } /** * This will parse a type3 stream and create an image from it. * * @return The image that was created. * * @throws IOException * If there is an error processing the stream. */ public Image createImage() throws IOException { showType3Character(charProc); return image.getImage(); } /** * This is used to handle an operation. * * @param operator * The operation to perform. * @param operands * The list of arguments. * * @throws IOException * If there is an error processing the operation. 
*/ protected void processOperator(Operator operator, List operands) throws IOException { super.processOperator(operator, operands); String operation = operator.getName(); if (operation.equals("BI")) { image = new PDInlineImage(operator.getImageParameters(), operator.getImageData(), getResources()); validateImageFilter(operator); validateImageColorSpace(operator); } if (operation.equals("d0")) { checkType3FirstOperator(operands); } else if (operation.equals("d1")) { COSNumber llx = (COSNumber) operands.get(2); COSNumber lly = (COSNumber) operands.get(3); COSNumber urx = (COSNumber) operands.get(4); COSNumber ury = (COSNumber) operands.get(5); box = new BoundingBox(); box.setLowerLeftX(llx.floatValue()); box.setLowerLeftY(lly.floatValue()); box.setUpperRightX(urx.floatValue()); box.setUpperRightY(ury.floatValue()); checkType3FirstOperator(operands); } checkColorOperators(operation); validateRenderingIntent(operator, operands); checkSetColorSpaceOperators(operator, operands); validateNumberOfGraphicStates(operator); firstOperator = false; } /** * According to the PDF Reference, the first operator in a CharProc of a Type3 font must be "d0" or "d1". This * method process this validation. This method is called by the processOperator method. * * @param arguments * @throws IOException */ private void checkType3FirstOperator(List arguments) throws IOException { if (!firstOperator) { throw new IOException("Type3 CharProc : First operator must be d0 or d1"); } Object obj = arguments.get(0); if (obj instanceof Number) { width = ((Number) obj).intValue(); } else if (obj instanceof COSNumber) { width = ((COSNumber) obj).floatValue(); } else { throw new IOException("Unexpected argument type. Expected : COSInteger or Number / Received : " + obj.getClass().getName()); } } /** * @return the width of the CharProc glyph description */ public float getWidth() { return this.width; } }
preflight/src/main/java/org/apache/pdfbox/preflight/font/util/PreflightType3Stream.java
/***************************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * ****************************************************************************/ package org.apache.pdfbox.preflight.font.util; import java.awt.Image; import java.io.IOException; import java.util.List; import org.apache.fontbox.util.BoundingBox; import org.apache.pdfbox.cos.COSFloat; import org.apache.pdfbox.cos.COSInteger; import org.apache.pdfbox.cos.COSNumber; import org.apache.pdfbox.pdmodel.PDPage; import org.apache.pdfbox.pdmodel.font.PDType3CharProc; import org.apache.pdfbox.pdmodel.graphics.image.PDInlineImage; import org.apache.pdfbox.preflight.PreflightContext; import org.apache.pdfbox.preflight.content.PreflightStreamEngine; import org.apache.pdfbox.contentstream.operator.Operator; /** * This class is used to parse a glyph of a Type3 font program. If the glyph is parsed without error, the width of the * glyph is accessible through the getWidth method. 
*/ public class PreflightType3Stream extends PreflightStreamEngine { private final PDType3CharProc charProc; private boolean firstOperator = true; private float width = 0; private PDInlineImage image = null; private BoundingBox box = null; public PreflightType3Stream(PreflightContext context, PDPage page, PDType3CharProc charProc) { super(context, page); this.charProc = charProc; } public void showType3Character(PDType3CharProc charProc) throws IOException { processChildStream(charProc, new PDPage()); // dummy page (resource lookup may fail) } /** * This will parse a type3 stream and create an image from it. * * @return The image that was created. * * @throws IOException * If there is an error processing the stream. */ public Image createImage() throws IOException { showType3Character(charProc); return image.getImage(); } /** * This is used to handle an operation. * * @param operator * The operation to perform. * @param operands * The list of arguments. * * @throws IOException * If there is an error processing the operation. 
*/ protected void processOperator(Operator operator, List operands) throws IOException { super.processOperator(operator, operands); String operation = operator.getName(); if (operation.equals("BI")) { image = new PDInlineImage(operator.getImageParameters(), operator.getImageData(), getResources()); validateImageFilter(operator); validateImageColorSpace(operator); } if (operation.equals("d0")) { checkType3FirstOperator(operands); } else if (operation.equals("d1")) { COSNumber llx = (COSNumber) operands.get(2); COSNumber lly = (COSNumber) operands.get(3); COSNumber urx = (COSNumber) operands.get(4); COSNumber ury = (COSNumber) operands.get(5); box = new BoundingBox(); box.setLowerLeftX(llx.floatValue()); box.setLowerLeftY(lly.floatValue()); box.setUpperRightX(urx.floatValue()); box.setUpperRightY(ury.floatValue()); checkType3FirstOperator(operands); } checkColorOperators(operation); validateRenderingIntent(operator, operands); checkSetColorSpaceOperators(operator, operands); validateNumberOfGraphicStates(operator); firstOperator = false; } /** * According to the PDF Reference, the first operator in a CharProc of a Type3 font must be "d0" or "d1". This * method process this validation. This method is called by the processOperator method. * * @param arguments * @throws IOException */ private void checkType3FirstOperator(List arguments) throws IOException { if (!firstOperator) { throw new IOException("Type3 CharProc : First operator must be d0 or d1"); } Object obj = arguments.get(0); if (obj instanceof Number) { width = ((Number) obj).intValue(); } else if (obj instanceof COSInteger) { width = ((COSInteger) obj).floatValue(); } else if (obj instanceof COSFloat) { width = ((COSFloat) obj).floatValue(); } else { throw new IOException("Unexpected argument type. Expected : COSInteger or Number / Received : " + obj.getClass().getName()); } } /** * @return the width of the CharProc glyph description */ public float getWidth() { return this.width; } }
PDFBOX-2576: remove unneeded casts git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1666324 13f79535-47bb-0310-9956-ffa450edef68
preflight/src/main/java/org/apache/pdfbox/preflight/font/util/PreflightType3Stream.java
PDFBOX-2576: remove unneeded casts
<ide><path>preflight/src/main/java/org/apache/pdfbox/preflight/font/util/PreflightType3Stream.java <ide> { <ide> width = ((Number) obj).intValue(); <ide> } <del> else if (obj instanceof COSInteger) <add> else if (obj instanceof COSNumber) <ide> { <del> width = ((COSInteger) obj).floatValue(); <del> } <del> else if (obj instanceof COSFloat) <del> { <del> width = ((COSFloat) obj).floatValue(); <add> width = ((COSNumber) obj).floatValue(); <ide> } <ide> else <ide> {
Java
apache-2.0
82dd0ce52fe621f165ffe8f7a08d41d6fd8d66c6
0
CloudSlang/score,CloudSlang/score
/* * Copyright © 2014-2017 EntIT Software LLC, a Micro Focus company (L.P.) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.cloudslang.runtime.impl.python.external; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import io.cloudslang.runtime.api.python.PythonEvaluationResult; import io.cloudslang.runtime.api.python.PythonExecutionResult; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.SystemUtils; import org.apache.log4j.Logger; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.Serializable; import java.io.StringWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.attribute.PosixFilePermission; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; public class ExternalPythonExecutor { private static final String PYTHON_SCRIPT_FILENAME = "script"; private static final String PYTHON_MAIN_FILENAME = "main"; private static final String 
PYTHON_EVAL_FILENAME = "eval"; private static final String PYTHON_SUFFIX = ".py"; private static final Logger logger = Logger.getLogger(ExternalPythonExecutor.class); private static final ObjectMapper objectMapper = new ObjectMapper(); private static final long EXECUTION_TIMEOUT = Long.getLong("python.timeout", 30); private static final String PYTHON_FILENAME_SCRIPT_EXTENSION = ".py\""; private static final int PYTHON_FILENAME_DELIMITERS = 6; public PythonExecutionResult exec(String script, Map<String, Serializable> inputs) { TempExecutionEnvironment tempExecutionEnvironment = null; try { String pythonPath = checkPythonPath(); tempExecutionEnvironment = generateTempExecutionResources(script); String payload = generateExecutionPayload(tempExecutionEnvironment.userScriptName, inputs); addFilePermissions(tempExecutionEnvironment.parentFolder); return runPythonExecutionProcess(pythonPath, payload, tempExecutionEnvironment); } catch (IOException e) { String message = "Failed to generate execution resources"; logger.error(message, e); throw new RuntimeException(message); } finally { if (tempExecutionEnvironment != null && !FileUtils.deleteQuietly(tempExecutionEnvironment.parentFolder) && tempExecutionEnvironment.parentFolder != null) { logger.warn(String.format("Failed to cleanup python execution resources {%s}", tempExecutionEnvironment.parentFolder)); } } } public PythonEvaluationResult eval(String expression, String prepareEnvironmentScript, Map<String, Serializable> context) { TempEvalEnvironment tempEvalEnvironment = null; try { String pythonPath = checkPythonPath(); tempEvalEnvironment = generateTempEvalResources(); String payload = generateEvalPayload(expression, prepareEnvironmentScript, context); addFilePermissions(tempEvalEnvironment.parentFolder); return runPythonEvalProcess(pythonPath, payload, tempEvalEnvironment, context); } catch (IOException e) { String message = "Failed to generate execution resources"; logger.error(message, e); throw new 
RuntimeException(message); } finally { if (tempEvalEnvironment != null && !FileUtils.deleteQuietly(tempEvalEnvironment.parentFolder) && tempEvalEnvironment.parentFolder != null) { logger.warn(String.format("Failed to cleanup python execution resources {%s}", tempEvalEnvironment.parentFolder)); } } } private void addFilePermissions(File file) throws IOException { Set<PosixFilePermission> filePermissions = new HashSet<>(); filePermissions.add(PosixFilePermission.OWNER_READ); File[] fileChildren = file.listFiles(); if (fileChildren != null) { for (File child : fileChildren) { if (SystemUtils.IS_OS_WINDOWS) { child.setReadOnly(); } else { Files.setPosixFilePermissions(child.toPath(), filePermissions); } } } } private String checkPythonPath() { String pythonPath = System.getProperty("python.path"); if (StringUtils.isEmpty(pythonPath) || !new File(pythonPath).exists()) { throw new IllegalArgumentException("Missing or invalid python path"); } return pythonPath; } private PythonExecutionResult runPythonExecutionProcess(String pythonPath, String payload, TempExecutionEnvironment executionEnvironment) { ProcessBuilder processBuilder = preparePythonProcess(executionEnvironment, pythonPath); try { String returnResult = getResult(payload, processBuilder); ScriptResults scriptResults = objectMapper.readValue(returnResult, ScriptResults.class); String exception = formatException(scriptResults.getException(), scriptResults.getTraceback()); if (!StringUtils.isEmpty(exception)) { logger.error(String.format("Failed to execute script {%s}", exception)); throw new ExternalPythonScriptException(String.format("Failed to execute user script: %s", exception)); } //noinspection unchecked return new PythonExecutionResult(scriptResults.getReturnResult()); } catch (IOException | InterruptedException e) { logger.error("Failed to run script. 
", e.getCause()); throw new RuntimeException("Failed to run script."); } } private PythonEvaluationResult runPythonEvalProcess(String pythonPath, String payload, TempEvalEnvironment executionEnvironment, Map<String, Serializable> context) { ProcessBuilder processBuilder = preparePythonProcess(executionEnvironment, pythonPath); try { String returnResult = getResult(payload, processBuilder); EvaluationResults scriptResults = objectMapper.readValue(returnResult, EvaluationResults.class); String exception = scriptResults.getException(); if (!StringUtils.isEmpty(exception)) { logger.error(String.format("Failed to execute script {%s}", exception)); throw new ExternalPythonEvalException("Exception is: " + exception); } context.put("accessed_resources_set", (Serializable) scriptResults.getAccessedResources()); //noinspection unchecked return new PythonEvaluationResult(processReturnResult(scriptResults), context); } catch (IOException | InterruptedException e) { logger.error("Failed to run script. 
", e.getCause()); throw new RuntimeException("Failed to run script."); } } private Serializable processReturnResult(EvaluationResults results) { EvaluationResults.ReturnType returnType = results.getReturnType(); if (returnType == null) { throw new RuntimeException("Missing return type for return result."); } switch (returnType) { case BOOLEAN: return Boolean.valueOf(results.getReturnResult()); case INTEGER: return Integer.valueOf(results.getReturnResult()); default: return results.getReturnResult(); } } private String getResult(String payload, ProcessBuilder processBuilder) throws IOException, InterruptedException { Process process = processBuilder.start(); PrintWriter printWriter = new PrintWriter(new OutputStreamWriter(process.getOutputStream(), StandardCharsets.UTF_8)); printWriter.println(payload); printWriter.flush(); BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream())); String line; StringBuilder returnResult = new StringBuilder(); while ((line = reader.readLine()) != null) { returnResult.append(line); } boolean isInTime = process.waitFor(EXECUTION_TIMEOUT, TimeUnit.MINUTES); if (!isInTime) { process.destroy(); throw new RuntimeException("Execution timed out"); } else if (process.exitValue() != 0) { StringWriter writer = new StringWriter(); IOUtils.copy(process.getErrorStream(), writer, StandardCharsets.UTF_8); logger.error(writer.toString()); throw new RuntimeException("Execution returned non 0 result"); } return returnResult.toString(); } private ProcessBuilder preparePythonProcess(TempEnvironment executionEnvironment, String pythonPath) { ProcessBuilder processBuilder = new ProcessBuilder(Arrays.asList(Paths.get(pythonPath, "python").toString(), Paths.get(executionEnvironment.parentFolder.toString(), executionEnvironment.mainScriptName).toString())); processBuilder.environment().clear(); processBuilder.directory(executionEnvironment.parentFolder); return processBuilder; } private TempExecutionEnvironment 
generateTempExecutionResources(String script) throws IOException { Path execTempDirectory = Files.createTempDirectory("python_execution"); File tempUserScript = File.createTempFile(PYTHON_SCRIPT_FILENAME, PYTHON_SUFFIX, execTempDirectory.toFile()); FileUtils.writeStringToFile(tempUserScript, script, StandardCharsets.UTF_8); ClassLoader classLoader = ExternalPythonExecutor.class.getClassLoader(); Path mainScriptPath = Paths.get(execTempDirectory.toString(), PYTHON_MAIN_FILENAME + PYTHON_SUFFIX); Files.copy(classLoader.getResourceAsStream(PYTHON_MAIN_FILENAME + PYTHON_SUFFIX), mainScriptPath); String tempUserScriptName = FilenameUtils.getName(tempUserScript.toString()); String mainScriptName = FilenameUtils.getName(mainScriptPath.toString()); return new TempExecutionEnvironment(tempUserScriptName, mainScriptName, execTempDirectory.toFile()); } private TempEvalEnvironment generateTempEvalResources() throws IOException { Path execTempDirectory = Files.createTempDirectory("python_expression"); ClassLoader classLoader = ExternalPythonExecutor.class.getClassLoader(); Path mainScriptPath = Paths.get(execTempDirectory.toString(), PYTHON_EVAL_FILENAME + PYTHON_SUFFIX); Files.copy(classLoader.getResourceAsStream(PYTHON_EVAL_FILENAME + PYTHON_SUFFIX), mainScriptPath); String mainScriptName = FilenameUtils.getName(mainScriptPath.toString()); return new TempEvalEnvironment(mainScriptName, execTempDirectory.toFile()); } private String generateEvalPayload(String expression, String prepareEnvironmentScript, Map<String, Serializable> context) throws JsonProcessingException { Map<String, Serializable> payload = new HashMap<>(); payload.put("expression", expression); payload.put("envSetup", prepareEnvironmentScript); payload.put("context", (Serializable) context); return objectMapper.writeValueAsString(payload); } private String generateExecutionPayload(String userScript, Map<String, Serializable> inputs) throws JsonProcessingException { Map<String, Serializable> payload = new 
HashMap<>(); Map<String, String> parsedInputs = new HashMap<>(); inputs.forEach((key, value) -> parsedInputs.put(key, value.toString())); payload.put("script_name", FilenameUtils.removeExtension(userScript)); payload.put("inputs", (Serializable) parsedInputs); return objectMapper.writeValueAsString(payload); } private String formatException(String exception, List<String> traceback) { if (CollectionUtils.isEmpty(traceback)) { return exception; } return removeFileName(traceback.get(traceback.size() - 1)) + ", " + exception; } private String removeFileName(String trace) { int pythonFileNameIndex = trace.indexOf(PYTHON_FILENAME_SCRIPT_EXTENSION); return trace.substring(pythonFileNameIndex + PYTHON_FILENAME_DELIMITERS); } private class TempEnvironment { final String mainScriptName; final File parentFolder; private TempEnvironment(String mainScriptName, File parentFolder) { this.mainScriptName = mainScriptName; this.parentFolder = parentFolder; } } private class TempExecutionEnvironment extends TempEnvironment { private final String userScriptName; private TempExecutionEnvironment(String userScriptName, String mainScriptName, File parentFolder) { super(mainScriptName, parentFolder); this.userScriptName = userScriptName; } } private class TempEvalEnvironment extends TempEnvironment { private TempEvalEnvironment(String mainScriptName, File parentFolder) { super(mainScriptName, parentFolder); } } }
runtime-management/runtime-management-impl/src/main/java/io/cloudslang/runtime/impl/python/external/ExternalPythonExecutor.java
/* * Copyright © 2014-2017 EntIT Software LLC, a Micro Focus company (L.P.) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.cloudslang.runtime.impl.python.external; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import io.cloudslang.runtime.api.python.PythonEvaluationResult; import io.cloudslang.runtime.api.python.PythonExecutionResult; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.io.FileUtils; import org.apache.commons.io.FilenameUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.SystemUtils; import org.apache.log4j.Logger; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.io.Serializable; import java.io.StringWriter; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.attribute.PosixFilePermission; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; public class ExternalPythonExecutor { private static final String PYTHON_SCRIPT_FILENAME = "script"; private static final String PYTHON_MAIN_FILENAME = "main"; private static final String 
PYTHON_EVAL_FILENAME = "eval"; private static final String PYTHON_SUFFIX = ".py"; private static final Logger logger = Logger.getLogger(ExternalPythonExecutor.class); private static final ObjectMapper objectMapper = new ObjectMapper(); private static final long EXECUTION_TIMEOUT = Long.getLong("python.timeout", 30); private static final String PYTHON_FILENAME_SCRIPT_EXTENSION = ".py\""; private static final int PYTHON_FILENAME_DELIMITERS = 6; public PythonExecutionResult exec(String script, Map<String, Serializable> inputs) { TempExecutionEnvironment tempExecutionEnvironment = null; try { String pythonPath = checkPythonPath(); tempExecutionEnvironment = generateTempExecutionResources(script); String payload = generateExecutionPayload(tempExecutionEnvironment.userScriptName, inputs); addFilePermissions(tempExecutionEnvironment.parentFolder); return runPythonExecutionProcess(pythonPath, payload, tempExecutionEnvironment); } catch (IOException e) { String message = "Failed to generate execution resources"; logger.error(message, e); throw new RuntimeException(message); } finally { if (tempExecutionEnvironment != null && !FileUtils.deleteQuietly(tempExecutionEnvironment.parentFolder) && tempExecutionEnvironment.parentFolder != null) { logger.warn(String.format("Failed to cleanup python execution resources {%s}", tempExecutionEnvironment.parentFolder)); } } } public PythonEvaluationResult eval(String expression, String prepareEnvironmentScript, Map<String, Serializable> context) { TempEvalEnvironment tempEvalEnvironment = null; try { String pythonPath = checkPythonPath(); tempEvalEnvironment = generateTempEvalResources(); String payload = generateEvalPayload(expression, prepareEnvironmentScript, context); addFilePermissions(tempEvalEnvironment.parentFolder); return runPythonEvalProcess(pythonPath, payload, tempEvalEnvironment, context); } catch (IOException e) { String message = "Failed to generate execution resources"; logger.error(message, e); throw new 
RuntimeException(message); } finally { if (tempEvalEnvironment != null && !FileUtils.deleteQuietly(tempEvalEnvironment.parentFolder) && tempEvalEnvironment.parentFolder != null) { logger.warn(String.format("Failed to cleanup python execution resources {%s}", tempEvalEnvironment.parentFolder)); } } } private void addFilePermissions(File file) throws IOException { Set<PosixFilePermission> filePermissions = new HashSet<>(); filePermissions.add(PosixFilePermission.OWNER_READ); File[] fileChildren = file.listFiles(); if (fileChildren != null) { for (File child : fileChildren) { if (isWindows()) { child.setReadOnly(); } else { Files.setPosixFilePermissions(child.toPath(), filePermissions); } } } } private static boolean isWindows() { return SystemUtils.IS_OS_WINDOWS; } private String checkPythonPath() { String pythonPath = System.getProperty("python.path"); if (StringUtils.isEmpty(pythonPath) || !new File(pythonPath).exists()) { throw new IllegalArgumentException("Missing or invalid python path"); } return pythonPath; } private PythonExecutionResult runPythonExecutionProcess(String pythonPath, String payload, TempExecutionEnvironment executionEnvironment) { ProcessBuilder processBuilder = preparePythonProcess(executionEnvironment, pythonPath); try { String returnResult = getResult(payload, processBuilder); ScriptResults scriptResults = objectMapper.readValue(returnResult, ScriptResults.class); String exception = formatException(scriptResults.getException(), scriptResults.getTraceback()); if (!StringUtils.isEmpty(exception)) { logger.error(String.format("Failed to execute script {%s}", exception)); throw new ExternalPythonScriptException(String.format("Failed to execute user script: %s", exception)); } //noinspection unchecked return new PythonExecutionResult(scriptResults.getReturnResult()); } catch (IOException | InterruptedException e) { logger.error("Failed to run script. 
", e.getCause()); throw new RuntimeException("Failed to run script."); } } private PythonEvaluationResult runPythonEvalProcess(String pythonPath, String payload, TempEvalEnvironment executionEnvironment, Map<String, Serializable> context) { ProcessBuilder processBuilder = preparePythonProcess(executionEnvironment, pythonPath); try { String returnResult = getResult(payload, processBuilder); EvaluationResults scriptResults = objectMapper.readValue(returnResult, EvaluationResults.class); String exception = scriptResults.getException(); if (!StringUtils.isEmpty(exception)) { logger.error(String.format("Failed to execute script {%s}", exception)); throw new ExternalPythonEvalException("Exception is: " + exception); } context.put("accessed_resources_set", (Serializable) scriptResults.getAccessedResources()); //noinspection unchecked return new PythonEvaluationResult(processReturnResult(scriptResults), context); } catch (IOException | InterruptedException e) { logger.error("Failed to run script. 
", e.getCause()); throw new RuntimeException("Failed to run script."); } } private Serializable processReturnResult(EvaluationResults results) { EvaluationResults.ReturnType returnType = results.getReturnType(); if (returnType == null) { throw new RuntimeException("Missing return type for return result."); } switch (returnType) { case BOOLEAN: return Boolean.valueOf(results.getReturnResult()); case INTEGER: return Integer.valueOf(results.getReturnResult()); default: return results.getReturnResult(); } } private String getResult(String payload, ProcessBuilder processBuilder) throws IOException, InterruptedException { Process process = processBuilder.start(); PrintWriter printWriter = new PrintWriter(new OutputStreamWriter(process.getOutputStream(), StandardCharsets.UTF_8)); printWriter.println(payload); printWriter.flush(); BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream())); String line; StringBuilder returnResult = new StringBuilder(); while ((line = reader.readLine()) != null) { returnResult.append(line); } boolean isInTime = process.waitFor(EXECUTION_TIMEOUT, TimeUnit.MINUTES); if (!isInTime) { process.destroy(); throw new RuntimeException("Execution timed out"); } else if (process.exitValue() != 0) { StringWriter writer = new StringWriter(); IOUtils.copy(process.getErrorStream(), writer, StandardCharsets.UTF_8); logger.error(writer.toString()); throw new RuntimeException("Execution returned non 0 result"); } return returnResult.toString(); } private ProcessBuilder preparePythonProcess(TempEnvironment executionEnvironment, String pythonPath) { ProcessBuilder processBuilder = new ProcessBuilder(Arrays.asList(Paths.get(pythonPath, "python").toString(), Paths.get(executionEnvironment.parentFolder.toString(), executionEnvironment.mainScriptName).toString())); processBuilder.environment().clear(); processBuilder.directory(executionEnvironment.parentFolder); return processBuilder; } private TempExecutionEnvironment 
generateTempExecutionResources(String script) throws IOException { Path execTempDirectory = Files.createTempDirectory("python_execution"); File tempUserScript = File.createTempFile(PYTHON_SCRIPT_FILENAME, PYTHON_SUFFIX, execTempDirectory.toFile()); FileUtils.writeStringToFile(tempUserScript, script, StandardCharsets.UTF_8); ClassLoader classLoader = ExternalPythonExecutor.class.getClassLoader(); Path mainScriptPath = Paths.get(execTempDirectory.toString(), PYTHON_MAIN_FILENAME + PYTHON_SUFFIX); Files.copy(classLoader.getResourceAsStream(PYTHON_MAIN_FILENAME + PYTHON_SUFFIX), mainScriptPath); String tempUserScriptName = FilenameUtils.getName(tempUserScript.toString()); String mainScriptName = FilenameUtils.getName(mainScriptPath.toString()); return new TempExecutionEnvironment(tempUserScriptName, mainScriptName, execTempDirectory.toFile()); } private TempEvalEnvironment generateTempEvalResources() throws IOException { Path execTempDirectory = Files.createTempDirectory("python_expression"); ClassLoader classLoader = ExternalPythonExecutor.class.getClassLoader(); Path mainScriptPath = Paths.get(execTempDirectory.toString(), PYTHON_EVAL_FILENAME + PYTHON_SUFFIX); Files.copy(classLoader.getResourceAsStream(PYTHON_EVAL_FILENAME + PYTHON_SUFFIX), mainScriptPath); String mainScriptName = FilenameUtils.getName(mainScriptPath.toString()); return new TempEvalEnvironment(mainScriptName, execTempDirectory.toFile()); } private String generateEvalPayload(String expression, String prepareEnvironmentScript, Map<String, Serializable> context) throws JsonProcessingException { Map<String, Serializable> payload = new HashMap<>(); payload.put("expression", expression); payload.put("envSetup", prepareEnvironmentScript); payload.put("context", (Serializable) context); return objectMapper.writeValueAsString(payload); } private String generateExecutionPayload(String userScript, Map<String, Serializable> inputs) throws JsonProcessingException { Map<String, Serializable> payload = new 
HashMap<>(); Map<String, String> parsedInputs = new HashMap<>(); inputs.forEach((key, value) -> parsedInputs.put(key, value.toString())); payload.put("script_name", FilenameUtils.removeExtension(userScript)); payload.put("inputs", (Serializable) parsedInputs); return objectMapper.writeValueAsString(payload); } private String formatException(String exception, List<String> traceback) { if (CollectionUtils.isEmpty(traceback)) { return exception; } return removeFileName(traceback.get(traceback.size() - 1)) + ", " + exception; } private String removeFileName(String trace) { int pythonFileNameIndex = trace.indexOf(PYTHON_FILENAME_SCRIPT_EXTENSION); return trace.substring(pythonFileNameIndex + PYTHON_FILENAME_DELIMITERS); } private class TempEnvironment { final String mainScriptName; final File parentFolder; private TempEnvironment(String mainScriptName, File parentFolder) { this.mainScriptName = mainScriptName; this.parentFolder = parentFolder; } } private class TempExecutionEnvironment extends TempEnvironment { private final String userScriptName; private TempExecutionEnvironment(String userScriptName, String mainScriptName, File parentFolder) { super(mainScriptName, parentFolder); this.userScriptName = userScriptName; } } private class TempEvalEnvironment extends TempEnvironment { private TempEvalEnvironment(String mainScriptName, File parentFolder) { super(mainScriptName, parentFolder); } } }
Review changes
runtime-management/runtime-management-impl/src/main/java/io/cloudslang/runtime/impl/python/external/ExternalPythonExecutor.java
Review changes
<ide><path>untime-management/runtime-management-impl/src/main/java/io/cloudslang/runtime/impl/python/external/ExternalPythonExecutor.java <ide> <ide> if (fileChildren != null) { <ide> for (File child : fileChildren) { <del> if (isWindows()) { <add> if (SystemUtils.IS_OS_WINDOWS) { <ide> child.setReadOnly(); <ide> } else { <ide> Files.setPosixFilePermissions(child.toPath(), filePermissions); <ide> } <ide> } <ide> } <del> } <del> <del> private static boolean isWindows() { <del> return SystemUtils.IS_OS_WINDOWS; <ide> } <ide> <ide> private String checkPythonPath() {
Java
apache-2.0
2511c2612ea5f3766d4f31233e842d0876e537cd
0
fengbaicanhe/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,Lekanich/intellij-community,da1z/intellij-community,xfournet/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,allotria/intellij-community,gnuhub/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,caot/intellij-community,slisson/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,MichaelNedzelsky/intellij-community,salguarnieri/intellij-community,petteyg/intellij-community,fnouama/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,hurricup/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,samthor/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,robovm/robovm-studio,clumsy/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,semonte/intellij-community,ibinti/intellij-community,izonder/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,robovm/robovm-studio,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,consulo/consulo,diorcety/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,ernestp/consulo,samthor/intellij-community,MichaelNedzelsky/intellij-community,holmes/intellij-community,youdonghai/intellij-community,signed/intellij-community,kdwink/intellij-community,allotria/intellij-comm
unity,SerCeMan/intellij-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,ivan-fedorov/intellij-community,diorcety/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,consulo/consulo,pwoodworth/intellij-community,diorcety/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,signed/intellij-community,alphafoobar/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,xfournet/intellij-community,da1z/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,consulo/consulo,supersven/intellij-community,izonder/intellij-community,semonte/intellij-community,ernestp/consulo,clumsy/intellij-community,youdonghai/intellij-community,akosyakov/intellij-community,FHannes/intellij-community,slisson/intellij-community,xfournet/intellij-community,allotria/intellij-community,adedayo/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,kool79/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,holmes/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,da1z/intellij-community,ibinti/intellij-community,petteyg/intellij-community,samthor/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,lucafavatella/i
ntellij-community,hurricup/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,ernestp/consulo,da1z/intellij-community,nicolargo/intellij-community,allotria/intellij-community,izonder/intellij-community,fitermay/intellij-community,jagguli/intellij-community,samthor/intellij-community,dslomov/intellij-community,dslomov/intellij-community,adedayo/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,retomerz/intellij-community,semonte/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,adedayo/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,tmpgit/intellij-community,kdwink/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,alphafoobar/intellij-community,caot/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,supersven/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,ernestp/consulo,retomerz/intellij-community,ibinti/intellij-community,holmes/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,fnouama/intellij-community,FHannes/intellij-community,retomerz/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,tmpgit/i
ntellij-community,blademainer/intellij-community,kool79/intellij-community,consulo/consulo,clumsy/intellij-community,idea4bsd/idea4bsd,Distrotech/intellij-community,vvv1559/intellij-community,vladmm/intellij-community,retomerz/intellij-community,diorcety/intellij-community,retomerz/intellij-community,blademainer/intellij-community,blademainer/intellij-community,kool79/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,orekyuu/intellij-community,izonder/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,adedayo/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,samthor/intellij-community,FHannes/intellij-community,dslomov/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,semonte/intellij-community,amith01994/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,jagguli/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,xfournet/intellij-community,fitermay/intellij-community,alphafoobar/intellij-community,robovm/robovm-studio,signed/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,supersven/intellij-community,kool79/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,supersven/intellij-community,apixandru/intellij-community,jagguli/intellij-community,xfournet/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,muntasirsyed/intellij-community,adedayo/intellij-community,signed/intellij-community,caot/intellij-community,holmes/intellij-community,tmpgit/intellij-
community,clumsy/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,jagguli/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,fnouama/intellij-community,pwoodworth/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,ibinti/intellij-community,clumsy/intellij-community,robovm/robovm-studio,xfournet/intellij-community,ftomassetti/intellij-community,gnuhub/intellij-community,youdonghai/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,signed/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,dslomov/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,allotria/intellij-community,Distrotech/intellij-community,da1z/intellij-community,asedunov/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,signed/intellij-community,adedayo/intellij-community,izonder/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,supersven/intellij-community,kool79/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,fitermay/intellij-community,semonte/intellij-community,slisson/intellij-community,adedayo/intellij-community,dslomov/intellij-community,mglukhikh/intellij-comm
unity,robovm/robovm-studio,vladmm/intellij-community,holmes/intellij-community,asedunov/intellij-community,asedunov/intellij-community,supersven/intellij-community,fitermay/intellij-community,FHannes/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,suncycheng/intellij-community,izonder/intellij-community,ryano144/intellij-community,retomerz/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,Lekanich/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,SerCeMan/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,ibinti/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,fitermay/intellij-community,fnouama/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,samthor/intellij-community,petteyg/intellij-community,petteyg/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,allotria/intellij-commu
nity,FHannes/intellij-community,akosyakov/intellij-community,samthor/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,apixandru/intellij-community,ibinti/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,kool79/intellij-community,xfournet/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,dslomov/intellij-community,michaelgallacher/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,supersven/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,tmpgit/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community,Distrotech/intellij-community,Distrotech/intellij-community,da1z/intellij-community,diorcety/intellij-community,ernestp/consulo,holmes/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,kdwink/intellij-community,slisson/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,allotria/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-
community,pwoodworth/intellij-community,semonte/intellij-community,MER-GROUP/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,wreckJ/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,kdwink/intellij-community,hurricup/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,asedunov/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,kdwink/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,ryano144/intellij-community,da1z/intellij-community,izonder/intellij-community,asedunov/intellij-community,fitermay/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,retomerz/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,diorcety/intellij-community,holmes/intellij-community,holmes/intellij-community,ryano144/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,hurricup/intellij-community,diorcety/intellij-community,apixandru/intellij-community,samthor/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,blademainer/intel
lij-community,fitermay/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,da1z/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,muntasirsyed/intellij-community,robovm/robovm-studio,caot/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,nicolargo/intellij-community,supersven/intellij-community,izonder/intellij-community,ThiagoGarciaAlves/intellij-community,slisson/intellij-community,signed/intellij-community,kdwink/intellij-community,slisson/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,caot/intellij-community,fnouama/intellij-community,allotria/intellij-community,robovm/robovm-studio,MER-GROUP/intellij-community,ahb0327/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,suncycheng/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,signed/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,consulo/consulo,fengbaicanhe/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,amith01994/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,jagguli/intellij-community,amith01994/intellij-community,robovm/robovm-studio,retomerz/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,ibinti/intellij-community,holmes/intellij-community,kool79/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,fitermay/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,hurricup
/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,jagguli/intellij-community,petteyg/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,fnouama/intellij-community,fitermay/intellij-community,fnouama/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,petteyg/intellij-community,kdwink/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,ernestp/consulo,allotria/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,caot/intellij-community,TangHao1987/intellij-community,TangHao1987/intellij-community,xfournet/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,supersven/intellij-community,izonder/intellij-community,youdonghai/intellij-community,vladmm/intellij-community,ibinti/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,slisson/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,adedayo/intellij-community,diorcety/intellij-community,orekyuu/intellij-community,consulo/consulo,holmes/intellij-community,dslomov/intellij-communi
ty,FHannes/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,samthor/intellij-community,nicolargo/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,caot/intellij-community,holmes/intellij-community,jagguli/intellij-community,hurricup/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,da1z/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,signed/intellij-community,blademainer/intellij-community,vladmm/intellij-community,clumsy/intellij-community,samthor/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,holmes/intellij-community,petteyg/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,kool79/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,signed/intellij-community,gnuhub/intellij-community,orekyuu/intellij-community,clumsy/intellij-community
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.concurrency; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Condition; import com.intellij.util.Consumer; import com.intellij.util.PairConsumer; import java.util.HashMap; import java.util.LinkedList; import java.util.Map; /** * <p>QueueProcessor processes elements which are being added to a queue via {@link #add(Object)} and {@link #addFirst(Object)} methods.</p> * <p>Elements are processed one by one in a special single thread. * The processor itself is passed in the constructor and is called from that thread. * By default processing starts when the first element is added to the queue, though there is an 'autostart' option which holds * the processor until {@link #start()} is called.</p> * * @param <T> type of queue elements. 
*/ public class QueueProcessor<T> { private static final Logger LOG = Logger.getInstance("#com.intellij.util.concurrency.QueueProcessor"); private final PairConsumer<T, Runnable> myProcessor; private final LinkedList<T> myQueue = new LinkedList<T>(); private final Runnable myContinuationContext; private boolean isProcessing; private boolean myStarted; private final ThreadToUse myThreadToUse; private final Condition<?> myDeathCondition; private final Map<MyOverrideEquals, ModalityState> myModalityState; /** * Constructs a QueueProcessor with the given processor and autostart setting. * By default QueueProcessor starts processing when it receives the first element. Pass <code>false</code> to alternate its behavior. * * @param processor processor of queue elements. * @param autostart if <code>true</code> (which is by default), the queue will be processed immediately when it receives the first element. * If <code>false</code>, then it will wait for the {@link #start()} command. * After QueueProcessor has started once, autostart setting doesn't matter anymore: all other elements will be processed immediately. 
*/ public QueueProcessor(final PairConsumer<T, Runnable> processor, boolean autostart, final ThreadToUse threadToUse, final Condition<?> deathCondition) { myProcessor = processor; myStarted = autostart; myThreadToUse = threadToUse; myDeathCondition = deathCondition; myModalityState = new HashMap<MyOverrideEquals, ModalityState>(); myContinuationContext = new Runnable() { @Override public void run() { synchronized (myQueue) { isProcessing = false; if (myQueue.isEmpty()) { myQueue.notifyAll(); } else { startProcessing(); } } } }; } public QueueProcessor(final Consumer<T> processor, final Condition<?> deathCondition, boolean autostart) { this(wrappingProcessor(processor), autostart, ThreadToUse.POOLED, deathCondition); } public void add(T t, ModalityState state) { myModalityState.put(new MyOverrideEquals(t), state); doAdd(t, false); } private static <T> PairConsumer<T, Runnable> wrappingProcessor(final Consumer<T> processor) { return new PairConsumer<T, Runnable>() { @Override public void consume(T item, Runnable runnable) { try { processor.consume(item); } catch (Throwable e) { try { LOG.error(e); } catch (Exception ignore) { // should survive assertions } } runnable.run(); } }; } /** * Constructs a QueueProcessor, which will autostart as soon as the first element is added to it. */ public QueueProcessor(Consumer<T> processor, final Condition<?> deathCondition) { this(processor, deathCondition, true); } /** * Starts queue processing if it hasn't started yet. * Effective only if the QueueProcessor was created with no-autostart option: otherwise processing will start as soon as the first element * is added to the queue. * If there are several elements in the queue, processing starts from the first one. 
*/ public void start() { synchronized (myQueue) { if (myStarted) return; myStarted = true; if (!myQueue.isEmpty()) { startProcessing(); } } } public void add(T element) { doAdd(element, false); } public void addFirst(T element) { doAdd(element, true); } private void doAdd(T element, boolean atHead) { synchronized (myQueue) { if (atHead) { myQueue.addFirst(element); } else { myQueue.add(element); } startProcessing(); } } public void clear() { synchronized (myQueue) { myQueue.clear(); } } public void waitFor() { synchronized (myQueue) { while (isProcessing) { try { myQueue.wait(); } catch (InterruptedException e) { //ok } } } } private boolean startProcessing() { LOG.assertTrue(Thread.holdsLock(myQueue)); if (isProcessing || !myStarted) { return false; } isProcessing = true; final T item = myQueue.removeFirst(); final Runnable runnable = new Runnable() { @Override public void run() { if (myDeathCondition.value(null)) return; try { myProcessor.consume(item, myContinuationContext); } catch (Throwable t) { try { LOG.error(t); } catch (Exception ignore) { // should survive assertions } } } }; final Application application = ApplicationManager.getApplication(); if (ThreadToUse.AWT.equals(myThreadToUse)) { final ModalityState state = myModalityState.remove(new MyOverrideEquals(item)); if (state != null) { application.invokeLater(runnable, state); } else { application.invokeLater(runnable); } } else { application.executeOnPooledThread(runnable); } return true; } public boolean isEmpty() { synchronized (myQueue) { return myQueue.isEmpty() && (!isProcessing); } } public static enum ThreadToUse { AWT, POOLED } private static class MyOverrideEquals { private final Object myDelegate; private MyOverrideEquals(Object delegate) { myDelegate = delegate; } @Override public int hashCode() { return myDelegate.hashCode(); } @Override public boolean equals(Object obj) { return ((MyOverrideEquals)obj).myDelegate == myDelegate; } } }
platform/platform-api/src/com/intellij/util/concurrency/QueueProcessor.java
/* * Copyright 2000-2010 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util.concurrency; import com.intellij.openapi.application.Application; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Condition; import com.intellij.util.Consumer; import com.intellij.util.PairConsumer; import java.util.HashMap; import java.util.LinkedList; import java.util.Map; /** * <p>QueueProcessor processes elements which are being added to a queue via {@link #add(Object)} and {@link #addFirst(Object)} methods.</p> * <p>Elements are processed one by one in a special single thread. * The processor itself is passed in the constructor and is called from that thread. * By default processing starts when the first element is added to the queue, though there is an 'autostart' option which holds * the processor until {@link #start()} is called.</p> * * @param <T> type of queue elements. 
*/ public class QueueProcessor<T> { private static final Logger LOG = Logger.getInstance("#com.intellij.util.concurrency.QueueProcessor"); private final PairConsumer<T, Runnable> myProcessor; private final LinkedList<T> myQueue = new LinkedList<T>(); private final Runnable myContinuationContext; private boolean isProcessing; private boolean myStarted; private final ThreadToUse myThreadToUse; private final Condition<?> myDeathCondition; private final Map<MyOverrideEquals, ModalityState> myModalityState; /** * Constructs a QueueProcessor with the given processor and autostart setting. * By default QueueProcessor starts processing when it receives the first element. Pass <code>false</code> to alternate its behavior. * * @param processor processor of queue elements. * @param autostart if <code>true</code> (which is by default), the queue will be processed immediately when it receives the first element. * If <code>false</code>, then it will wait for the {@link #start()} command. * After QueueProcessor has started once, autostart setting doesn't matter anymore: all other elements will be processed immediately. 
*/ public QueueProcessor(final PairConsumer<T, Runnable> processor, boolean autostart, final ThreadToUse threadToUse, final Condition<?> deathCondition) { myProcessor = processor; myStarted = autostart; myThreadToUse = threadToUse; myDeathCondition = deathCondition; myModalityState = new HashMap<MyOverrideEquals, ModalityState>(); myContinuationContext = new Runnable() { @Override public void run() { synchronized (myQueue) { isProcessing = false; if (myQueue.isEmpty()) { myQueue.notifyAll(); } else { startProcessing(); } } } }; } public QueueProcessor(final Consumer<T> processor, final Condition<?> deathCondition, boolean autostart) { this(wrappingProcessor(processor), autostart, ThreadToUse.POOLED, deathCondition); } public void add(T t, ModalityState state) { myModalityState.put(new MyOverrideEquals(t), state); doAdd(t, false); } private static<T> PairConsumer<T, Runnable> wrappingProcessor(final Consumer<T> processor) { return new PairConsumer<T, Runnable>() { @Override public void consume(T item, Runnable runnable) { try { processor.consume(item); } catch (Throwable e) { LOG.warn(e); } runnable.run(); } }; } /** * Constructs a QueueProcessor, which will autostart as soon as the first element is added to it. */ public QueueProcessor(Consumer<T> processor, final Condition<?> deathCondition) { this(processor, deathCondition, true); } /** * Starts queue processing if it hasn't started yet. * Effective only if the QueueProcessor was created with no-autostart option: otherwise processing will start as soon as the first element * is added to the queue. * If there are several elements in the queue, processing starts from the first one. */ public void start() { synchronized (myQueue) { if (myStarted) return; myStarted = true; if (! 
myQueue.isEmpty()) { startProcessing(); } } } public void add(T element) { doAdd(element, false); } public void addFirst(T element) { doAdd(element, true); } private void doAdd(T element, boolean atHead) { synchronized (myQueue) { if (atHead) { myQueue.addFirst(element); } else { myQueue.add(element); } startProcessing(); } } public void clear() { synchronized (myQueue) { myQueue.clear(); } } public void waitFor() { synchronized (myQueue) { while (isProcessing) { try { myQueue.wait(); } catch (InterruptedException e) { //ok } } } } private boolean startProcessing() { LOG.assertTrue(Thread.holdsLock(myQueue)); if (isProcessing || ! myStarted) { return false; } isProcessing = true; final T item = myQueue.removeFirst(); final Runnable runnable = new Runnable() { @Override public void run() { if (myDeathCondition.value(null)) return; try { myProcessor.consume(item, myContinuationContext); } catch (Throwable t) { LOG.warn(t); } } }; final Application application = ApplicationManager.getApplication(); if (ThreadToUse.AWT.equals(myThreadToUse)) { final ModalityState state = myModalityState.remove(new MyOverrideEquals(item)); if (state != null) { application.invokeLater(runnable, state); } else { application.invokeLater(runnable); } } else { application.executeOnPooledThread(runnable); } return true; } public boolean isEmpty() { synchronized (myQueue) { return myQueue.isEmpty() && (! isProcessing); } } public static enum ThreadToUse { AWT, POOLED } private static class MyOverrideEquals { private final Object myDelegate; private MyOverrideEquals(Object delegate) { myDelegate = delegate; } @Override public int hashCode() { return myDelegate.hashCode(); } @Override public boolean equals(Object obj) { return ((MyOverrideEquals) obj).myDelegate == myDelegate; } } }
QueueProcessor: reporting errors from tasks instead of warnings AppCode:Debugger: exception in evaluator fixed when trying to resolve '<error getting type>' type
platform/platform-api/src/com/intellij/util/concurrency/QueueProcessor.java
QueueProcessor: reporting errors from tasks instead of warnings AppCode:Debugger: exception in evaluator fixed when trying to resolve '<error getting type>' type
<ide><path>latform/platform-api/src/com/intellij/util/concurrency/QueueProcessor.java <ide> isProcessing = false; <ide> if (myQueue.isEmpty()) { <ide> myQueue.notifyAll(); <del> } else { <add> } <add> else { <ide> startProcessing(); <ide> } <ide> } <ide> doAdd(t, false); <ide> } <ide> <del> private static<T> PairConsumer<T, Runnable> wrappingProcessor(final Consumer<T> processor) { <add> private static <T> PairConsumer<T, Runnable> wrappingProcessor(final Consumer<T> processor) { <ide> return new PairConsumer<T, Runnable>() { <ide> @Override <ide> public void consume(T item, Runnable runnable) { <ide> processor.consume(item); <ide> } <ide> catch (Throwable e) { <del> LOG.warn(e); <add> try { <add> LOG.error(e); <add> } <add> catch (Exception ignore) { <add> // should survive assertions <add> } <ide> } <ide> runnable.run(); <ide> } <ide> synchronized (myQueue) { <ide> if (myStarted) return; <ide> myStarted = true; <del> if (! myQueue.isEmpty()) { <add> if (!myQueue.isEmpty()) { <ide> startProcessing(); <ide> } <ide> } <ide> synchronized (myQueue) { <ide> if (atHead) { <ide> myQueue.addFirst(element); <del> } else { <add> } <add> else { <ide> myQueue.add(element); <ide> } <ide> startProcessing(); <ide> while (isProcessing) { <ide> try { <ide> myQueue.wait(); <del> } catch (InterruptedException e) { <add> } <add> catch (InterruptedException e) { <ide> //ok <ide> } <ide> } <ide> private boolean startProcessing() { <ide> LOG.assertTrue(Thread.holdsLock(myQueue)); <ide> <del> if (isProcessing || ! 
myStarted) { <add> if (isProcessing || !myStarted) { <ide> return false; <ide> } <ide> isProcessing = true; <ide> if (myDeathCondition.value(null)) return; <ide> try { <ide> myProcessor.consume(item, myContinuationContext); <del> } catch (Throwable t) { <del> LOG.warn(t); <add> } <add> catch (Throwable t) { <add> try { <add> LOG.error(t); <add> } <add> catch (Exception ignore) { <add> // should survive assertions <add> } <ide> } <ide> } <ide> }; <ide> final ModalityState state = myModalityState.remove(new MyOverrideEquals(item)); <ide> if (state != null) { <ide> application.invokeLater(runnable, state); <del> } else { <add> } <add> else { <ide> application.invokeLater(runnable); <ide> } <del> } else { <add> } <add> else { <ide> application.executeOnPooledThread(runnable); <ide> } <ide> return true; <ide> <ide> public boolean isEmpty() { <ide> synchronized (myQueue) { <del> return myQueue.isEmpty() && (! isProcessing); <add> return myQueue.isEmpty() && (!isProcessing); <ide> } <ide> } <ide> <ide> <ide> @Override <ide> public boolean equals(Object obj) { <del> return ((MyOverrideEquals) obj).myDelegate == myDelegate; <add> return ((MyOverrideEquals)obj).myDelegate == myDelegate; <ide> } <ide> } <ide> }
Java
lgpl-2.1
error: pathspec 'intermine/src/java/org/intermine/sql/precompute/QueryOptimiser.java' did not match any file(s) known to git
158e0743e103f22e458080fc5d85e6d0e8bf2006
1
kimrutherford/intermine,tomck/intermine,Arabidopsis-Information-Portal/intermine,JoeCarlson/intermine,justincc/intermine,justincc/intermine,kimrutherford/intermine,tomck/intermine,tomck/intermine,kimrutherford/intermine,elsiklab/intermine,julie-sullivan/phytomine,kimrutherford/intermine,JoeCarlson/intermine,justincc/intermine,tomck/intermine,drhee/toxoMine,zebrafishmine/intermine,tomck/intermine,kimrutherford/intermine,JoeCarlson/intermine,zebrafishmine/intermine,justincc/intermine,justincc/intermine,justincc/intermine,Arabidopsis-Information-Portal/intermine,zebrafishmine/intermine,JoeCarlson/intermine,drhee/toxoMine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,elsiklab/intermine,justincc/intermine,Arabidopsis-Information-Portal/intermine,joshkh/intermine,elsiklab/intermine,julie-sullivan/phytomine,kimrutherford/intermine,JoeCarlson/intermine,JoeCarlson/intermine,zebrafishmine/intermine,joshkh/intermine,justincc/intermine,zebrafishmine/intermine,tomck/intermine,tomck/intermine,joshkh/intermine,julie-sullivan/phytomine,drhee/toxoMine,joshkh/intermine,drhee/toxoMine,kimrutherford/intermine,kimrutherford/intermine,Arabidopsis-Information-Portal/intermine,kimrutherford/intermine,elsiklab/intermine,drhee/toxoMine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,joshkh/intermine,elsiklab/intermine,joshkh/intermine,Arabidopsis-Information-Portal/intermine,JoeCarlson/intermine,zebrafishmine/intermine,JoeCarlson/intermine,julie-sullivan/phytomine,zebrafishmine/intermine,elsiklab/intermine,justincc/intermine,elsiklab/intermine,zebrafishmine/intermine,Arabidopsis-Information-Portal/intermine,drhee/toxoMine,elsiklab/intermine,julie-sullivan/phytomine,Arabidopsis-Information-Portal/intermine,tomck/intermine,drhee/toxoMine,julie-sullivan/phytomine,elsiklab/intermine,JoeCarlson/intermine,julie-sullivan/phytomine,joshkh/intermine,tomck/intermine,zebrafishmine/intermine,joshkh/intermine,joshkh/intermine
package org.flymine.sql.precompute; import org.flymine.sql.query.Query; import org.flymine.sql.query.AbstractTable; import org.flymine.sql.query.AbstractConstraint; import org.flymine.util.MappingUtil; import java.util.Comparator; import java.util.HashSet; import java.util.Iterator; import java.util.Set; import java.util.SortedMap; import java.util.Map; import java.util.TreeMap; import java.sql.Connection; import java.sql.SQLException; //import org.flymine.sql.query. /** * A static class providing the code to optimise a query, given a database (presumably with a table * describing the available precomputed tables). * * @author Matthew Wakeling * @author Andrew Varley */ public class QueryOptimiser { /** * Runs the optimiser through the query represented in the String, given the database. If * anything goes wrong, then the original String is returned. * * @param query the query to optimise * @param database the database to use to find precomputed tables * @return a String representing the optimised query * @throws SQLException if a database error occurs */ public static String optimise(String query, Connection database) throws SQLException { try { return optimise(new Query(query), database).getSQLString(); } catch (RuntimeException e) { // Query was not acceptable. } return query; } /** * Runs the optimiser through the query, given the database. 
* * @param query the Query to optimise * @param database the database to use to find precomputed tables * @return the optimised Query * @throws SQLException if a database error occurs */ public static Query optimise(Query query, Connection database) throws SQLException { BestQueryExplainer bestQuery = new BestQueryExplainer(); try { Set precomputedTables = null; //PrecomputedTableManager.getPrecomputedTables(database); recursiveOptimise(precomputedTables, query, bestQuery); } catch (BestQueryException e) { // Ignore - bestQuery decided to cut short the search } return bestQuery.getBestQuery(); } /** * Recursively optimises the query, given the set of precomputed tables, and updates the * BestQuery object with each Query found. * When this method returns, either bestQuery will hold the fastest Query, or the bestQuery * object will have decided to cut short proceedings. Either way, use the Query in bestQuery * for best results. * * @param precomputedTables a Set of PrecomputedTable objects to use * @param query a query to optimise * @param bestQuery a BestQuery object to update with each optimised Query object * @throws BestQueryException if the BestQuery decides to cut short the search * @throws SQLException if a database error occurs */ protected static void recursiveOptimise(Set precomputedTables, Query query, BestQuery bestQuery) throws BestQueryException, SQLException { // This line creates a Map from PrecomputedTable objects to Sets of optimised Query objects. SortedMap map = mergeMultiple(precomputedTables, query); // Now we want to iterate through every optimised Query in the Map of Sets. Iterator mapIter = map.entrySet().iterator(); while (mapIter.hasNext()) { Map.Entry mapEntry = (Map.Entry) mapIter.next(); PrecomputedTable p = (PrecomputedTable) mapEntry.getKey(); Set queries = (Set) mapEntry.getValue(); // We should prepare a Set of PrecomputedTable objects to reoptimise the Queries in this // Set. 
Set newPrecomputedTables = map.headMap(p).keySet(); // Now we want to iterator through every optimised Query in this Set. Iterator queryIter = queries.iterator(); while (queryIter.hasNext()) { Query optimisedQuery = (Query) queryIter.next(); // Now we want to call recursiveOptimise on each one. recursiveOptimise(newPrecomputedTables, optimisedQuery, bestQuery); // Also, we need to update BestQuery with this optimised Query. bestQuery.add(optimisedQuery); } } } /** * Iteratively calls merge on query with all the PrecomputedTables in a Set, returning the * results in a Map from the PrecomputedTable to the Set that merge returns. * * @param precomputedTables a Set of PrecomputedTable objects to iterate through * @param query the Query to pass in to merge * @return a Map from all PrecomputedTable objects that produced a non-empty Set, to the Set * that was produced by merge */ protected static SortedMap mergeMultiple(Set precomputedTables, Query query) { SortedMap result = new TreeMap(); Iterator precompIter = precomputedTables.iterator(); while (precompIter.hasNext()) { PrecomputedTable p = (PrecomputedTable) precompIter.next(); Set mergeResult = merge(p, query); if (!mergeResult.isEmpty()) { result.put(p, mergeResult); } } return result; } /** * Finds all the possible uses of a given precomputed table in the query, and returns them as * a Set of new Queries. * If there is no scope for the PrecomputedTable to replace any part of the Query, then this * method will return an empty Set. * If there are two independent opportunities to insert the PrecomputedTable, then this method * will return three Query objects in the Set - one with the first opportunity used, one with * the second opportunity used, and one with both. * A PrecomputedTable is deemed to "fit", if it <ol> * <li>Contains no other tables than those present in the Query</li> * <li>Contains no constraints that restrict the output more than the constraints of the Query. 
* Constraints that equal a constraint in the Query can be missed out of the resulting * Query</li> * <li>Contains all the items in the SELECT list that are present in the Query's SELECT from the * tables that are to be replaced</li> * </ol> * This type of precomputed table can be fitted multiple times. * Note that a subquery could be replaced completely by a precomputed table, or can be optimised * in-place by another precomputed table (in which case merge should call itself with the * subquery). * Alternatively, If the PrecomputedTable contains a GROUP BY, then all fields in the Query's * SELECT list and all primary keys, that are not to be replaced by the PrecomputedTable must * be in the Query's GROUP BY clause, and the fields in the GROUP BY for the tables that are * being replaced must match completely. Also, all the fields in the SELECT list of the query * must be present in the PrecomputedTable. This type of PrecomputedTable can only be fitted * once. * * @param precomputedTable the PrecomputedTable to use in the new Queries * @param query the Query object to try to fit the PrecomputedTable into * @return a Set of Query objects, one for each combination of the PrecomputedTable in the * query */ protected static Set merge(PrecomputedTable precomputedTable, Query query) { Query precompQuery = precomputedTable.getQuery(); if (!precompQuery.getGroupBy().isEmpty()) { return mergeGroupBy(precomputedTable, query); } return null; } /** * Tries to match a PrecomputedTable with a GROUP BY clause to this query. * @see merge for a description of how this works. In fact, we aren't implementing this * properly. We are imposing the restriction that there can be no more tables than in the * PrecomputedTable, therefore the first restriction mentioned above is followed automatically. 
* * @param precomputedTable the PrecomputedTable to use in the new Query * @param query the Query object to try to fit the PrecomputedTable into * @return a Set containing maybe a new Query object with the PrecomputedTable inserted */ protected static Set mergeGroupBy(PrecomputedTable precomputedTable, Query query) { Query precompQuery = precomputedTable.getQuery(); Set retval = new HashSet(); if (precompQuery.getGroupBy().size() != query.getGroupBy().size()) { // GROUP BY clauses are unequal in size. return retval; } if (precompQuery.getFrom().size() != query.getFrom().size()) { // FROM lists are unequal in size. return retval; } // Find the possible mappings from tables in the // PrecomputedTable query to tables in the Query Set mappings = MappingUtil.findCombinations(precompQuery.getFrom(), query.getFrom(), new AbstractTableComparator()); // Iterate through the mappings and compare combinations Iterator mappingsIter = mappings.iterator(); while (mappingsIter.hasNext()) { Map mapping = (Map) mappingsIter.next(); // Remap the aliases remapAliases(mapping); // For each constraint in the precomputed table, there // must be one in the query that is equal, or more // restrictive if (!compareConstraints(precompQuery.getWhere(), query.getWhere())) { continue; } } Iterator precompTableIter = precompQuery.getFrom().iterator(); while (precompTableIter.hasNext()) { AbstractTable precompTable = (AbstractTable) precompTableIter.next(); } return null; } /** * Compares 2 sets of AbstractConstraints * * @param set1 the first set * @param set2 the second set * @return true if every element of set1 is equal or less restrictive * than some element in set2 */ protected static boolean compareConstraints(Set set1, Set set2) { Iterator set1Iter = set1.iterator(); while (set1Iter.hasNext()) { AbstractConstraint constraint1 = (AbstractConstraint) set1Iter.next(); boolean match = false; Iterator set2Iter = set2.iterator(); while (set2Iter.hasNext()) { AbstractConstraint constraint2 = 
(AbstractConstraint) set2Iter.next(); int compareResult = constraint1.compare(constraint2); if (AbstractConstraint.checkComparisonImplies(compareResult)) { match = true; break; } } if (!match) { return false; } } return true; } /** * Alters all the aliases of the tables being mapped to, to equal the alias of the table mapping * to them. After this operation (where the AbstractTables of the PrecomputedTable are mapped * to the AbstractTables of the Query), the Constraint objects of the Query and the * PrecomputedTable can be directly compared with their standard compare methods. * * @param map the Map from AbstractTable objects with the source alias, to other AbstractTable * objects with the destination alias */ protected static void remapAliases(Map map) { Iterator mapIter = map.entrySet().iterator(); while (mapIter.hasNext()) { Map.Entry mapEntry = (Map.Entry) mapIter.next(); AbstractTable firstTable = (AbstractTable) mapEntry.getKey(); AbstractTable secondTable = (AbstractTable) mapEntry.getValue(); secondTable.setAlias(firstTable.getAlias()); } } /** * Compares two AbstractTables using their equalsIgnoreAlias() method */ protected static class AbstractTableComparator implements Comparator { /** * Constructor */ public AbstractTableComparator() { } /** * Compare two AbstractTables using equalsIgnoreAlias() * * @param a the first AbstractTable * @param b the second AbstractTable * @return zero if the two AbstractTables are equal */ public int compare(Object a, Object b) { return (((AbstractTable) a).equalsIgnoreAlias((AbstractTable) b) ? 0 : -1); } } }
intermine/src/java/org/intermine/sql/precompute/QueryOptimiser.java
Initial version.
intermine/src/java/org/intermine/sql/precompute/QueryOptimiser.java
Initial version.
<ide><path>ntermine/src/java/org/intermine/sql/precompute/QueryOptimiser.java <add>package org.flymine.sql.precompute; <add> <add>import org.flymine.sql.query.Query; <add>import org.flymine.sql.query.AbstractTable; <add>import org.flymine.sql.query.AbstractConstraint; <add>import org.flymine.util.MappingUtil; <add>import java.util.Comparator; <add>import java.util.HashSet; <add>import java.util.Iterator; <add>import java.util.Set; <add>import java.util.SortedMap; <add>import java.util.Map; <add>import java.util.TreeMap; <add>import java.sql.Connection; <add>import java.sql.SQLException; <add>//import org.flymine.sql.query. <add> <add>/** <add> * A static class providing the code to optimise a query, given a database (presumably with a table <add> * describing the available precomputed tables). <add> * <add> * @author Matthew Wakeling <add> * @author Andrew Varley <add> */ <add>public class QueryOptimiser <add>{ <add> /** <add> * Runs the optimiser through the query represented in the String, given the database. If <add> * anything goes wrong, then the original String is returned. <add> * <add> * @param query the query to optimise <add> * @param database the database to use to find precomputed tables <add> * @return a String representing the optimised query <add> * @throws SQLException if a database error occurs <add> */ <add> public static String optimise(String query, Connection database) throws SQLException { <add> try { <add> return optimise(new Query(query), database).getSQLString(); <add> } catch (RuntimeException e) { <add> // Query was not acceptable. <add> } <add> return query; <add> } <add> <add> /** <add> * Runs the optimiser through the query, given the database. 
<add> * <add> * @param query the Query to optimise <add> * @param database the database to use to find precomputed tables <add> * @return the optimised Query <add> * @throws SQLException if a database error occurs <add> */ <add> public static Query optimise(Query query, Connection database) throws SQLException { <add> BestQueryExplainer bestQuery = new BestQueryExplainer(); <add> try { <add> Set precomputedTables = null; //PrecomputedTableManager.getPrecomputedTables(database); <add> recursiveOptimise(precomputedTables, query, bestQuery); <add> } catch (BestQueryException e) { <add> // Ignore - bestQuery decided to cut short the search <add> } <add> return bestQuery.getBestQuery(); <add> } <add> <add> /** <add> * Recursively optimises the query, given the set of precomputed tables, and updates the <add> * BestQuery object with each Query found. <add> * When this method returns, either bestQuery will hold the fastest Query, or the bestQuery <add> * object will have decided to cut short proceedings. Either way, use the Query in bestQuery <add> * for best results. <add> * <add> * @param precomputedTables a Set of PrecomputedTable objects to use <add> * @param query a query to optimise <add> * @param bestQuery a BestQuery object to update with each optimised Query object <add> * @throws BestQueryException if the BestQuery decides to cut short the search <add> * @throws SQLException if a database error occurs <add> */ <add> protected static void recursiveOptimise(Set precomputedTables, Query query, <add> BestQuery bestQuery) throws BestQueryException, SQLException { <add> // This line creates a Map from PrecomputedTable objects to Sets of optimised Query objects. <add> SortedMap map = mergeMultiple(precomputedTables, query); <add> // Now we want to iterate through every optimised Query in the Map of Sets. 
<add> Iterator mapIter = map.entrySet().iterator(); <add> while (mapIter.hasNext()) { <add> Map.Entry mapEntry = (Map.Entry) mapIter.next(); <add> PrecomputedTable p = (PrecomputedTable) mapEntry.getKey(); <add> Set queries = (Set) mapEntry.getValue(); <add> // We should prepare a Set of PrecomputedTable objects to reoptimise the Queries in this <add> // Set. <add> Set newPrecomputedTables = map.headMap(p).keySet(); <add> // Now we want to iterator through every optimised Query in this Set. <add> Iterator queryIter = queries.iterator(); <add> while (queryIter.hasNext()) { <add> Query optimisedQuery = (Query) queryIter.next(); <add> // Now we want to call recursiveOptimise on each one. <add> recursiveOptimise(newPrecomputedTables, optimisedQuery, bestQuery); <add> // Also, we need to update BestQuery with this optimised Query. <add> bestQuery.add(optimisedQuery); <add> } <add> } <add> } <add> <add> /** <add> * Iteratively calls merge on query with all the PrecomputedTables in a Set, returning the <add> * results in a Map from the PrecomputedTable to the Set that merge returns. <add> * <add> * @param precomputedTables a Set of PrecomputedTable objects to iterate through <add> * @param query the Query to pass in to merge <add> * @return a Map from all PrecomputedTable objects that produced a non-empty Set, to the Set <add> * that was produced by merge <add> */ <add> protected static SortedMap mergeMultiple(Set precomputedTables, Query query) { <add> SortedMap result = new TreeMap(); <add> Iterator precompIter = precomputedTables.iterator(); <add> while (precompIter.hasNext()) { <add> PrecomputedTable p = (PrecomputedTable) precompIter.next(); <add> Set mergeResult = merge(p, query); <add> if (!mergeResult.isEmpty()) { <add> result.put(p, mergeResult); <add> } <add> } <add> return result; <add> } <add> <add> /** <add> * Finds all the possible uses of a given precomputed table in the query, and returns them as <add> * a Set of new Queries. 
<add> * If there is no scope for the PrecomputedTable to replace any part of the Query, then this <add> * method will return an empty Set. <add> * If there are two independent opportunities to insert the PrecomputedTable, then this method <add> * will return three Query objects in the Set - one with the first opportunity used, one with <add> * the second opportunity used, and one with both. <add> * A PrecomputedTable is deemed to "fit", if it <ol> <add> * <li>Contains no other tables than those present in the Query</li> <add> * <li>Contains no constraints that restrict the output more than the constraints of the Query. <add> * Constraints that equal a constraint in the Query can be missed out of the resulting <add> * Query</li> <add> * <li>Contains all the items in the SELECT list that are present in the Query's SELECT from the <add> * tables that are to be replaced</li> <add> * </ol> <add> * This type of precomputed table can be fitted multiple times. <add> * Note that a subquery could be replaced completely by a precomputed table, or can be optimised <add> * in-place by another precomputed table (in which case merge should call itself with the <add> * subquery). <add> * Alternatively, If the PrecomputedTable contains a GROUP BY, then all fields in the Query's <add> * SELECT list and all primary keys, that are not to be replaced by the PrecomputedTable must <add> * be in the Query's GROUP BY clause, and the fields in the GROUP BY for the tables that are <add> * being replaced must match completely. Also, all the fields in the SELECT list of the query <add> * must be present in the PrecomputedTable. This type of PrecomputedTable can only be fitted <add> * once. 
<add> * <add> * @param precomputedTable the PrecomputedTable to use in the new Queries <add> * @param query the Query object to try to fit the PrecomputedTable into <add> * @return a Set of Query objects, one for each combination of the PrecomputedTable in the <add> * query <add> */ <add> protected static Set merge(PrecomputedTable precomputedTable, Query query) { <add> Query precompQuery = precomputedTable.getQuery(); <add> if (!precompQuery.getGroupBy().isEmpty()) { <add> return mergeGroupBy(precomputedTable, query); <add> } <add> return null; <add> } <add> <add> /** <add> * Tries to match a PrecomputedTable with a GROUP BY clause to this query. <add> * @see merge for a description of how this works. In fact, we aren't implementing this <add> * properly. We are imposing the restriction that there can be no more tables than in the <add> * PrecomputedTable, therefore the first restriction mentioned above is followed automatically. <add> * <add> * @param precomputedTable the PrecomputedTable to use in the new Query <add> * @param query the Query object to try to fit the PrecomputedTable into <add> * @return a Set containing maybe a new Query object with the PrecomputedTable inserted <add> */ <add> protected static Set mergeGroupBy(PrecomputedTable precomputedTable, Query query) { <add> Query precompQuery = precomputedTable.getQuery(); <add> Set retval = new HashSet(); <add> if (precompQuery.getGroupBy().size() != query.getGroupBy().size()) { <add> // GROUP BY clauses are unequal in size. <add> return retval; <add> } <add> if (precompQuery.getFrom().size() != query.getFrom().size()) { <add> // FROM lists are unequal in size. 
<add> return retval; <add> } <add> <add> // Find the possible mappings from tables in the <add> // PrecomputedTable query to tables in the Query <add> Set mappings = MappingUtil.findCombinations(precompQuery.getFrom(), <add> query.getFrom(), <add> new AbstractTableComparator()); <add> <add> // Iterate through the mappings and compare combinations <add> Iterator mappingsIter = mappings.iterator(); <add> while (mappingsIter.hasNext()) { <add> Map mapping = (Map) mappingsIter.next(); <add> <add> // Remap the aliases <add> remapAliases(mapping); <add> <add> // For each constraint in the precomputed table, there <add> // must be one in the query that is equal, or more <add> // restrictive <add> <add> if (!compareConstraints(precompQuery.getWhere(), query.getWhere())) { <add> continue; <add> } <add> } <add> <add> <add> Iterator precompTableIter = precompQuery.getFrom().iterator(); <add> while (precompTableIter.hasNext()) { <add> AbstractTable precompTable = (AbstractTable) precompTableIter.next(); <add> } <add> return null; <add> } <add> <add> <add> /** <add> * Compares 2 sets of AbstractConstraints <add> * <add> * @param set1 the first set <add> * @param set2 the second set <add> * @return true if every element of set1 is equal or less restrictive <add> * than some element in set2 <add> */ <add> protected static boolean compareConstraints(Set set1, Set set2) { <add> Iterator set1Iter = set1.iterator(); <add> while (set1Iter.hasNext()) { <add> AbstractConstraint constraint1 = (AbstractConstraint) set1Iter.next(); <add> boolean match = false; <add> Iterator set2Iter = set2.iterator(); <add> while (set2Iter.hasNext()) { <add> AbstractConstraint constraint2 = (AbstractConstraint) set2Iter.next(); <add> int compareResult = constraint1.compare(constraint2); <add> if (AbstractConstraint.checkComparisonImplies(compareResult)) { <add> match = true; <add> break; <add> } <add> } <add> if (!match) { <add> return false; <add> } <add> } <add> return true; <add> } <add> <add> /** 
<add> * Alters all the aliases of the tables being mapped to, to equal the alias of the table mapping <add> * to them. After this operation (where the AbstractTables of the PrecomputedTable are mapped <add> * to the AbstractTables of the Query), the Constraint objects of the Query and the <add> * PrecomputedTable can be directly compared with their standard compare methods. <add> * <add> * @param map the Map from AbstractTable objects with the source alias, to other AbstractTable <add> * objects with the destination alias <add> */ <add> protected static void remapAliases(Map map) { <add> Iterator mapIter = map.entrySet().iterator(); <add> while (mapIter.hasNext()) { <add> Map.Entry mapEntry = (Map.Entry) mapIter.next(); <add> AbstractTable firstTable = (AbstractTable) mapEntry.getKey(); <add> AbstractTable secondTable = (AbstractTable) mapEntry.getValue(); <add> secondTable.setAlias(firstTable.getAlias()); <add> } <add> } <add> <add> <add> /** <add> * Compares two AbstractTables using their equalsIgnoreAlias() method <add> */ <add> protected static class AbstractTableComparator implements Comparator <add> { <add> /** <add> * Constructor <add> */ <add> public AbstractTableComparator() { <add> } <add> /** <add> * Compare two AbstractTables using equalsIgnoreAlias() <add> * <add> * @param a the first AbstractTable <add> * @param b the second AbstractTable <add> * @return zero if the two AbstractTables are equal <add> */ <add> public int compare(Object a, Object b) { <add> return (((AbstractTable) a).equalsIgnoreAlias((AbstractTable) b) ? 0 : -1); <add> } <add> } <add>}
Java
agpl-3.0
df3c7eff479a77c36ab73fb9cd044fc0d0fa8954
0
duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test
5ccf994a-2e62-11e5-9284-b827eb9e62be
hello.java
5cca345a-2e62-11e5-9284-b827eb9e62be
5ccf994a-2e62-11e5-9284-b827eb9e62be
hello.java
5ccf994a-2e62-11e5-9284-b827eb9e62be
<ide><path>ello.java <del>5cca345a-2e62-11e5-9284-b827eb9e62be <add>5ccf994a-2e62-11e5-9284-b827eb9e62be
Java
apache-2.0
5e74bbed3698e94a2b7d3e3353880bdf5f5d3205
0
saucam/incubator-parquet-mr,rdblue/parquet-mr,spena/parquet-mr,hassyma/parquet-mr,Zariel/parquet-mr,piyushnarang/parquet-mr,tsdeng/incubator-parquet-mr,nkhuyu/parquet-mr,laurentgo/parquet-mr,nitin2goyal/parquet-mr,hassyma/parquet-mr,sworisbreathing/parquet-mr,HyukjinKwon/parquet-mr,danielcweeks/incubator-parquet-mr,apache/parquet-mr,tsdeng/incubator-parquet-mr,hassyma/parquet-mr,DataDog/parquet-mr,SinghAsDev/parquet-mr,piyushnarang/parquet-mr,winningsix/incubator-parquet-mr,nitin2goyal/parquet-mr-1,SinghAsDev/parquet-mr,apache/parquet-mr,danielcweeks/incubator-parquet-mr,danielcweeks/incubator-parquet-mr,zhenxiao/parquet-mr,dongche/incubator-parquet-mr,nevillelyh/parquet-mr,HyukjinKwon/parquet-mr-1,cchang738/parquet-mr,nevillelyh/parquet-mr,laurentgo/parquet-mr,winningsix/incubator-parquet-mr,nkhuyu/parquet-mr,HyukjinKwon/parquet-mr-1,davidgin/parquet-mr,forcedotcom/incubator-parquet-mr,SaintBacchus/parquet-mr,apache/parquet-mr,sircodesalotOfTheRound/parquet-mr,MickDavies/incubator-parquet-mr,nezihyigitbasi-nflx/parquet-mr,nguyenvanthan/parquet-mr,forcedotcom/incubator-parquet-mr,dongche/incubator-parquet-mr,dongche/incubator-parquet-mr,nitin2goyal/parquet-mr-1,nitin2goyal/parquet-mr,coughman/incubator-parquet-mr,dlanza1/parquet-mr,jaltekruse/parquet-mr-1,coughman/incubator-parquet-mr,spena/parquet-mr,nezihyigitbasi-nflx/parquet-mr,davidgin/parquet-mr,nguyenvanthan/parquet-mr,nguyenvanthan/parquet-mr,davidgin/parquet-mr,jaltekruse/parquet-mr-1,spena/parquet-mr,SaintBacchus/parquet-mr,cchang738/parquet-mr,DataDog/parquet-mr,sircodesalotOfTheRound/parquet-mr,DataDog/parquet-mr,rdblue/parquet-mr,SinghAsDev/parquet-mr,SaintBacchus/parquet-mr,Zariel/parquet-mr,nevillelyh/parquet-mr,zhenxiao/parquet-mr,piyushnarang/parquet-mr,winningsix/incubator-parquet-mr,nitin2goyal/parquet-mr-1,pronix/parquet-mr,sworisbreathing/parquet-mr,laurentgo/parquet-mr,MickDavies/incubator-parquet-mr,cchang738/parquet-mr,zhenxiao/parquet-mr,HyukjinKwon/parquet-mr,dlanza1/parquet-mr,saucam/incub
ator-parquet-mr,HyukjinKwon/parquet-mr,HyukjinKwon/parquet-mr-1,pronix/parquet-mr,tsdeng/incubator-parquet-mr,jaltekruse/parquet-mr-1,coughman/incubator-parquet-mr,pronix/parquet-mr,sircodesalotOfTheRound/parquet-mr,rdblue/parquet-mr,nezihyigitbasi-nflx/parquet-mr,dlanza1/parquet-mr,nitin2goyal/parquet-mr,forcedotcom/incubator-parquet-mr,nkhuyu/parquet-mr,MickDavies/incubator-parquet-mr,saucam/incubator-parquet-mr,Zariel/parquet-mr,sworisbreathing/parquet-mr
/** * Copyright 2012 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package parquet.thrift; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.thrift.TBase; import parquet.hadoop.ParquetWriter; import parquet.hadoop.metadata.CompressionCodecName; import parquet.hadoop.thrift.ThriftWriteSupport; /** * To generate Parquet files using thrift * * @author Julien Le Dem * * @param <T> the type of the thrift class used to write data */ public class ThriftParquetWriter<T extends TBase<?,?>> extends ParquetWriter<T> { public ThriftParquetWriter(Path file, Class<T> thriftClass, CompressionCodecName compressionCodecName) throws IOException { super(file, new ThriftWriteSupport<T>(thriftClass), compressionCodecName, ParquetWriter.DEFAULT_BLOCK_SIZE, ParquetWriter.DEFAULT_PAGE_SIZE); } public ThriftParquetWriter(Path file, Class<T> thriftClass, CompressionCodecName compressionCodecName, int blockSize, int pageSize, boolean enableDictionary, boolean validating) throws IOException { super(file, new ThriftWriteSupport<T>(thriftClass), compressionCodecName, blockSize, pageSize, enableDictionary, validating); } public ThriftParquetWriter(Path file, Class<T> thriftClass, CompressionCodecName compressionCodecName, int blockSize, int pageSize, boolean enableDictionary, boolean validating, Configuration conf) throws IOException { super(file, new ThriftWriteSupport<T>(thriftClass), compressionCodecName, blockSize, 
pageSize, pageSize, enableDictionary, validating, DEFAULT_WRITER_VERSION, conf); } }
parquet-thrift/src/main/java/parquet/thrift/ThriftParquetWriter.java
/** * Copyright 2012 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package parquet.thrift; import java.io.IOException; import org.apache.hadoop.fs.Path; import org.apache.thrift.TBase; import parquet.hadoop.ParquetWriter; import parquet.hadoop.metadata.CompressionCodecName; import parquet.hadoop.thrift.ThriftWriteSupport; /** * To generate Parquet files using thrift * * @author Julien Le Dem * * @param <T> the type of the thrift class used to write data */ public class ThriftParquetWriter<T extends TBase<?,?>> extends ParquetWriter<T> { public ThriftParquetWriter(Path file, Class<T> thriftClass, CompressionCodecName compressionCodecName) throws IOException { super(file, new ThriftWriteSupport<T>(thriftClass), compressionCodecName, ParquetWriter.DEFAULT_BLOCK_SIZE, ParquetWriter.DEFAULT_PAGE_SIZE); } public ThriftParquetWriter(Path file, Class<T> thriftClass, CompressionCodecName compressionCodecName, int blockSize, int pageSize, boolean enableDictionary, boolean validating) throws IOException { super(file, new ThriftWriteSupport<T>(thriftClass), compressionCodecName, blockSize, pageSize, enableDictionary, validating); } }
Add Configuration constructor in thrift writer for #295.
parquet-thrift/src/main/java/parquet/thrift/ThriftParquetWriter.java
Add Configuration constructor in thrift writer for #295.
<ide><path>arquet-thrift/src/main/java/parquet/thrift/ThriftParquetWriter.java <ide> <ide> import java.io.IOException; <ide> <add>import org.apache.hadoop.conf.Configuration; <ide> import org.apache.hadoop.fs.Path; <ide> import org.apache.thrift.TBase; <ide> <ide> super(file, new ThriftWriteSupport<T>(thriftClass), compressionCodecName, blockSize, pageSize, enableDictionary, validating); <ide> } <ide> <add> public ThriftParquetWriter(Path file, Class<T> thriftClass, CompressionCodecName compressionCodecName, int blockSize, int pageSize, boolean enableDictionary, boolean validating, Configuration conf) throws IOException { <add> super(file, new ThriftWriteSupport<T>(thriftClass), compressionCodecName, <add> blockSize, pageSize, pageSize, enableDictionary, validating, <add> DEFAULT_WRITER_VERSION, conf); <add> } <add> <ide> }
Java
apache-2.0
1b0c89b854df3298b9fe4da5a89cccb73a875d3e
0
chasegawa/uPortal,apetro/uPortal,Mines-Albi/esup-uportal,drewwills/uPortal,Jasig/SSP-Platform,apetro/uPortal,jonathanmtran/uPortal,EdiaEducationTechnology/uPortal,mgillian/uPortal,andrewstuart/uPortal,kole9273/uPortal,ASU-Capstone/uPortal,chasegawa/uPortal,EdiaEducationTechnology/uPortal,doodelicious/uPortal,pspaude/uPortal,drewwills/uPortal,groybal/uPortal,vbonamy/esup-uportal,stalele/uPortal,cousquer/uPortal,bjagg/uPortal,andrewstuart/uPortal,apetro/uPortal,andrewstuart/uPortal,Jasig/SSP-Platform,Jasig/uPortal-start,GIP-RECIA/esup-uportal,pspaude/uPortal,EsupPortail/esup-uportal,timlevett/uPortal,jl1955/uPortal5,ASU-Capstone/uPortal,phillips1021/uPortal,GIP-RECIA/esup-uportal,ChristianMurphy/uPortal,EsupPortail/esup-uportal,mgillian/uPortal,joansmith/uPortal,MichaelVose2/uPortal,bjagg/uPortal,jhelmer-unicon/uPortal,pspaude/uPortal,timlevett/uPortal,pspaude/uPortal,doodelicious/uPortal,ChristianMurphy/uPortal,timlevett/uPortal,phillips1021/uPortal,ChristianMurphy/uPortal,ASU-Capstone/uPortal-Forked,jl1955/uPortal5,phillips1021/uPortal,jhelmer-unicon/uPortal,jameswennmacher/uPortal,EsupPortail/esup-uportal,Jasig/uPortal,apetro/uPortal,Jasig/uPortal-start,jhelmer-unicon/uPortal,Mines-Albi/esup-uportal,Jasig/uPortal,GIP-RECIA/esco-portail,groybal/uPortal,Mines-Albi/esup-uportal,doodelicious/uPortal,groybal/uPortal,stalele/uPortal,jameswennmacher/uPortal,ASU-Capstone/uPortal,joansmith/uPortal,jhelmer-unicon/uPortal,cousquer/uPortal,GIP-RECIA/esup-uportal,ASU-Capstone/uPortal-Forked,vbonamy/esup-uportal,joansmith/uPortal,groybal/uPortal,Mines-Albi/esup-uportal,GIP-RECIA/esco-portail,vertein/uPortal,andrewstuart/uPortal,chasegawa/uPortal,vertein/uPortal,jl1955/uPortal5,joansmith/uPortal,Jasig/uPortal,vertein/uPortal,Jasig/SSP-Platform,jameswennmacher/uPortal,apetro/uPortal,vertein/uPortal,jameswennmacher/uPortal,phillips1021/uPortal,doodelicious/uPortal,jl1955/uPortal5,phillips1021/uPortal,vbonamy/esup-uportal,drewwills/uPortal,stalele/uPortal,drewwills/uPortal,GIP-RECIA
/esco-portail,Mines-Albi/esup-uportal,MichaelVose2/uPortal,kole9273/uPortal,EdiaEducationTechnology/uPortal,kole9273/uPortal,GIP-RECIA/esup-uportal,groybal/uPortal,jonathanmtran/uPortal,timlevett/uPortal,ASU-Capstone/uPortal-Forked,jl1955/uPortal5,mgillian/uPortal,joansmith/uPortal,GIP-RECIA/esup-uportal,chasegawa/uPortal,cousquer/uPortal,EsupPortail/esup-uportal,ASU-Capstone/uPortal-Forked,bjagg/uPortal,MichaelVose2/uPortal,jameswennmacher/uPortal,jonathanmtran/uPortal,ASU-Capstone/uPortal-Forked,andrewstuart/uPortal,kole9273/uPortal,MichaelVose2/uPortal,doodelicious/uPortal,MichaelVose2/uPortal,ASU-Capstone/uPortal,EsupPortail/esup-uportal,vbonamy/esup-uportal,chasegawa/uPortal,jhelmer-unicon/uPortal,stalele/uPortal,EdiaEducationTechnology/uPortal,vbonamy/esup-uportal,Jasig/SSP-Platform,ASU-Capstone/uPortal,stalele/uPortal,kole9273/uPortal,Jasig/SSP-Platform
/* Copyright 2001 The JA-SIG Collaborative. All rights reserved. * See license distributed with this file and * available online at http://www.uportal.org/license.html */ package org.jasig.portal.channels.permissionsmanager.commands; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import org.jasig.portal.channels.permissionsmanager.CommandFactory; import org.jasig.portal.channels.permissionsmanager.IPermissionCommand; import org.jasig.portal.channels.permissionsmanager.PermissionsSessionData; import org.jasig.portal.security.IPermission; import org.jasig.portal.security.IUpdatingPermissionManager; import org.jasig.portal.services.AuthorizationService; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.w3c.dom.Element; /** * An IPermissionCommand implementation that processes form data from the * CPermissionsManager matrix screen and records all permissions * * @author Alex Vigdor * @version $Revision$ */ public class AssignPermissions implements IPermissionCommand { private static final Log log = LogFactory.getLog(AssignPermissions.class); /** Creates new AssignPermissions */ public AssignPermissions () { } /* * expects to receive all permissions in form * key['permission//{owner}|{principal}|{activity}|{target}'] = value['INHERIT','GRANT','DENY'] */ public void execute (PermissionsSessionData session) throws Exception { if (log.isDebugEnabled()) log.debug("PermissionsManager->AssignPermissions processing"); Element root = session.XML.getDocumentElement(); Enumeration formkeys = session.runtimeData.getParameterNames(); HashMap owners = new HashMap(); while (formkeys.hasMoreElements()) { String key = (String)formkeys.nextElement(); if (key.indexOf("permission//") == 0) { PermissionHolder ph = new PermissionHolder(); String split1 = key.substring(12); ph.owner = split1.substring(0, split1.indexOf("|")); String split2 = split1.substring(split1.indexOf("|") + 1); ph.principal = 
split2.substring(0, split2.indexOf("|")); String split3 = split2.substring(split2.indexOf("|") + 1); ph.activity = split3.substring(0, split3.indexOf("|")); ph.target = split3.substring(split3.indexOf("|") + 1); ph.type = session.runtimeData.getParameter(key); if (log.isDebugEnabled()) log.debug("Processing " + ph.type + " permission o=" + ph.owner + " p=" + ph.principal + " a=" + ph.activity + " t=" + ph.target); if (!owners.containsKey(ph.owner)) { owners.put(ph.owner, new ArrayList()); } ((ArrayList)owners.get(ph.owner)).add(ph); } } String[] ownerkeys = (String[])owners.keySet().toArray(new String[0]); for (int i = 0; i < ownerkeys.length; i++) { String owner = ownerkeys[i]; IUpdatingPermissionManager upm = AuthorizationService.instance().newUpdatingPermissionManager(owner); ArrayList phs = (ArrayList)owners.get(owner); IPermission[] ipsd = pHolder2DeleteArray(upm, phs); if (log.isDebugEnabled()) log.debug("removing " + String.valueOf(ipsd.length) + " old permissions"); upm.removePermissions(ipsd); IPermission[] ipsa = pHolder2AddArray(upm, phs); if (log.isDebugEnabled()) log.debug("adding " + String.valueOf(ipsa.length) + " new permissions"); upm.addPermissions(ipsa); } IPermissionCommand wrapit = CommandFactory.get("Cancel"); wrapit.execute(session); } private class PermissionHolder { String owner; String principal; String activity; String target; String type; } private IPermission[] pHolder2DeleteArray (IUpdatingPermissionManager upm, ArrayList holders) { ArrayList rlist = new ArrayList(); for (int i = 0; i < holders.size(); i++) { try { PermissionHolder ph = (PermissionHolder)holders.get(i); IPermission p = upm.newPermission(null); p.setPrincipal(ph.principal); p.setActivity(ph.activity); p.setTarget(ph.target); rlist.add(p); } catch (Exception e) { log.error(e, e); } } return (IPermission[])rlist.toArray(new IPermission[0]); } private IPermission[] pHolder2AddArray (IUpdatingPermissionManager upm, ArrayList holders) { ArrayList rlist = new ArrayList(); for 
(int i = 0; i < holders.size(); i++) { try { PermissionHolder ph = (PermissionHolder)holders.get(i); IPermission p = upm.newPermission(null); p.setPrincipal(ph.principal); p.setActivity(ph.activity); p.setTarget(ph.target); if (ph.type.equals("GRANT")) { p.setType("GRANT"); rlist.add(p); } else if (ph.type.equals("DENY")) { p.setType("DENY"); rlist.add(p); } } catch (Exception e) { log.error(e, e); } } return (IPermission[])rlist.toArray(new IPermission[0]); } }
source/org/jasig/portal/channels/permissionsmanager/commands/AssignPermissions.java
/* Copyright 2001 The JA-SIG Collaborative. All rights reserved. * See license distributed with this file and * available online at http://www.uportal.org/license.html */ package org.jasig.portal.channels.permissionsmanager.commands; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import org.jasig.portal.channels.permissionsmanager.CommandFactory; import org.jasig.portal.channels.permissionsmanager.IPermissionCommand; import org.jasig.portal.channels.permissionsmanager.PermissionsSessionData; import org.jasig.portal.security.IPermission; import org.jasig.portal.security.IUpdatingPermissionManager; import org.jasig.portal.services.AuthorizationService; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.w3c.dom.Element; /** * An IPermissionCommand implementation that processes form data from the * CPermissionsManager matrix screen and records all permissions * * @author Alex Vigdor * @version $Revision$ */ public class AssignPermissions implements IPermissionCommand { private static final Log log = LogFactory.getLog(AssignPermissions.class); /** Creates new AssignPermissions */ public AssignPermissions () { } /* * expects to receive all permissions in form * key['permission//{owner}|{principal}|{activity}|{target}'] = value['INHERIT','GRANT','DENY'] */ public void execute (PermissionsSessionData session) throws Exception { log.debug("PermissionsManager->AssignPermissions processing"); Element root = session.XML.getDocumentElement(); Enumeration formkeys = session.runtimeData.getParameterNames(); HashMap owners = new HashMap(); while (formkeys.hasMoreElements()) { String key = (String)formkeys.nextElement(); if (key.indexOf("permission//") == 0) { PermissionHolder ph = new PermissionHolder(); String split1 = key.substring(12); ph.owner = split1.substring(0, split1.indexOf("|")); String split2 = split1.substring(split1.indexOf("|") + 1); ph.principal = split2.substring(0, 
split2.indexOf("|")); String split3 = split2.substring(split2.indexOf("|") + 1); ph.activity = split3.substring(0, split3.indexOf("|")); ph.target = split3.substring(split3.indexOf("|") + 1); ph.type = session.runtimeData.getParameter(key); log.debug("Processing " + ph.type + " permission o=" + ph.owner + " p=" + ph.principal + " a=" + ph.activity + " t=" + ph.target); if (!owners.containsKey(ph.owner)) { owners.put(ph.owner, new ArrayList()); } ((ArrayList)owners.get(ph.owner)).add(ph); } } String[] ownerkeys = (String[])owners.keySet().toArray(new String[0]); for (int i = 0; i < ownerkeys.length; i++) { String owner = ownerkeys[i]; IUpdatingPermissionManager upm = AuthorizationService.instance().newUpdatingPermissionManager(owner); ArrayList phs = (ArrayList)owners.get(owner); IPermission[] ipsd = pHolder2DeleteArray(upm, phs); log.debug("removing " + String.valueOf(ipsd.length) + " old permissions"); upm.removePermissions(ipsd); IPermission[] ipsa = pHolder2AddArray(upm, phs); log.debug("adding " + String.valueOf(ipsa.length) + " new permissions"); upm.addPermissions(ipsa); } IPermissionCommand wrapit = CommandFactory.get("Cancel"); wrapit.execute(session); } private class PermissionHolder { String owner; String principal; String activity; String target; String type; } private IPermission[] pHolder2DeleteArray (IUpdatingPermissionManager upm, ArrayList holders) { ArrayList rlist = new ArrayList(); for (int i = 0; i < holders.size(); i++) { try { PermissionHolder ph = (PermissionHolder)holders.get(i); IPermission p = upm.newPermission(null); p.setPrincipal(ph.principal); p.setActivity(ph.activity); p.setTarget(ph.target); rlist.add(p); } catch (Exception e) { log.error(e, e); } } return (IPermission[])rlist.toArray(new IPermission[0]); } private IPermission[] pHolder2AddArray (IUpdatingPermissionManager upm, ArrayList holders) { ArrayList rlist = new ArrayList(); for (int i = 0; i < holders.size(); i++) { try { PermissionHolder ph = 
(PermissionHolder)holders.get(i); IPermission p = upm.newPermission(null); p.setPrincipal(ph.principal); p.setActivity(ph.activity); p.setTarget(ph.target); if (ph.type.equals("GRANT")) { p.setType("GRANT"); rlist.add(p); } else if (ph.type.equals("DENY")) { p.setType("DENY"); rlist.add(p); } } catch (Exception e) { log.error(e); } } return (IPermission[])rlist.toArray(new IPermission[0]); } }
conditionalized logging below Error level. fixed one instance of log.error(e) to be log.error(e,e); git-svn-id: 477788cc2a8229a747c5b8073e47c1d0f6ec0604@9435 f5dbab47-78f9-eb45-b975-e544023573eb
source/org/jasig/portal/channels/permissionsmanager/commands/AssignPermissions.java
conditionalized logging below Error level. fixed one instance of log.error(e) to be log.error(e,e);
<ide><path>ource/org/jasig/portal/channels/permissionsmanager/commands/AssignPermissions.java <ide> * key['permission//{owner}|{principal}|{activity}|{target}'] = value['INHERIT','GRANT','DENY'] <ide> */ <ide> public void execute (PermissionsSessionData session) throws Exception { <del> <del> log.debug("PermissionsManager->AssignPermissions processing"); <add> if (log.isDebugEnabled()) <add> log.debug("PermissionsManager->AssignPermissions processing"); <ide> Element root = session.XML.getDocumentElement(); <ide> Enumeration formkeys = session.runtimeData.getParameterNames(); <ide> HashMap owners = new HashMap(); <ide> ph.activity = split3.substring(0, split3.indexOf("|")); <ide> ph.target = split3.substring(split3.indexOf("|") + 1); <ide> ph.type = session.runtimeData.getParameter(key); <del> log.debug("Processing " <add> if (log.isDebugEnabled()) <add> log.debug("Processing " <ide> + ph.type + " permission o=" + ph.owner + " p=" <ide> + ph.principal + " a=" + ph.activity + " t=" + <ide> ph.target); <ide> IUpdatingPermissionManager upm = AuthorizationService.instance().newUpdatingPermissionManager(owner); <ide> ArrayList phs = (ArrayList)owners.get(owner); <ide> IPermission[] ipsd = pHolder2DeleteArray(upm, phs); <del> log.debug("removing " + String.valueOf(ipsd.length) <add> if (log.isDebugEnabled()) <add> log.debug("removing " + String.valueOf(ipsd.length) <ide> + " old permissions"); <ide> upm.removePermissions(ipsd); <ide> IPermission[] ipsa = pHolder2AddArray(upm, phs); <del> log.debug("adding " + String.valueOf(ipsa.length) <add> if (log.isDebugEnabled()) <add> log.debug("adding " + String.valueOf(ipsa.length) <ide> + " new permissions"); <ide> upm.addPermissions(ipsa); <ide> } <ide> rlist.add(p); <ide> } <ide> } catch (Exception e) { <del> log.error(e); <add> log.error(e, e); <ide> } <ide> } <ide> return (IPermission[])rlist.toArray(new IPermission[0]);
Java
apache-2.0
a57aa914ee460cbbf0ba7982449a78f69c047583
0
cunningt/camel,gnodet/camel,christophd/camel,pax95/camel,tdiesler/camel,pax95/camel,mcollovati/camel,nikhilvibhav/camel,christophd/camel,tadayosi/camel,apache/camel,pax95/camel,cunningt/camel,adessaigne/camel,gnodet/camel,DariusX/camel,tadayosi/camel,nikhilvibhav/camel,DariusX/camel,nicolaferraro/camel,cunningt/camel,tdiesler/camel,mcollovati/camel,gnodet/camel,tadayosi/camel,alvinkwekel/camel,nikhilvibhav/camel,pax95/camel,alvinkwekel/camel,gnodet/camel,tdiesler/camel,tadayosi/camel,apache/camel,pmoerenhout/camel,tadayosi/camel,adessaigne/camel,cunningt/camel,nicolaferraro/camel,apache/camel,pmoerenhout/camel,christophd/camel,pax95/camel,nicolaferraro/camel,alvinkwekel/camel,adessaigne/camel,pmoerenhout/camel,pmoerenhout/camel,christophd/camel,apache/camel,adessaigne/camel,adessaigne/camel,mcollovati/camel,cunningt/camel,DariusX/camel,gnodet/camel,adessaigne/camel,alvinkwekel/camel,tdiesler/camel,pax95/camel,tdiesler/camel,nicolaferraro/camel,pmoerenhout/camel,apache/camel,DariusX/camel,nikhilvibhav/camel,apache/camel,tdiesler/camel,mcollovati/camel,pmoerenhout/camel,christophd/camel,cunningt/camel,christophd/camel,tadayosi/camel
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.aws2.ecs; import org.apache.camel.Endpoint; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.support.DefaultProducer; import org.apache.camel.util.ObjectHelper; import org.apache.camel.util.URISupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.awssdk.awscore.exception.AwsServiceException; import software.amazon.awssdk.services.ecs.EcsClient; import software.amazon.awssdk.services.ecs.model.CreateClusterRequest; import software.amazon.awssdk.services.ecs.model.CreateClusterResponse; import software.amazon.awssdk.services.ecs.model.DeleteClusterRequest; import software.amazon.awssdk.services.ecs.model.DeleteClusterResponse; import software.amazon.awssdk.services.ecs.model.DescribeClustersRequest; import software.amazon.awssdk.services.ecs.model.DescribeClustersResponse; import software.amazon.awssdk.services.ecs.model.ListClustersRequest; import software.amazon.awssdk.services.ecs.model.ListClustersRequest.Builder; import software.amazon.awssdk.services.ecs.model.ListClustersResponse; /** * A Producer which sends messages to the Amazon ECS Service SDK v2 * <a 
href="http://aws.amazon.com/ecs/">AWS ECS</a> */ public class ECS2Producer extends DefaultProducer { private static final Logger LOG = LoggerFactory.getLogger(ECS2Producer.class); private transient String ecsProducerToString; public ECS2Producer(Endpoint endpoint) { super(endpoint); } @Override public void process(Exchange exchange) throws Exception { switch (determineOperation(exchange)) { case listClusters: listClusters(getEndpoint().getEcsClient(), exchange); break; case describeCluster: describeCluster(getEndpoint().getEcsClient(), exchange); break; case createCluster: createCluster(getEndpoint().getEcsClient(), exchange); break; case deleteCluster: deleteCluster(getEndpoint().getEcsClient(), exchange); break; default: throw new IllegalArgumentException("Unsupported operation"); } } private ECS2Operations determineOperation(Exchange exchange) { ECS2Operations operation = exchange.getIn().getHeader(ECS2Constants.OPERATION, ECS2Operations.class); if (operation == null) { operation = getConfiguration().getOperation(); } return operation; } protected ECS2Configuration getConfiguration() { return getEndpoint().getConfiguration(); } @Override public String toString() { if (ecsProducerToString == null) { ecsProducerToString = "ECSProducer[" + URISupport.sanitizeUri(getEndpoint().getEndpointUri()) + "]"; } return ecsProducerToString; } @Override public ECS2Endpoint getEndpoint() { return (ECS2Endpoint)super.getEndpoint(); } private void listClusters(EcsClient ecsClient, Exchange exchange) { if (getConfiguration().isPojoRequest()) { if (ObjectHelper.isNotEmpty(exchange.getIn().getBody())) { if (exchange.getIn().getBody() instanceof ListClustersRequest) { Object payload = exchange.getIn().getBody(); ListClustersResponse result; try { ListClustersRequest request = (ListClustersRequest)payload; result = ecsClient.listClusters(request); } catch (AwsServiceException ase) { LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); throw 
ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } } else { Builder builder = ListClustersRequest.builder(); if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.MAX_RESULTS))) { int maxRes = exchange.getIn().getHeader(ECS2Constants.MAX_RESULTS, Integer.class); builder.maxResults(maxRes); } ListClustersResponse result; try { ListClustersRequest request = builder.build(); result = ecsClient.listClusters(request); } catch (AwsServiceException ase) { LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } private void createCluster(EcsClient ecsClient, Exchange exchange) { if (ObjectHelper.isNotEmpty(exchange.getIn().getBody())) { if (exchange.getIn().getBody() instanceof CreateClusterRequest) { Object payload = exchange.getIn().getBody(); CreateClusterResponse result; try { CreateClusterRequest request = (CreateClusterRequest)payload; result = ecsClient.createCluster(request); } catch (AwsServiceException ase) { LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } else { CreateClusterRequest.Builder builder = CreateClusterRequest.builder(); if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { String name = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); builder.clusterName(name); } CreateClusterResponse result; try { CreateClusterRequest request = builder.build(); result = ecsClient.createCluster(request); } catch (AwsServiceException ase) { LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } private void describeCluster(EcsClient ecsClient, Exchange 
exchange) { if (ObjectHelper.isNotEmpty(exchange.getIn().getBody())) { if (exchange.getIn().getBody() instanceof DescribeClustersRequest) { Object payload = exchange.getIn().getBody(); DescribeClustersResponse result; try { DescribeClustersRequest request = (DescribeClustersRequest)payload; result = ecsClient.describeClusters(request); } catch (AwsServiceException ase) { LOG.trace("Describe Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } else { DescribeClustersRequest.Builder builder = DescribeClustersRequest.builder(); if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { String clusterName = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); builder.clusters(clusterName); } DescribeClustersResponse result; try { DescribeClustersRequest request = builder.build(); result = ecsClient.describeClusters(request); } catch (AwsServiceException ase) { LOG.trace("Describe Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } private void deleteCluster(EcsClient ecsClient, Exchange exchange) { if (ObjectHelper.isNotEmpty(exchange.getIn().getBody())) { if (exchange.getIn().getBody() instanceof DeleteClusterRequest) { Object payload = exchange.getIn().getBody(); DeleteClusterResponse result; try { DeleteClusterRequest request = (DeleteClusterRequest)payload; result = ecsClient.deleteCluster(request); } catch (AwsServiceException ase) { LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } else { DeleteClusterRequest.Builder builder = DeleteClusterRequest.builder(); if 
(ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { String name = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); builder.cluster(name); } else { throw new IllegalArgumentException("Cluster name must be specified"); } DeleteClusterResponse result; try { DeleteClusterRequest request = builder.build(); result = ecsClient.deleteCluster(request); } catch (AwsServiceException ase) { LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } public static Message getMessageForResponse(final Exchange exchange) { return exchange.getMessage(); } }
components/camel-aws2-ecs/src/main/java/org/apache/camel/component/aws2/ecs/ECS2Producer.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.aws2.ecs; import org.apache.camel.Endpoint; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.support.DefaultProducer; import org.apache.camel.util.ObjectHelper; import org.apache.camel.util.URISupport; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import software.amazon.awssdk.awscore.exception.AwsServiceException; import software.amazon.awssdk.services.ecs.EcsClient; import software.amazon.awssdk.services.ecs.model.CreateClusterRequest; import software.amazon.awssdk.services.ecs.model.CreateClusterResponse; import software.amazon.awssdk.services.ecs.model.DeleteClusterRequest; import software.amazon.awssdk.services.ecs.model.DeleteClusterResponse; import software.amazon.awssdk.services.ecs.model.DescribeClustersRequest; import software.amazon.awssdk.services.ecs.model.DescribeClustersResponse; import software.amazon.awssdk.services.ecs.model.ListClustersRequest; import software.amazon.awssdk.services.ecs.model.ListClustersRequest.Builder; import software.amazon.awssdk.services.ecs.model.ListClustersResponse; /** * A Producer which sends messages to the Amazon ECS Service SDK v2 * <a 
href="http://aws.amazon.com/ecs/">AWS ECS</a> */ public class ECS2Producer extends DefaultProducer { private static final Logger LOG = LoggerFactory.getLogger(ECS2Producer.class); private transient String ecsProducerToString; public ECS2Producer(Endpoint endpoint) { super(endpoint); } @Override public void process(Exchange exchange) throws Exception { switch (determineOperation(exchange)) { case listClusters: listClusters(getEndpoint().getEcsClient(), exchange); break; case describeCluster: describeCluster(getEndpoint().getEcsClient(), exchange); break; case createCluster: createCluster(getEndpoint().getEcsClient(), exchange); break; case deleteCluster: deleteCluster(getEndpoint().getEcsClient(), exchange); break; default: throw new IllegalArgumentException("Unsupported operation"); } } private ECS2Operations determineOperation(Exchange exchange) { ECS2Operations operation = exchange.getIn().getHeader(ECS2Constants.OPERATION, ECS2Operations.class); if (operation == null) { operation = getConfiguration().getOperation(); } return operation; } protected ECS2Configuration getConfiguration() { return getEndpoint().getConfiguration(); } @Override public String toString() { if (ecsProducerToString == null) { ecsProducerToString = "ECSProducer[" + URISupport.sanitizeUri(getEndpoint().getEndpointUri()) + "]"; } return ecsProducerToString; } @Override public ECS2Endpoint getEndpoint() { return (ECS2Endpoint)super.getEndpoint(); } private void listClusters(EcsClient ecsClient, Exchange exchange) { if (getConfiguration().isPojoRequest()) { if (ObjectHelper.isNotEmpty(exchange.getIn().getBody())) { if (exchange.getIn().getBody() instanceof ListClustersRequest) { Object payload = exchange.getIn().getBody(); ListClustersResponse result; try { ListClustersRequest request = (ListClustersRequest) payload; result = ecsClient.listClusters(request); } catch (AwsServiceException ase) { LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); 
throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } } else { Builder builder = ListClustersRequest.builder(); if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.MAX_RESULTS))) { int maxRes = exchange.getIn().getHeader(ECS2Constants.MAX_RESULTS, Integer.class); builder.maxResults(maxRes); } ListClustersResponse result; try { ListClustersRequest request = builder.build(); result = ecsClient.listClusters(request); } catch (AwsServiceException ase) { LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } private void createCluster(EcsClient ecsClient, Exchange exchange) { if (ObjectHelper.isNotEmpty(exchange.getIn().getBody())) { if (exchange.getIn().getBody() instanceof CreateClusterRequest) { Object payload = exchange.getIn().getBody(); CreateClusterResponse result; try { CreateClusterRequest request = (CreateClusterRequest) payload; result = ecsClient.createCluster(request); } catch (AwsServiceException ase) { LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } else { CreateClusterRequest.Builder builder = CreateClusterRequest.builder(); if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { String name = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); builder.clusterName(name); } CreateClusterResponse result; try { CreateClusterRequest request = builder.build(); result = ecsClient.createCluster(request); } catch (AwsServiceException ase) { LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } private void describeCluster(EcsClient ecsClient, 
Exchange exchange) { if (ObjectHelper.isNotEmpty(exchange.getIn().getBody())) { if (exchange.getIn().getBody() instanceof DescribeClustersRequest) { Object payload = exchange.getIn().getBody(); DescribeClustersResponse result; try { DescribeClustersRequest request = (DescribeClustersRequest) payload; result = ecsClient.describeClusters(request); } catch (AwsServiceException ase) { LOG.trace("Describe Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } else { DescribeClustersRequest.Builder builder = DescribeClustersRequest.builder(); if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { String clusterName = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); builder.clusters(clusterName); } DescribeClustersResponse result; try { DescribeClustersRequest request = builder.build(); result = ecsClient.describeClusters(request); } catch (AwsServiceException ase) { LOG.trace("Describe Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } private void deleteCluster(EcsClient ecsClient, Exchange exchange) { if (ObjectHelper.isNotEmpty(exchange.getIn().getBody())) { if (exchange.getIn().getBody() instanceof DeleteClusterRequest) { Object payload = exchange.getIn().getBody(); DeleteClusterResponse result; try { DeleteClusterRequest request = (DeleteClusterRequest) payload; result = ecsClient.deleteCluster(request); } catch (AwsServiceException ase) { LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } else { DeleteClusterRequest.Builder builder = DeleteClusterRequest.builder(); if 
(ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { String name = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); builder.cluster(name); } else { throw new IllegalArgumentException("Cluster name must be specified"); } DeleteClusterResponse result; try { DeleteClusterRequest request = builder.build(); result = ecsClient.deleteCluster(request); } catch (AwsServiceException ase) { LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); throw ase; } Message message = getMessageForResponse(exchange); message.setBody(result); } } public static Message getMessageForResponse(final Exchange exchange) { return exchange.getMessage(); } }
CAMEL-14868 - Camel-AWS2-*: Where possible, give the possiblity to the end user to pass an AWS Request pojo as body, aws2-ecs fixed CS
components/camel-aws2-ecs/src/main/java/org/apache/camel/component/aws2/ecs/ECS2Producer.java
CAMEL-14868 - Camel-AWS2-*: Where possible, give the possiblity to the end user to pass an AWS Request pojo as body, aws2-ecs fixed CS
<ide><path>omponents/camel-aws2-ecs/src/main/java/org/apache/camel/component/aws2/ecs/ECS2Producer.java <ide> Object payload = exchange.getIn().getBody(); <ide> ListClustersResponse result; <ide> try { <del> ListClustersRequest request = (ListClustersRequest) payload; <add> ListClustersRequest request = (ListClustersRequest)payload; <ide> result = ecsClient.listClusters(request); <ide> } catch (AwsServiceException ase) { <ide> LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); <ide> } <ide> } <ide> } else { <del> Builder builder = ListClustersRequest.builder(); <del> if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.MAX_RESULTS))) { <del> int maxRes = exchange.getIn().getHeader(ECS2Constants.MAX_RESULTS, Integer.class); <del> builder.maxResults(maxRes); <del> } <del> ListClustersResponse result; <del> try { <del> ListClustersRequest request = builder.build(); <del> result = ecsClient.listClusters(request); <del> } catch (AwsServiceException ase) { <del> LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); <del> throw ase; <del> } <del> Message message = getMessageForResponse(exchange); <del> message.setBody(result); <add> Builder builder = ListClustersRequest.builder(); <add> if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.MAX_RESULTS))) { <add> int maxRes = exchange.getIn().getHeader(ECS2Constants.MAX_RESULTS, Integer.class); <add> builder.maxResults(maxRes); <add> } <add> ListClustersResponse result; <add> try { <add> ListClustersRequest request = builder.build(); <add> result = ecsClient.listClusters(request); <add> } catch (AwsServiceException ase) { <add> LOG.trace("List Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); <add> throw ase; <add> } <add> Message message = getMessageForResponse(exchange); <add> message.setBody(result); <ide> } <ide> } <ide> <ide> Object payload = 
exchange.getIn().getBody(); <ide> CreateClusterResponse result; <ide> try { <del> CreateClusterRequest request = (CreateClusterRequest) payload; <add> CreateClusterRequest request = (CreateClusterRequest)payload; <ide> result = ecsClient.createCluster(request); <ide> } catch (AwsServiceException ase) { <ide> LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); <ide> Message message = getMessageForResponse(exchange); <ide> message.setBody(result); <ide> } <del> } else { <del> CreateClusterRequest.Builder builder = CreateClusterRequest.builder(); <del> if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { <del> String name = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); <del> builder.clusterName(name); <del> } <del> CreateClusterResponse result; <del> try { <del> CreateClusterRequest request = builder.build(); <del> result = ecsClient.createCluster(request); <del> } catch (AwsServiceException ase) { <del> LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); <del> throw ase; <del> } <del> Message message = getMessageForResponse(exchange); <del> message.setBody(result); <del> } <add> } else { <add> CreateClusterRequest.Builder builder = CreateClusterRequest.builder(); <add> if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { <add> String name = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); <add> builder.clusterName(name); <add> } <add> CreateClusterResponse result; <add> try { <add> CreateClusterRequest request = builder.build(); <add> result = ecsClient.createCluster(request); <add> } catch (AwsServiceException ase) { <add> LOG.trace("Create Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); <add> throw ase; <add> } <add> Message message = getMessageForResponse(exchange); <add> message.setBody(result); <add> } <ide> } <ide> <ide> 
private void describeCluster(EcsClient ecsClient, Exchange exchange) { <ide> Object payload = exchange.getIn().getBody(); <ide> DescribeClustersResponse result; <ide> try { <del> DescribeClustersRequest request = (DescribeClustersRequest) payload; <add> DescribeClustersRequest request = (DescribeClustersRequest)payload; <ide> result = ecsClient.describeClusters(request); <ide> } catch (AwsServiceException ase) { <ide> LOG.trace("Describe Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); <ide> Message message = getMessageForResponse(exchange); <ide> message.setBody(result); <ide> } <del> } else { <del> DescribeClustersRequest.Builder builder = DescribeClustersRequest.builder(); <del> if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { <del> String clusterName = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); <del> builder.clusters(clusterName); <del> } <del> DescribeClustersResponse result; <del> try { <del> DescribeClustersRequest request = builder.build(); <del> result = ecsClient.describeClusters(request); <del> } catch (AwsServiceException ase) { <del> LOG.trace("Describe Clusters command returned the error code {}", ase.awsErrorDetails().errorCode()); <del> throw ase; <del> } <del> Message message = getMessageForResponse(exchange); <del> message.setBody(result); <del> } <add> } else { <add> DescribeClustersRequest.Builder builder = DescribeClustersRequest.builder(); <add> if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { <add> String clusterName = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); <add> builder.clusters(clusterName); <add> } <add> DescribeClustersResponse result; <add> try { <add> DescribeClustersRequest request = builder.build(); <add> result = ecsClient.describeClusters(request); <add> } catch (AwsServiceException ase) { <add> LOG.trace("Describe Clusters command returned the error code {}", 
ase.awsErrorDetails().errorCode()); <add> throw ase; <add> } <add> Message message = getMessageForResponse(exchange); <add> message.setBody(result); <add> } <ide> } <ide> <ide> private void deleteCluster(EcsClient ecsClient, Exchange exchange) { <ide> Object payload = exchange.getIn().getBody(); <ide> DeleteClusterResponse result; <ide> try { <del> DeleteClusterRequest request = (DeleteClusterRequest) payload; <add> DeleteClusterRequest request = (DeleteClusterRequest)payload; <ide> result = ecsClient.deleteCluster(request); <ide> } catch (AwsServiceException ase) { <ide> LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); <ide> Message message = getMessageForResponse(exchange); <ide> message.setBody(result); <ide> } <del> } else { <del> DeleteClusterRequest.Builder builder = DeleteClusterRequest.builder(); <del> if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { <del> String name = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); <del> builder.cluster(name); <del> } else { <del> throw new IllegalArgumentException("Cluster name must be specified"); <del> } <del> DeleteClusterResponse result; <del> try { <del> DeleteClusterRequest request = builder.build(); <del> result = ecsClient.deleteCluster(request); <del> } catch (AwsServiceException ase) { <del> LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); <del> throw ase; <del> } <del> Message message = getMessageForResponse(exchange); <del> message.setBody(result); <del> } <add> } else { <add> DeleteClusterRequest.Builder builder = DeleteClusterRequest.builder(); <add> if (ObjectHelper.isNotEmpty(exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME))) { <add> String name = exchange.getIn().getHeader(ECS2Constants.CLUSTER_NAME, String.class); <add> builder.cluster(name); <add> } else { <add> throw new IllegalArgumentException("Cluster name must be specified"); <add> } 
<add> DeleteClusterResponse result; <add> try { <add> DeleteClusterRequest request = builder.build(); <add> result = ecsClient.deleteCluster(request); <add> } catch (AwsServiceException ase) { <add> LOG.trace("Delete Cluster command returned the error code {}", ase.awsErrorDetails().errorCode()); <add> throw ase; <add> } <add> Message message = getMessageForResponse(exchange); <add> message.setBody(result); <add> } <ide> } <ide> <ide> public static Message getMessageForResponse(final Exchange exchange) {
Java
lgpl-2.1
00f7b0b6b65b439da74f7ad15731ae38a6cb7932
0
nberger/jcifs,nberger/jcifs,kohsuke/jcifs,nberger/jcifs,eolivelli/jcifs,OpenDataSpace/jcifs
/* jcifs smb client library in Java * Copyright (C) 2000 "Michael B. Allen" <jcifs at samba dot org> * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package jcifs.smb; import java.net.URL; import java.net.UnknownHostException; import java.net.MalformedURLException; import java.io.InputStream; import java.io.IOException; import java.io.InterruptedIOException; import jcifs.util.transport.TransportException; /** * This InputStream can read bytes from a file on an SMB file server. Offsets are 64 bits. */ public class SmbFileInputStream extends InputStream { private long fp; private int readSize, openFlags, access; private byte[] tmp = new byte[1]; private Long timeout; SmbFile file; /** * Creates an {@link java.io.InputStream} for reading bytes from a file on * an SMB server addressed by the <code>url</code> parameter. See {@link * jcifs.smb.SmbFile} for a detailed description and examples of the smb * URL syntax. * * @param url An smb URL string representing the file to read from */ public SmbFileInputStream( String url ) throws SmbException, MalformedURLException, UnknownHostException { this( new SmbFile( url )); } /** * Creates an {@link java.io.InputStream} for reading bytes from a file on * an SMB server represented by the {@link jcifs.smb.SmbFile} parameter. 
See * {@link jcifs.smb.SmbFile} for a detailed description and examples of * the smb URL syntax. * * @param file An <code>SmbFile</code> specifying the file to read from */ public SmbFileInputStream( SmbFile file ) throws SmbException, MalformedURLException, UnknownHostException { this( file, SmbFile.O_RDONLY ); } SmbFileInputStream( SmbFile file, int openFlags ) throws SmbException, MalformedURLException, UnknownHostException { this.file = file; this.openFlags = openFlags & 0xFFFF; this.access = (openFlags >>> 16) & 0xFFFF; if (file.type != SmbFile.TYPE_NAMED_PIPE) { file.open( openFlags, access, SmbFile.ATTR_NORMAL, 0 ); this.openFlags &= ~(SmbFile.O_CREAT | SmbFile.O_TRUNC); } else { file.connect0(); } readSize = Math.min( file.tree.session.transport.rcv_buf_size - 70, file.tree.session.transport.server.maxBufferSize - 70 ); } /** * Sets the timeout for read. */ public void setTimeout(long timeout) { timeout = new Long(timeout); } protected IOException seToIoe(SmbException se) { IOException ioe = se; Throwable root = se.getRootCause(); if (root instanceof TransportException) { ioe = (TransportException)root; root = ((TransportException)ioe).getRootCause(); } if (root instanceof InterruptedException) { ioe = new InterruptedIOException(root.getMessage()); ioe.initCause(root); } return ioe; } /** * Closes this input stream and releases any system resources associated with the stream. * * @throws IOException if a network error occurs */ public void close() throws IOException { try { file.close(); tmp = null; } catch (SmbException se) { throw seToIoe(se); } } /** * Reads a byte of data from this input stream. * * @throws IOException if a network error occurs */ public int read() throws IOException { // need oplocks to cache otherwise use BufferedInputStream if( read( tmp, 0, 1 ) == -1 ) { return -1; } return tmp[0] & 0xFF; } /** * Reads up to b.length bytes of data from this input stream into an array of bytes. 
* * @throws IOException if a network error occurs */ public int read( byte[] b ) throws IOException { return read( b, 0, b.length ); } /** * Reads up to len bytes of data from this input stream into an array of bytes. * * @throws IOException if a network error occurs */ public int read( byte[] b, int off, int len ) throws IOException { return readDirect(b, off, len); } public int readDirect( byte[] b, int off, int len ) throws IOException { if( len <= 0 ) { return 0; } long start = fp; if( tmp == null ) { throw new IOException( "Bad file descriptor" ); } // ensure file is open file.open( openFlags, access, SmbFile.ATTR_NORMAL, 0 ); /* * Read AndX Request / Response */ if( file.log.level >= 4 ) file.log.println( "read: fid=" + file.fid + ",off=" + off + ",len=" + len ); SmbComReadAndXResponse response = new SmbComReadAndXResponse( b, off ); if( file.type == SmbFile.TYPE_NAMED_PIPE ) { response.responseTimeout = 0; } int r, n; do { r = len > readSize ? readSize : len; if( file.log.level >= 4 ) file.log.println( "read: len=" + len + ",r=" + r + ",fp=" + fp ); try { SmbComReadAndX request = new SmbComReadAndX( file.fid, fp, r, null ); if( file.type == SmbFile.TYPE_NAMED_PIPE ) { request.minCount = request.maxCount = request.remaining = 1024; } request.timeout = timeout; file.send( request, response ); } catch( SmbException se ) { if( file.type == SmbFile.TYPE_NAMED_PIPE && se.getNtStatus() == NtStatus.NT_STATUS_PIPE_BROKEN ) { return -1; } throw seToIoe(se); } if(( n = response.dataLength ) <= 0 ) { return (int)((fp - start) > 0L ? fp - start : -1); } fp += n; len -= n; response.off += n; } while( len > 0 && n == r ); return (int)(fp - start); } /** * This stream class is unbuffered. Therefore this method will always * return 0 for streams connected to regular files. However, a * stream created from a Named Pipe this method will query the server using a * "peek named pipe" operation and return the number of available bytes * on the server. 
*/ public int available() throws IOException { SmbNamedPipe pipe; TransPeekNamedPipe req; TransPeekNamedPipeResponse resp; if( file.type != SmbFile.TYPE_NAMED_PIPE ) { return 0; } try { pipe = (SmbNamedPipe)file; file.open(SmbFile.O_EXCL, pipe.pipeType & 0xFF0000, SmbFile.ATTR_NORMAL, 0 ); req = new TransPeekNamedPipe( file.unc, file.fid ); resp = new TransPeekNamedPipeResponse( pipe ); pipe.send( req, resp ); if( resp.status == TransPeekNamedPipeResponse.STATUS_DISCONNECTED || resp.status == TransPeekNamedPipeResponse.STATUS_SERVER_END_CLOSED ) { file.opened = false; return 0; } return resp.available; } catch (SmbException se) { throw seToIoe(se); } } /** * Skip n bytes of data on this stream. This operation will not result * in any IO with the server. Unlink <tt>InputStream</tt> value less than * the one provided will not be returned if it exceeds the end of the file * (if this is a problem let us know). */ public long skip( long n ) throws IOException { if (n > 0) { fp += n; return n; } return 0; } }
src/jcifs/smb/SmbFileInputStream.java
/* jcifs smb client library in Java * Copyright (C) 2000 "Michael B. Allen" <jcifs at samba dot org> * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package jcifs.smb; import java.net.URL; import java.net.UnknownHostException; import java.net.MalformedURLException; import java.io.InputStream; import java.io.IOException; import java.io.InterruptedIOException; import jcifs.util.transport.TransportException; /** * This InputStream can read bytes from a file on an SMB file server. Offsets are 64 bits. */ public class SmbFileInputStream extends InputStream { private long fp; private int readSize, openFlags, access; private byte[] tmp = new byte[1]; SmbFile file; /** * Creates an {@link java.io.InputStream} for reading bytes from a file on * an SMB server addressed by the <code>url</code> parameter. See {@link * jcifs.smb.SmbFile} for a detailed description and examples of the smb * URL syntax. * * @param url An smb URL string representing the file to read from */ public SmbFileInputStream( String url ) throws SmbException, MalformedURLException, UnknownHostException { this( new SmbFile( url )); } /** * Creates an {@link java.io.InputStream} for reading bytes from a file on * an SMB server represented by the {@link jcifs.smb.SmbFile} parameter. 
See * {@link jcifs.smb.SmbFile} for a detailed description and examples of * the smb URL syntax. * * @param file An <code>SmbFile</code> specifying the file to read from */ public SmbFileInputStream( SmbFile file ) throws SmbException, MalformedURLException, UnknownHostException { this( file, SmbFile.O_RDONLY ); } SmbFileInputStream( SmbFile file, int openFlags ) throws SmbException, MalformedURLException, UnknownHostException { this.file = file; this.openFlags = openFlags & 0xFFFF; this.access = (openFlags >>> 16) & 0xFFFF; if (file.type != SmbFile.TYPE_NAMED_PIPE) { file.open( openFlags, access, SmbFile.ATTR_NORMAL, 0 ); this.openFlags &= ~(SmbFile.O_CREAT | SmbFile.O_TRUNC); } else { file.connect0(); } readSize = Math.min( file.tree.session.transport.rcv_buf_size - 70, file.tree.session.transport.server.maxBufferSize - 70 ); } protected IOException seToIoe(SmbException se) { IOException ioe = se; Throwable root = se.getRootCause(); if (root instanceof TransportException) { ioe = (TransportException)root; root = ((TransportException)ioe).getRootCause(); } if (root instanceof InterruptedException) { ioe = new InterruptedIOException(root.getMessage()); ioe.initCause(root); } return ioe; } /** * Closes this input stream and releases any system resources associated with the stream. * * @throws IOException if a network error occurs */ public void close() throws IOException { try { file.close(); tmp = null; } catch (SmbException se) { throw seToIoe(se); } } /** * Reads a byte of data from this input stream. * * @throws IOException if a network error occurs */ public int read() throws IOException { // need oplocks to cache otherwise use BufferedInputStream if( read( tmp, 0, 1 ) == -1 ) { return -1; } return tmp[0] & 0xFF; } /** * Reads up to b.length bytes of data from this input stream into an array of bytes. 
* * @throws IOException if a network error occurs */ public int read( byte[] b ) throws IOException { return read( b, 0, b.length ); } /** * Reads up to len bytes of data from this input stream into an array of bytes. * * @throws IOException if a network error occurs */ public int read( byte[] b, int off, int len ) throws IOException { return readDirect(b, off, len); } public int readDirect( byte[] b, int off, int len ) throws IOException { if( len <= 0 ) { return 0; } long start = fp; if( tmp == null ) { throw new IOException( "Bad file descriptor" ); } // ensure file is open file.open( openFlags, access, SmbFile.ATTR_NORMAL, 0 ); /* * Read AndX Request / Response */ if( file.log.level >= 4 ) file.log.println( "read: fid=" + file.fid + ",off=" + off + ",len=" + len ); SmbComReadAndXResponse response = new SmbComReadAndXResponse( b, off ); if( file.type == SmbFile.TYPE_NAMED_PIPE ) { response.responseTimeout = 0; } int r, n; do { r = len > readSize ? readSize : len; if( file.log.level >= 4 ) file.log.println( "read: len=" + len + ",r=" + r + ",fp=" + fp ); try { SmbComReadAndX request = new SmbComReadAndX( file.fid, fp, r, null ); if( file.type == SmbFile.TYPE_NAMED_PIPE ) { request.minCount = request.maxCount = request.remaining = 1024; } file.send( request, response ); } catch( SmbException se ) { if( file.type == SmbFile.TYPE_NAMED_PIPE && se.getNtStatus() == NtStatus.NT_STATUS_PIPE_BROKEN ) { return -1; } throw seToIoe(se); } if(( n = response.dataLength ) <= 0 ) { return (int)((fp - start) > 0L ? fp - start : -1); } fp += n; len -= n; response.off += n; } while( len > 0 && n == r ); return (int)(fp - start); } /** * This stream class is unbuffered. Therefore this method will always * return 0 for streams connected to regular files. However, a * stream created from a Named Pipe this method will query the server using a * "peek named pipe" operation and return the number of available bytes * on the server. 
*/ public int available() throws IOException { SmbNamedPipe pipe; TransPeekNamedPipe req; TransPeekNamedPipeResponse resp; if( file.type != SmbFile.TYPE_NAMED_PIPE ) { return 0; } try { pipe = (SmbNamedPipe)file; file.open(SmbFile.O_EXCL, pipe.pipeType & 0xFF0000, SmbFile.ATTR_NORMAL, 0 ); req = new TransPeekNamedPipe( file.unc, file.fid ); resp = new TransPeekNamedPipeResponse( pipe ); pipe.send( req, resp ); if( resp.status == TransPeekNamedPipeResponse.STATUS_DISCONNECTED || resp.status == TransPeekNamedPipeResponse.STATUS_SERVER_END_CLOSED ) { file.opened = false; return 0; } return resp.available; } catch (SmbException se) { throw seToIoe(se); } } /** * Skip n bytes of data on this stream. This operation will not result * in any IO with the server. Unlink <tt>InputStream</tt> value less than * the one provided will not be returned if it exceeds the end of the file * (if this is a problem let us know). */ public long skip( long n ) throws IOException { if (n > 0) { fp += n; return n; } return 0; } }
added a mechanism to override the timeout
src/jcifs/smb/SmbFileInputStream.java
added a mechanism to override the timeout
<ide><path>rc/jcifs/smb/SmbFileInputStream.java <ide> private long fp; <ide> private int readSize, openFlags, access; <ide> private byte[] tmp = new byte[1]; <add> private Long timeout; <ide> <ide> SmbFile file; <ide> <ide> file.tree.session.transport.server.maxBufferSize - 70 ); <ide> } <ide> <add> /** <add> * Sets the timeout for read. <add> */ <add> public void setTimeout(long timeout) { <add> timeout = new Long(timeout); <add> } <add> <ide> protected IOException seToIoe(SmbException se) { <ide> IOException ioe = se; <ide> Throwable root = se.getRootCause(); <ide> if( file.type == SmbFile.TYPE_NAMED_PIPE ) { <ide> request.minCount = request.maxCount = request.remaining = 1024; <ide> } <add> request.timeout = timeout; <ide> file.send( request, response ); <ide> } catch( SmbException se ) { <ide> if( file.type == SmbFile.TYPE_NAMED_PIPE &&
Java
apache-2.0
ffc5289d8668373c865feaec5f8ef71ae4728c17
0
bf8086/alluxio,yuluo-ding/alluxio,bf8086/alluxio,uronce-cc/alluxio,jswudi/alluxio,riversand963/alluxio,madanadit/alluxio,maboelhassan/alluxio,uronce-cc/alluxio,wwjiang007/alluxio,Alluxio/alluxio,apc999/alluxio,maobaolong/alluxio,apc999/alluxio,WilliamZapata/alluxio,bf8086/alluxio,jswudi/alluxio,apc999/alluxio,madanadit/alluxio,EvilMcJerkface/alluxio,calvinjia/tachyon,Alluxio/alluxio,PasaLab/tachyon,WilliamZapata/alluxio,maobaolong/alluxio,madanadit/alluxio,apc999/alluxio,yuluo-ding/alluxio,aaudiber/alluxio,apc999/alluxio,WilliamZapata/alluxio,Alluxio/alluxio,aaudiber/alluxio,ShailShah/alluxio,uronce-cc/alluxio,calvinjia/tachyon,maobaolong/alluxio,jsimsa/alluxio,yuluo-ding/alluxio,EvilMcJerkface/alluxio,jswudi/alluxio,bf8086/alluxio,bf8086/alluxio,Reidddddd/mo-alluxio,yuluo-ding/alluxio,ChangerYoung/alluxio,yuluo-ding/alluxio,riversand963/alluxio,EvilMcJerkface/alluxio,riversand963/alluxio,PasaLab/tachyon,riversand963/alluxio,wwjiang007/alluxio,riversand963/alluxio,maobaolong/alluxio,apc999/alluxio,WilliamZapata/alluxio,jswudi/alluxio,wwjiang007/alluxio,Reidddddd/alluxio,bf8086/alluxio,maboelhassan/alluxio,Reidddddd/alluxio,calvinjia/tachyon,PasaLab/tachyon,maboelhassan/alluxio,EvilMcJerkface/alluxio,jsimsa/alluxio,EvilMcJerkface/alluxio,calvinjia/tachyon,calvinjia/tachyon,PasaLab/tachyon,ShailShah/alluxio,riversand963/alluxio,wwjiang007/alluxio,ChangerYoung/alluxio,jsimsa/alluxio,Reidddddd/mo-alluxio,EvilMcJerkface/alluxio,maobaolong/alluxio,aaudiber/alluxio,Alluxio/alluxio,wwjiang007/alluxio,wwjiang007/alluxio,Reidddddd/mo-alluxio,maobaolong/alluxio,Alluxio/alluxio,jswudi/alluxio,maobaolong/alluxio,Reidddddd/mo-alluxio,Alluxio/alluxio,madanadit/alluxio,EvilMcJerkface/alluxio,wwjiang007/alluxio,aaudiber/alluxio,Reidddddd/mo-alluxio,maobaolong/alluxio,maobaolong/alluxio,uronce-cc/alluxio,jswudi/alluxio,PasaLab/tachyon,madanadit/alluxio,maboelhassan/alluxio,wwjiang007/alluxio,calvinjia/tachyon,wwjiang007/alluxio,Alluxio/alluxio,jsimsa/alluxio,ShailShah/alluxio,jsimsa/
alluxio,maboelhassan/alluxio,uronce-cc/alluxio,EvilMcJerkface/alluxio,ChangerYoung/alluxio,bf8086/alluxio,ShailShah/alluxio,WilliamZapata/alluxio,maboelhassan/alluxio,apc999/alluxio,uronce-cc/alluxio,maboelhassan/alluxio,Alluxio/alluxio,calvinjia/tachyon,Reidddddd/mo-alluxio,PasaLab/tachyon,PasaLab/tachyon,Reidddddd/alluxio,ChangerYoung/alluxio,ShailShah/alluxio,madanadit/alluxio,Reidddddd/alluxio,ShailShah/alluxio,Reidddddd/alluxio,aaudiber/alluxio,aaudiber/alluxio,calvinjia/tachyon,ChangerYoung/alluxio,Reidddddd/alluxio,Alluxio/alluxio,maobaolong/alluxio,WilliamZapata/alluxio,madanadit/alluxio,ChangerYoung/alluxio,wwjiang007/alluxio,yuluo-ding/alluxio,bf8086/alluxio,madanadit/alluxio,aaudiber/alluxio,jsimsa/alluxio,Reidddddd/alluxio,Alluxio/alluxio
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.client.lineage; import alluxio.AlluxioURI; import alluxio.Configuration; import alluxio.Constants; import alluxio.annotation.PublicApi; import alluxio.client.lineage.options.CreateLineageOptions; import alluxio.client.lineage.options.DeleteLineageOptions; import alluxio.client.lineage.options.GetLineageInfoListOptions; import alluxio.exception.AlluxioException; import alluxio.exception.FileDoesNotExistException; import alluxio.exception.LineageDeletionException; import alluxio.exception.LineageDoesNotExistException; import alluxio.job.Job; import alluxio.wire.LineageInfo; import java.io.IOException; import java.util.List; import javax.annotation.concurrent.ThreadSafe; /** * A {@link LineageClient} implementation. This class does not access the master client directly * but goes through the implementations provided in {@link AbstractLineageClient}. 
*/ @PublicApi @ThreadSafe public final class AlluxioLineage extends AbstractLineageClient { private static AlluxioLineage sAlluxioLineage; /** * @return the current lineage for Alluxio */ public static synchronized AlluxioLineage get() { return get(LineageContext.INSTANCE); } /** * @param context lineage context * @return the current lineage for Alluxio */ public static synchronized AlluxioLineage get(LineageContext context) { if (sAlluxioLineage == null) { if (!Configuration.getBoolean(Constants.USER_LINEAGE_ENABLED)) { throw new IllegalStateException("Lineage is not enabled in the configuration."); } sAlluxioLineage = new AlluxioLineage(context); } return sAlluxioLineage; } protected AlluxioLineage(LineageContext context) { super(context); } /** * Convenience method for {@link #createLineage(List, List, Job, CreateLineageOptions)} with * default options. * * @param inputFiles the files that the job depends on * @param outputFiles the files that the job outputs * @param job the job that takes the listed input file and computes the output file * @return the lineage id * @throws FileDoesNotExistException an input file does not exist in Alluxio storage, nor is added * as an output file of an existing lineage * @throws AlluxioException if an unexpected alluxio error occurs * @throws IOException if the master cannot create the lineage */ public long createLineage(List<AlluxioURI> inputFiles, List<AlluxioURI> outputFiles, Job job) throws FileDoesNotExistException, AlluxioException, IOException { return createLineage(inputFiles, outputFiles, job, CreateLineageOptions.defaults()); } /** * Convenience method for {@link #deleteLineage(long, DeleteLineageOptions)} with default options. 
* * @param lineageId the id of the lineage * @return true if the lineage deletion is successful, false otherwise * @throws IOException if the master cannot delete the lineage * @throws LineageDoesNotExistException if the lineage does not exist * @throws LineageDeletionException if the deletion is cascade but the lineage has children * @throws AlluxioException if an unexpected alluxio error occurs */ public boolean deleteLineage(long lineageId) throws IOException, LineageDoesNotExistException, LineageDeletionException, AlluxioException { return deleteLineage(lineageId, DeleteLineageOptions.defaults()); } /** * Convenience method for {@link #getLineageInfoList(GetLineageInfoListOptions)} with default * options. * * @return the information about lineages * @throws IOException if the master cannot list the lineage info */ public List<LineageInfo> getLineageInfoList() throws IOException { return getLineageInfoList(GetLineageInfoListOptions.defaults()); } }
core/client/src/main/java/alluxio/client/lineage/AlluxioLineage.java
/* * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0 * (the "License"). You may not use this work except in compliance with the License, which is * available at www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied, as more fully set forth in the License. * * See the NOTICE file distributed with this work for information regarding copyright ownership. */ package alluxio.client.lineage; import alluxio.AlluxioURI; import alluxio.Configuration; import alluxio.Constants; import alluxio.annotation.PublicApi; import alluxio.client.lineage.options.CreateLineageOptions; import alluxio.client.lineage.options.DeleteLineageOptions; import alluxio.client.lineage.options.GetLineageInfoListOptions; import alluxio.exception.AlluxioException; import alluxio.exception.FileDoesNotExistException; import alluxio.exception.LineageDeletionException; import alluxio.exception.LineageDoesNotExistException; import alluxio.job.Job; import alluxio.wire.LineageInfo; import java.io.IOException; import java.util.List; import javax.annotation.concurrent.ThreadSafe; /** * A {@link LineageClient} implementation. This class does not access the master client directly * but goes through the implementations provided in {@link AbstractLineageClient}. 
*/ @PublicApi @ThreadSafe public final class AlluxioLineage extends AbstractLineageClient { private static AlluxioLineage sAlluxioLineage; /** * @return the current lineage for Alluxio */ public static synchronized AlluxioLineage get() { return get(LineageContext.INSTANCE); } /** * @return the current lineage for Alluxio */ public static synchronized AlluxioLineage get(LineageContext context) { if (sAlluxioLineage == null) { if (!Configuration.getBoolean(Constants.USER_LINEAGE_ENABLED)) { throw new IllegalStateException("Lineage is not enabled in the configuration."); } sAlluxioLineage = new AlluxioLineage(context); } return sAlluxioLineage; } protected AlluxioLineage(LineageContext context) { super(context); } /** * Convenience method for {@link #createLineage(List, List, Job, CreateLineageOptions)} with * default options. * * @param inputFiles the files that the job depends on * @param outputFiles the files that the job outputs * @param job the job that takes the listed input file and computes the output file * @return the lineage id * @throws FileDoesNotExistException an input file does not exist in Alluxio storage, nor is added * as an output file of an existing lineage * @throws AlluxioException if an unexpected alluxio error occurs * @throws IOException if the master cannot create the lineage */ public long createLineage(List<AlluxioURI> inputFiles, List<AlluxioURI> outputFiles, Job job) throws FileDoesNotExistException, AlluxioException, IOException { return createLineage(inputFiles, outputFiles, job, CreateLineageOptions.defaults()); } /** * Convenience method for {@link #deleteLineage(long, DeleteLineageOptions)} with default options. 
* * @param lineageId the id of the lineage * @return true if the lineage deletion is successful, false otherwise * @throws IOException if the master cannot delete the lineage * @throws LineageDoesNotExistException if the lineage does not exist * @throws LineageDeletionException if the deletion is cascade but the lineage has children * @throws AlluxioException if an unexpected alluxio error occurs */ public boolean deleteLineage(long lineageId) throws IOException, LineageDoesNotExistException, LineageDeletionException, AlluxioException { return deleteLineage(lineageId, DeleteLineageOptions.defaults()); } /** * Convenience method for {@link #getLineageInfoList(GetLineageInfoListOptions)} with default * options. * * @return the information about lineages * @throws IOException if the master cannot list the lineage info */ public List<LineageInfo> getLineageInfoList() throws IOException { return getLineageInfoList(GetLineageInfoListOptions.defaults()); } }
Add missing @param javadoc
core/client/src/main/java/alluxio/client/lineage/AlluxioLineage.java
Add missing @param javadoc
<ide><path>ore/client/src/main/java/alluxio/client/lineage/AlluxioLineage.java <ide> } <ide> <ide> /** <add> * @param context lineage context <ide> * @return the current lineage for Alluxio <ide> */ <ide> public static synchronized AlluxioLineage get(LineageContext context) {
Java
apache-2.0
2a76512db9d97576b5cb99873242967a6032a8b4
0
apache/commons-math,sdinot/hipparchus,sdinot/hipparchus,apache/commons-math,sdinot/hipparchus,sdinot/hipparchus,apache/commons-math,Hipparchus-Math/hipparchus,Hipparchus-Math/hipparchus,apache/commons-math,Hipparchus-Math/hipparchus,Hipparchus-Math/hipparchus
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math.analysis.function; import org.apache.commons.math.analysis.UnivariateRealFunction; import org.apache.commons.math.exception.NotStrictlyPositiveException; import org.apache.commons.math.util.FastMath; /** * <a href="http://en.wikipedia.org/wiki/Generalised_logistic_function"> * Generalised logistic</a> function. * * @version $Revision$ $Date$ * @since 3.0 */ public class Logistic implements UnivariateRealFunction { /** Lower asymptote. */ private final double a; /** Upper asymptote. */ private final double k; /** Growth rate. */ private final double b; /** Parameter that affects near which asymptote maximum growth occurs. */ private final double n; /** Parameter that affects the position of the curve along the ordinate axis. */ private final double q; /** Abscissa of maximum growth. */ private final double m; /** * @param k If {@code b > 0}, value of the function for x going towards +&infin;. * If {@code b < 0}, value of the function for x going towards -&infin;. * @param m Abscissa of maximum growth. * @param b Growth rate. * @param q Parameter that affects the position of the curve along the * ordinate axis. 
* @param a If {@code b > 0}, value of the function for x going towards -&infin;. * If {@code b < 0}, value of the function for x going towards +&infin;. * @param n Parameter that affects near which asymptote the maximum * growth occurs. * @throws NotStrictlyPositiveException if {@code n <= 0}. */ public Logistic(double k, double m, double b, double q, double a, double n) { if (n <= 0) { throw new NotStrictlyPositiveException(n); } this.k = k; this.m = m; this.b = b; this.q = q; this.a = a; this.n = n; } /** {@inheritDoc} */ public double value(double x) { return a + (k - a) / FastMath.pow(1 + q * FastMath.exp(b * (m - x)), 1 / n); } }
src/main/java/org/apache/commons/math/analysis/function/Logistic.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.math.analysis.function; import org.apache.commons.math.analysis.UnivariateRealFunction; import org.apache.commons.math.exception.NotStrictlyPositiveException; import org.apache.commons.math.util.FastMath; /** * <a href="http://en.wikipedia.org/wiki/Generalised_logistic_function"> * Generalised logistic</a> function. * * @version $Revision$ $Date$ * @since 3.0 */ public class Logistic implements UnivariateRealFunction { /** Lower asymptote. */ private final double a; /** Upper asymptote. */ private final double k; /** Growth rate. */ private final double b; /** Parameter that affects near which asymptote maximum growth occurs. */ private final double n; /** Parameter that affects the position of the curve along the ordinate axis. */ private final double q; /** Abscissa of maximum growth. */ private final double m; /** * @param k Upper asymptote. * @param m Abscissa of maximum growth. * @param b Growth rate. * @param q Parameter that affects the position of the curve along the * ordinate axis. * @param a Lower asymptote. * @param n Parameter that affects near which asymptote the maximum * growth occurs. * @throws NotStrictlyPositiveException if {@code n <= 0}. 
*/ public Logistic(double k, double m, double b, double q, double a, double n) { if (n <= 0) { throw new NotStrictlyPositiveException(n); } this.k = k; this.m = m; this.b = b; this.q = q; this.a = a; this.n = n; } /** {@inheritDoc} */ public double value(double x) { return a + (k - a) / FastMath.pow((1 + q * FastMath.exp(b * (m - x))), 1 / n); } }
Javadoc update. git-svn-id: 80d496c472b8b763a5e941dba212da9bf48aeceb@1065614 13f79535-47bb-0310-9956-ffa450edef68
src/main/java/org/apache/commons/math/analysis/function/Logistic.java
Javadoc update.
<ide><path>rc/main/java/org/apache/commons/math/analysis/function/Logistic.java <ide> private final double m; <ide> <ide> /** <del> * @param k Upper asymptote. <add> * @param k If {@code b > 0}, value of the function for x going towards +&infin;. <add> * If {@code b < 0}, value of the function for x going towards -&infin;. <ide> * @param m Abscissa of maximum growth. <ide> * @param b Growth rate. <ide> * @param q Parameter that affects the position of the curve along the <ide> * ordinate axis. <del> * @param a Lower asymptote. <add> * @param a If {@code b > 0}, value of the function for x going towards -&infin;. <add> * If {@code b < 0}, value of the function for x going towards +&infin;. <ide> * @param n Parameter that affects near which asymptote the maximum <ide> * growth occurs. <ide> * @throws NotStrictlyPositiveException if {@code n <= 0}. <ide> <ide> /** {@inheritDoc} */ <ide> public double value(double x) { <del> return a + (k - a) / FastMath.pow((1 + q * FastMath.exp(b * (m - x))), 1 / n); <add> return a + (k - a) / FastMath.pow(1 + q * FastMath.exp(b * (m - x)), 1 / n); <ide> } <ide> }
Java
apache-2.0
error: pathspec 'org.jenetics.xml/src/test/java/org/jenetics/xml/Usage.java' did not match any file(s) known to git
4b3db5e05a5858c96a9d61235e306964ed823b5e
1
jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics,jenetics/jenetics
/* * Java Genetic Algorithm Library (@__identifier__@). * Copyright (c) @__year__@ Franz Wilhelmstötter * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Author: * Franz Wilhelmstötter ([email protected]) */ package org.jenetics.xml; import java.util.List; import java.util.stream.Collectors; import org.jenetics.BitChromosome; import org.jenetics.BitGene; import org.jenetics.EnumGene; import org.jenetics.Genotype; import org.jenetics.engine.Codec; import org.jenetics.engine.Engine; import org.jenetics.engine.EvolutionResult; import org.jenetics.engine.Problem; /** * @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a> * @version !__version__! * @since !__version__! */ public class Usage { public static void main(final String[] args) throws Exception { final Problem<BitChromosome, BitGene, Integer> count = Problem.of( BitChromosome::bitCount, Codec.of( Genotype.of(BitChromosome.of(10)), gt -> gt.getChromosome().as(BitChromosome.class) ) ); final Engine<BitGene, Integer> engine = Engine.builder(count).build(); final EvolutionResult<BitGene, Integer> result = engine.stream() .limit(10) .collect(EvolutionResult.toBestEvolutionResult()); final List<Genotype<BitGene>> genotypes = result.getGenotypes(); Writers.write(System.out, genotypes, Writers.BitChromosome.writer()); Readers.read(System.in, Readers.BitChromosome.reader()); engine.stream(genotypes); } }
org.jenetics.xml/src/test/java/org/jenetics/xml/Usage.java
XML writer/reader usage example.
org.jenetics.xml/src/test/java/org/jenetics/xml/Usage.java
XML writer/reader usage example.
<ide><path>rg.jenetics.xml/src/test/java/org/jenetics/xml/Usage.java <add>/* <add> * Java Genetic Algorithm Library (@__identifier__@). <add> * Copyright (c) @__year__@ Franz Wilhelmstötter <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. <add> * <add> * Author: <add> * Franz Wilhelmstötter ([email protected]) <add> */ <add>package org.jenetics.xml; <add> <add>import java.util.List; <add>import java.util.stream.Collectors; <add> <add>import org.jenetics.BitChromosome; <add>import org.jenetics.BitGene; <add>import org.jenetics.EnumGene; <add>import org.jenetics.Genotype; <add>import org.jenetics.engine.Codec; <add>import org.jenetics.engine.Engine; <add>import org.jenetics.engine.EvolutionResult; <add>import org.jenetics.engine.Problem; <add> <add>/** <add> * @author <a href="mailto:[email protected]">Franz Wilhelmstötter</a> <add> * @version !__version__! <add> * @since !__version__! 
<add> */ <add>public class Usage { <add> <add> public static void main(final String[] args) throws Exception { <add> final Problem<BitChromosome, BitGene, Integer> count = Problem.of( <add> BitChromosome::bitCount, <add> Codec.of( <add> Genotype.of(BitChromosome.of(10)), <add> gt -> gt.getChromosome().as(BitChromosome.class) <add> ) <add> ); <add> <add> final Engine<BitGene, Integer> engine = Engine.builder(count).build(); <add> <add> final EvolutionResult<BitGene, Integer> result = engine.stream() <add> .limit(10) <add> .collect(EvolutionResult.toBestEvolutionResult()); <add> <add> final List<Genotype<BitGene>> genotypes = result.getGenotypes(); <add> <add> Writers.write(System.out, genotypes, Writers.BitChromosome.writer()); <add> Readers.read(System.in, Readers.BitChromosome.reader()); <add> <add> engine.stream(genotypes); <add> } <add> <add>}
Java
apache-2.0
fdb9de1445c92e3b80f64c57215713ef9ccb689b
0
spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework
/* * Copyright 2002-2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.core.type.classreading; import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import org.springframework.asm.ClassReader; import org.springframework.core.io.Resource; import org.springframework.core.type.AnnotationMetadata; import org.springframework.core.type.ClassMetadata; /** * {@link MetadataReader} implementation based on an ASM * {@link org.springframework.asm.ClassReader}. * * <p>Package-visible in order to allow for repackaging the ASM library * without effect on users of the <code>core.type</code> package. 
* * @author Juergen Hoeller * @author Costin Leau * @since 2.5 */ final class SimpleMetadataReader implements MetadataReader { private final Resource resource; private final ClassMetadata classMetadata; private final AnnotationMetadata annotationMetadata; SimpleMetadataReader(Resource resource, ClassLoader classLoader) throws IOException { InputStream is = new BufferedInputStream(resource.getInputStream()); ClassReader classReader = null; try { classReader = new ClassReader(is); } finally { is.close(); } AnnotationMetadataReadingVisitor visitor = new AnnotationMetadataReadingVisitor(classLoader); classReader.accept(visitor, true); this.annotationMetadata = visitor; // (since AnnotationMetadataReader extends ClassMetadataReadingVisitor) this.classMetadata = visitor; this.resource = resource; } public Resource getResource() { return this.resource; } public ClassMetadata getClassMetadata() { return this.classMetadata; } public AnnotationMetadata getAnnotationMetadata() { return this.annotationMetadata; } }
spring-core/src/main/java/org/springframework/core/type/classreading/SimpleMetadataReader.java
/* * Copyright 2002-2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.core.type.classreading; import java.io.IOException; import java.io.InputStream; import org.springframework.asm.ClassReader; import org.springframework.core.io.Resource; import org.springframework.core.type.AnnotationMetadata; import org.springframework.core.type.ClassMetadata; /** * {@link MetadataReader} implementation based on an ASM * {@link org.springframework.asm.ClassReader}. * * <p>Package-visible in order to allow for repackaging the ASM library * without effect on users of the <code>core.type</code> package. 
* * @author Juergen Hoeller * @author Costin Leau * @since 2.5 */ final class SimpleMetadataReader implements MetadataReader { private final Resource resource; private final ClassMetadata classMetadata; private final AnnotationMetadata annotationMetadata; SimpleMetadataReader(Resource resource, ClassLoader classLoader) throws IOException { InputStream is = resource.getInputStream(); ClassReader classReader = null; try { classReader = new ClassReader(is); } finally { is.close(); } AnnotationMetadataReadingVisitor visitor = new AnnotationMetadataReadingVisitor(classLoader); classReader.accept(visitor, true); this.annotationMetadata = visitor; // (since AnnotationMetadataReader extends ClassMetadataReadingVisitor) this.classMetadata = visitor; this.resource = resource; } public Resource getResource() { return this.resource; } public ClassMetadata getClassMetadata() { return this.classMetadata; } public AnnotationMetadata getAnnotationMetadata() { return this.annotationMetadata; } }
Use BufferedInputStream in SimpleMetaDataReader to double performance Issue: SPR-9528
spring-core/src/main/java/org/springframework/core/type/classreading/SimpleMetadataReader.java
Use BufferedInputStream in SimpleMetaDataReader to double performance
<ide><path>pring-core/src/main/java/org/springframework/core/type/classreading/SimpleMetadataReader.java <ide> /* <del> * Copyright 2002-2009 the original author or authors. <add> * Copyright 2002-2012 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> <ide> package org.springframework.core.type.classreading; <ide> <add>import java.io.BufferedInputStream; <ide> import java.io.IOException; <ide> import java.io.InputStream; <ide> <ide> final class SimpleMetadataReader implements MetadataReader { <ide> <ide> private final Resource resource; <add> <ide> private final ClassMetadata classMetadata; <add> <ide> private final AnnotationMetadata annotationMetadata; <ide> <add> <ide> SimpleMetadataReader(Resource resource, ClassLoader classLoader) throws IOException { <del> InputStream is = resource.getInputStream(); <add> InputStream is = new BufferedInputStream(resource.getInputStream()); <ide> ClassReader classReader = null; <ide> try { <ide> classReader = new ClassReader(is); <del> } finally { <add> } <add> finally { <ide> is.close(); <ide> } <ide> <ide> AnnotationMetadataReadingVisitor visitor = new AnnotationMetadataReadingVisitor(classLoader); <ide> classReader.accept(visitor, true); <del> <add> <ide> this.annotationMetadata = visitor; <ide> // (since AnnotationMetadataReader extends ClassMetadataReadingVisitor) <ide> this.classMetadata = visitor; <ide> public AnnotationMetadata getAnnotationMetadata() { <ide> return this.annotationMetadata; <ide> } <add> <ide> }
Java
lgpl-2.1
07154b361813f180a1573f03e3230b7505e57778
0
Cazsius/Spice-of-Life-Carrot-Edition
package com.cazsius.solcarrot.handler; import com.cazsius.solcarrot.capability.FoodCapability; import net.minecraft.init.SoundEvents; import net.minecraft.util.EnumParticleTypes; import net.minecraft.util.text.TextComponentTranslation; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import squeek.applecore.api.food.FoodEvent; public class HandlerFoodTracker { @SubscribeEvent public void onFoodEaten(FoodEvent.FoodEaten event) { FoodCapability food = event.player.getCapability(FoodCapability.FOOD_CAPABILITY, null); food.addFood(event.food.getItem()); float storedHP = event.player.getMaxHealth(); MaxHealthHandler.updateFoodHPModifier(event.player); if (storedHP < event.player.getMaxHealth()) { event.player.playSound(SoundEvents.ENTITY_PLAYER_LEVELUP, 1.0F, 1.0F); event.player.world.spawnParticle(EnumParticleTypes.VILLAGER_HAPPY, event.player.posX, event.player.posY + 2, event.player.posZ, event.player.motionX, event.player.motionY, event.player.motionZ, new int[0]); if (event.player.world.isRemote) { int hpm = HandlerConfiguration.getHeartsPerMilestone(); TextComponentTranslation milestoneMessage = new TextComponentTranslation( "What a unique flavor! You've gained " + (hpm == 1 ? "a heart!" : hpm + " hearts!")); event.player.sendMessage(milestoneMessage); int foodsEaten = food.getCount(); int milestone = 0; int[] milestoneArray = HandlerConfiguration.getMilestoneArray(); while (milestone < milestoneArray.length && foodsEaten + 1 > milestoneArray[milestone]) { milestone++; } if (milestone == milestoneArray.length) { milestoneMessage = new TextComponentTranslation( "Your desire to seek unique foods is finally satisfied."); } else { milestoneMessage = new TextComponentTranslation( "Sample another " + (milestoneArray[milestone] - milestoneArray[milestone - 1]) + " varieties of food to gain another " + (hpm == 1 ? "heart!" : hpm + " hearts!")); } event.player.sendMessage(milestoneMessage); } } } }
src/main/java/com/cazsius/solcarrot/handler/HandlerFoodTracker.java
package com.cazsius.solcarrot.handler; import com.cazsius.solcarrot.capability.FoodCapability; import net.minecraft.init.SoundEvents; import net.minecraft.util.EnumParticleTypes; import net.minecraft.util.text.TextComponentTranslation; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import squeek.applecore.api.food.FoodEvent; public class HandlerFoodTracker { @SubscribeEvent public void onFoodEaten(FoodEvent.FoodEaten event) { FoodCapability food = event.player.getCapability(FoodCapability.FOOD_CAPABILITY, null); food.addFood(event.food.getItem()); float storedHP = event.player.getMaxHealth(); MaxHealthHandler.updateFoodHPModifier(event.player); if (storedHP < event.player.getMaxHealth()) { event.player.playSound(SoundEvents.ENTITY_PLAYER_LEVELUP, 1.0F, 1.0F); event.player.world.spawnParticle(EnumParticleTypes.VILLAGER_HAPPY, event.player.posX, event.player.posY + 2, event.player.posZ, event.player.motionX, event.player.motionY, event.player.motionZ, new int[0]); if(event.player.world.isRemote) { int hpm = HandlerConfiguration.getHeartsPerMilestone(); TextComponentTranslation milestoneMessage = new TextComponentTranslation("What a unique flavor! You've gained " +(hpm==1?"a heart!":hpm + " hearts!")); event.player.sendMessage(milestoneMessage); int foodsEaten = food.getCount(); int milestone = 0; int[] milestoneArray = HandlerConfiguration.getMilestoneArray(); while (milestone < milestoneArray.length && foodsEaten + 1 > milestoneArray[milestone]) { milestone++; } if (milestone == milestoneArray.length) { milestoneMessage = new TextComponentTranslation("Your desire to seek unique foods is finally satisfied."); } else { milestoneMessage = new TextComponentTranslation("Sample another "+(milestoneArray[milestone]-milestoneArray[milestone-1])+ " varieties of food to gain another "+(hpm==1?"heart!":hpm + " hearts!")); } event.player.sendMessage(milestoneMessage); } } } }
Formatted
src/main/java/com/cazsius/solcarrot/handler/HandlerFoodTracker.java
Formatted
<ide><path>rc/main/java/com/cazsius/solcarrot/handler/HandlerFoodTracker.java <ide> float storedHP = event.player.getMaxHealth(); <ide> <ide> MaxHealthHandler.updateFoodHPModifier(event.player); <del> if (storedHP < event.player.getMaxHealth()) <del> { <add> if (storedHP < event.player.getMaxHealth()) { <ide> event.player.playSound(SoundEvents.ENTITY_PLAYER_LEVELUP, 1.0F, 1.0F); <ide> event.player.world.spawnParticle(EnumParticleTypes.VILLAGER_HAPPY, event.player.posX, event.player.posY + 2, <ide> event.player.posZ, event.player.motionX, event.player.motionY, event.player.motionZ, new int[0]); <del> if(event.player.world.isRemote) <del> { <add> if (event.player.world.isRemote) { <ide> int hpm = HandlerConfiguration.getHeartsPerMilestone(); <del> TextComponentTranslation milestoneMessage = new TextComponentTranslation("What a unique flavor! You've gained " +(hpm==1?"a heart!":hpm + " hearts!")); <add> TextComponentTranslation milestoneMessage = new TextComponentTranslation( <add> "What a unique flavor! You've gained " + (hpm == 1 ? "a heart!" 
: hpm + " hearts!")); <ide> event.player.sendMessage(milestoneMessage); <del> <add> <ide> int foodsEaten = food.getCount(); <ide> int milestone = 0; <ide> int[] milestoneArray = HandlerConfiguration.getMilestoneArray(); <del> while (milestone < milestoneArray.length && foodsEaten + 1 > milestoneArray[milestone]) <del> { <add> while (milestone < milestoneArray.length && foodsEaten + 1 > milestoneArray[milestone]) { <ide> milestone++; <ide> } <del> <del> if (milestone == milestoneArray.length) <del> { <del> milestoneMessage = new TextComponentTranslation("Your desire to seek unique foods is finally satisfied."); <del> } <del> else <del> { <del> milestoneMessage = new TextComponentTranslation("Sample another "+(milestoneArray[milestone]-milestoneArray[milestone-1])+ <del> " varieties of food to gain another "+(hpm==1?"heart!":hpm + " hearts!")); <add> <add> if (milestone == milestoneArray.length) { <add> milestoneMessage = new TextComponentTranslation( <add> "Your desire to seek unique foods is finally satisfied."); <add> } else { <add> milestoneMessage = new TextComponentTranslation( <add> "Sample another " + (milestoneArray[milestone] - milestoneArray[milestone - 1]) <add> + " varieties of food to gain another " + (hpm == 1 ? "heart!" : hpm + " hearts!")); <ide> } <ide> event.player.sendMessage(milestoneMessage); <ide> }
Java
apache-2.0
efa60d58fbf973edff7abc9aa95b09834ed4bca7
0
lumifyio/securegraph,lumifyio/securegraph,lumifyio/securegraph
package org.securegraph.test; import org.apache.commons.io.IOUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.securegraph.*; import org.securegraph.mutation.ElementMutation; import org.securegraph.property.PropertyValue; import org.securegraph.property.StreamingPropertyValue; import org.securegraph.query.*; import org.securegraph.test.util.LargeStringInputStream; import org.securegraph.type.GeoCircle; import org.securegraph.type.GeoPoint; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayInputStream; import java.io.IOException; import java.math.BigDecimal; import java.util.*; import static org.junit.Assert.*; import static org.securegraph.test.util.IterableUtils.assertContains; import static org.securegraph.util.IterableUtils.count; import static org.securegraph.util.IterableUtils.toList; @RunWith(JUnit4.class) public abstract class GraphTestBase { private static final Logger LOGGER = LoggerFactory.getLogger(GraphTestBase.class); public static final Visibility VISIBILITY_A = new Visibility("a"); public static final Visibility VISIBILITY_B = new Visibility("b"); public static final Visibility VISIBILITY_EMPTY = new Visibility(""); public final Authorizations AUTHORIZATIONS_A; public final Authorizations AUTHORIZATIONS_B; public final Authorizations AUTHORIZATIONS_C; public final Authorizations AUTHORIZATIONS_A_AND_B; public final Authorizations AUTHORIZATIONS_EMPTY; public static final int LARGE_PROPERTY_VALUE_SIZE = 1024 + 1; protected Graph graph; protected abstract Graph createGraph() throws Exception; public Graph getGraph() { return graph; } public GraphTestBase() { AUTHORIZATIONS_A = createAuthorizations("a"); AUTHORIZATIONS_B = createAuthorizations("b"); AUTHORIZATIONS_C = createAuthorizations("c"); AUTHORIZATIONS_A_AND_B = createAuthorizations("a", "b"); AUTHORIZATIONS_EMPTY = createAuthorizations(); } protected abstract 
Authorizations createAuthorizations(String... auths); @Before public void before() throws Exception { graph = createGraph(); } @After public void after() throws Exception { graph.shutdown(); graph = null; } @Test public void testAddVertexWithId() { Vertex v = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); assertNotNull(v); assertEquals("v1", v.getId()); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertNotNull(v); assertEquals("v1", v.getId()); assertEquals(VISIBILITY_A, v.getVisibility()); v = graph.getVertex("", AUTHORIZATIONS_A); assertNull(v); v = graph.getVertex(null, AUTHORIZATIONS_A); assertNull(v); } @Test public void testAddVertexWithoutId() { Vertex v = graph.addVertex(VISIBILITY_A, AUTHORIZATIONS_A); assertNotNull(v); Object vertexId = v.getId(); assertNotNull(vertexId); v = graph.getVertex(vertexId, AUTHORIZATIONS_A); assertNotNull(v); assertNotNull(vertexId); } @Test public void testAddStreamingPropertyValue() throws IOException, InterruptedException { String expectedLargeValue = IOUtils.toString(new LargeStringInputStream(LARGE_PROPERTY_VALUE_SIZE)); PropertyValue propSmall = new StreamingPropertyValue(new ByteArrayInputStream("value1".getBytes()), String.class); PropertyValue propLarge = new StreamingPropertyValue(new ByteArrayInputStream(expectedLargeValue.getBytes()), String.class); String largePropertyName = "propLarge/\\*!@#$%^&*()[]{}|"; Vertex v1 = graph.prepareVertex("v1", VISIBILITY_A) .setProperty("propSmall", propSmall, VISIBILITY_A) .setProperty(largePropertyName, propLarge, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Iterable<Object> propSmallValues = v1.getPropertyValues("propSmall"); assertEquals(1, count(propSmallValues)); Object propSmallValue = propSmallValues.iterator().next(); assertTrue("propSmallValue was " + propSmallValue.getClass().getName(), propSmallValue instanceof StreamingPropertyValue); StreamingPropertyValue value = (StreamingPropertyValue) propSmallValue; assertEquals(String.class, value.getValueType()); 
assertEquals("value1".getBytes().length, value.getLength()); assertEquals("value1", IOUtils.toString(value.getInputStream())); assertEquals("value1", IOUtils.toString(value.getInputStream())); Iterable<Object> propLargeValues = v1.getPropertyValues(largePropertyName); assertEquals(1, count(propLargeValues)); Object propLargeValue = propLargeValues.iterator().next(); assertTrue(largePropertyName + " was " + propLargeValue.getClass().getName(), propLargeValue instanceof StreamingPropertyValue); value = (StreamingPropertyValue) propLargeValue; assertEquals(String.class, value.getValueType()); assertEquals(expectedLargeValue.getBytes().length, value.getLength()); assertEquals(expectedLargeValue, IOUtils.toString(value.getInputStream())); assertEquals(expectedLargeValue, IOUtils.toString(value.getInputStream())); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); propSmallValues = v1.getPropertyValues("propSmall"); assertEquals(1, count(propSmallValues)); propSmallValue = propSmallValues.iterator().next(); assertTrue("propSmallValue was " + propSmallValue.getClass().getName(), propSmallValue instanceof StreamingPropertyValue); value = (StreamingPropertyValue) propSmallValue; assertEquals(String.class, value.getValueType()); assertEquals("value1".getBytes().length, value.getLength()); assertEquals("value1", IOUtils.toString(value.getInputStream())); assertEquals("value1", IOUtils.toString(value.getInputStream())); propLargeValues = v1.getPropertyValues(largePropertyName); assertEquals(1, count(propLargeValues)); propLargeValue = propLargeValues.iterator().next(); assertTrue(largePropertyName + " was " + propLargeValue.getClass().getName(), propLargeValue instanceof StreamingPropertyValue); value = (StreamingPropertyValue) propLargeValue; assertEquals(String.class, value.getValueType()); assertEquals(expectedLargeValue.getBytes().length, value.getLength()); assertEquals(expectedLargeValue, IOUtils.toString(value.getInputStream())); assertEquals(expectedLargeValue, 
IOUtils.toString(value.getInputStream())); } @Test public void testAddVertexPropertyWithMetadata() { Map<String, Object> prop1Metadata = new HashMap<String, Object>(); prop1Metadata.put("metadata1", "metadata1Value"); graph.prepareVertex("v1", VISIBILITY_A) .setProperty("prop1", "value1", prop1Metadata, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Vertex v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(1, count(v.getProperties("prop1"))); Property prop1 = v.getProperties("prop1").iterator().next(); prop1Metadata = prop1.getMetadata(); assertNotNull(prop1Metadata); assertEquals(1, prop1Metadata.keySet().size()); assertEquals("metadata1Value", prop1Metadata.get("metadata1")); prop1Metadata.put("metadata2", "metadata2Value"); v.prepareMutation() .setProperty("prop1", "value1", prop1Metadata, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(1, count(v.getProperties("prop1"))); prop1 = v.getProperties("prop1").iterator().next(); prop1Metadata = prop1.getMetadata(); assertEquals(2, prop1Metadata.keySet().size()); assertEquals("metadata1Value", prop1Metadata.get("metadata1")); assertEquals("metadata2Value", prop1Metadata.get("metadata2")); // make sure we clear out old values prop1Metadata = new HashMap<String, Object>(); v.setProperty("prop1", "value1", prop1Metadata, VISIBILITY_A, AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(1, count(v.getProperties("prop1"))); prop1 = v.getProperties("prop1").iterator().next(); prop1Metadata = prop1.getMetadata(); assertEquals(0, prop1Metadata.keySet().size()); } @Test public void testAddVertexWithProperties() { Vertex v = graph.prepareVertex("v1", VISIBILITY_A) .setProperty("prop1", "value1", VISIBILITY_A) .setProperty("prop2", "value2", VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); assertEquals(1, count(v.getProperties("prop1"))); assertEquals("value1", v.getPropertyValues("prop1").iterator().next()); assertEquals(1, 
count(v.getProperties("prop2"))); assertEquals("value2", v.getPropertyValues("prop2").iterator().next()); v = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B); assertEquals(1, count(v.getProperties("prop1"))); assertEquals("value1", v.getPropertyValues("prop1").iterator().next()); assertEquals(1, count(v.getProperties("prop2"))); assertEquals("value2", v.getPropertyValues("prop2").iterator().next()); } @Test public void testAddVertexWithPropertiesWithTwoDifferentVisibilities() { Vertex v = graph.prepareVertex("v1", VISIBILITY_EMPTY) .setProperty("prop1", "value1a", VISIBILITY_A) .setProperty("prop1", "value1b", VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); assertEquals(2, count(v.getProperties("prop1"))); v = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B); assertEquals(2, count(v.getProperties("prop1"))); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(1, count(v.getProperties("prop1"))); assertEquals("value1a", v.getPropertyValue("prop1")); v = graph.getVertex("v1", AUTHORIZATIONS_B); assertEquals(1, count(v.getProperties("prop1"))); assertEquals("value1b", v.getPropertyValue("prop1")); } @Test public void testMultivaluedProperties() { Vertex v = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); v.prepareMutation() .addPropertyValue("propid1a", "prop1", "value1a", VISIBILITY_A) .addPropertyValue("propid2a", "prop2", "value2a", VISIBILITY_A) .addPropertyValue("propid3a", "prop3", "value3a", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals("value1a", v.getPropertyValues("prop1").iterator().next()); assertEquals("value2a", v.getPropertyValues("prop2").iterator().next()); assertEquals("value3a", v.getPropertyValues("prop3").iterator().next()); assertEquals(3, count(v.getProperties())); v.prepareMutation() .addPropertyValue("propid1a", "prop1", "value1b", VISIBILITY_A) .addPropertyValue("propid2a", "prop2", "value2b", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); 
assertEquals(1, count(v.getPropertyValues("prop1"))); assertEquals("value1b", v.getPropertyValues("prop1").iterator().next()); assertEquals(1, count(v.getPropertyValues("prop2"))); assertEquals("value2b", v.getPropertyValues("prop2").iterator().next()); assertEquals(1, count(v.getPropertyValues("prop3"))); assertEquals("value3a", v.getPropertyValues("prop3").iterator().next()); assertEquals(3, count(v.getProperties())); v.addPropertyValue("propid1b", "prop1", "value1a-new", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertContains("value1b", v.getPropertyValues("prop1")); assertContains("value1a-new", v.getPropertyValues("prop1")); assertEquals(4, count(v.getProperties())); } @Test public void testMultivaluedPropertyOrder() { graph.prepareVertex("v1", VISIBILITY_A) .addPropertyValue("a", "prop", "a", VISIBILITY_A) .addPropertyValue("aa", "prop", "aa", VISIBILITY_A) .addPropertyValue("b", "prop", "b", VISIBILITY_A) .addPropertyValue("0", "prop", "0", VISIBILITY_A) .addPropertyValue("A", "prop", "A", VISIBILITY_A) .addPropertyValue("Z", "prop", "Z", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals("0", v1.getPropertyValue("prop", 0)); assertEquals("A", v1.getPropertyValue("prop", 1)); assertEquals("Z", v1.getPropertyValue("prop", 2)); assertEquals("a", v1.getPropertyValue("prop", 3)); assertEquals("aa", v1.getPropertyValue("prop", 4)); assertEquals("b", v1.getPropertyValue("prop", 5)); } @Test public void testRemoveProperty() { Vertex v = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); v.prepareMutation() .addPropertyValue("propid1a", "prop1", "value1a", VISIBILITY_A) .addPropertyValue("propid1b", "prop1", "value1b", VISIBILITY_A) .addPropertyValue("propid2a", "prop2", "value2a", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); v.removeProperty("prop1", AUTHORIZATIONS_A_AND_B); assertEquals(1, 
count(v.getProperties())); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(1, count(v.getProperties())); v.removeProperty("propid2a", "prop2", AUTHORIZATIONS_A_AND_B); assertEquals(0, count(v.getProperties())); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(0, count(v.getProperties())); } @Test public void testAddVertexWithVisibility() { graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); graph.addVertex("v2", VISIBILITY_B, AUTHORIZATIONS_A); Iterable<Vertex> cVertices = graph.getVertices(AUTHORIZATIONS_C); assertEquals(0, count(cVertices)); Iterable<Vertex> aVertices = graph.getVertices(AUTHORIZATIONS_A); assertEquals(1, count(aVertices)); assertEquals("v1", aVertices.iterator().next().getId()); Iterable<Vertex> bVertices = graph.getVertices(AUTHORIZATIONS_B); assertEquals(1, count(bVertices)); assertEquals("v2", bVertices.iterator().next().getId()); Iterable<Vertex> allVertices = graph.getVertices(AUTHORIZATIONS_A_AND_B); assertEquals(2, count(allVertices)); } @Test public void testGetVerticesWithIds() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("prop1", "v1", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v1b", VISIBILITY_A) .setProperty("prop1", "v1b", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("prop1", "v2", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v3", VISIBILITY_A) .setProperty("prop1", "v3", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); List<Object> ids = new ArrayList<Object>(); ids.add("v2"); ids.add("v1"); Iterable<Vertex> vertices = graph.getVertices(ids, AUTHORIZATIONS_A); boolean foundV1 = false, foundV2 = false; for (Vertex v : vertices) { if (v.getId().equals("v1")) { assertEquals("v1", v.getPropertyValue("prop1")); foundV1 = true; } else if (v.getId().equals("v2")) { assertEquals("v2", v.getPropertyValue("prop1")); foundV2 = true; } else { assertTrue("Unexpected vertex id: " + v.getId(), false); } } assertTrue("v1 not 
found", foundV1); assertTrue("v2 not found", foundV2); List<Vertex> verticesInOrder = graph.getVerticesInOrder(ids, AUTHORIZATIONS_A); assertEquals(2, verticesInOrder.size()); assertEquals("v2", verticesInOrder.get(0).getId()); assertEquals("v1", verticesInOrder.get(1).getId()); } @Test public void testGetEdgesWithIds() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A); graph.prepareEdge("e1", v1, v2, "", VISIBILITY_A) .setProperty("prop1", "e1", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareEdge("e1a", v1, v2, "", VISIBILITY_A) .setProperty("prop1", "e1a", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareEdge("e2", v1, v3, "", VISIBILITY_A) .setProperty("prop1", "e2", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareEdge("e3", v2, v3, "", VISIBILITY_A) .setProperty("prop1", "e3", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); List<Object> ids = new ArrayList<Object>(); ids.add("e1"); ids.add("e2"); Iterable<Edge> edges = graph.getEdges(ids, AUTHORIZATIONS_A); boolean foundE1 = false, foundE2 = false; for (Edge e : edges) { if (e.getId().equals("e1")) { assertEquals("e1", e.getPropertyValue("prop1")); foundE1 = true; } else if (e.getId().equals("e2")) { assertEquals("e2", e.getPropertyValue("prop1")); foundE2 = true; } else { assertTrue("Unexpected vertex id: " + e.getId(), false); } } assertTrue("e1 not found", foundE1); assertTrue("e2 not found", foundE2); } @Test public void testRemoveVertex() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); assertEquals(1, count(graph.getVertices(AUTHORIZATIONS_A))); graph.removeVertex(v1, AUTHORIZATIONS_A); assertEquals(0, count(graph.getVertices(AUTHORIZATIONS_A))); } @Test public void testRemoveVertexWithProperties() { Vertex v1 = graph.prepareVertex("v1", VISIBILITY_A) .setProperty("prop1", "value1", VISIBILITY_B) 
.save(AUTHORIZATIONS_A_AND_B); assertEquals(1, count(graph.getVertices(AUTHORIZATIONS_A))); graph.removeVertex(v1, AUTHORIZATIONS_A); assertEquals(0, count(graph.getVertices(AUTHORIZATIONS_A_AND_B))); } @Test public void testAddEdge() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); Edge e = graph.addEdge("e1", v1, v2, "label1", VISIBILITY_A, AUTHORIZATIONS_A); assertNotNull(e); assertEquals("e1", e.getId()); assertEquals("label1", e.getLabel()); assertEquals("v1", e.getVertexId(Direction.OUT)); assertEquals(v1, e.getVertex(Direction.OUT, AUTHORIZATIONS_A)); assertEquals("v2", e.getVertexId(Direction.IN)); assertEquals(v2, e.getVertex(Direction.IN, AUTHORIZATIONS_A)); assertEquals(VISIBILITY_A, e.getVisibility()); e = graph.getEdge("e1", AUTHORIZATIONS_B); assertNull(e); e = graph.getEdge("e1", AUTHORIZATIONS_A); assertNotNull(e); assertEquals("e1", e.getId()); assertEquals("label1", e.getLabel()); assertEquals("v1", e.getVertexId(Direction.OUT)); assertEquals(v1, e.getVertex(Direction.OUT, AUTHORIZATIONS_A)); assertEquals("v2", e.getVertexId(Direction.IN)); assertEquals(v2, e.getVertex(Direction.IN, AUTHORIZATIONS_A)); assertEquals(VISIBILITY_A, e.getVisibility()); } @Test public void testGetEdge() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e1to2label1", v1, v2, "label1", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e1to2label2", v1, v2, "label2", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e2to1", v2, v1, "label1", VISIBILITY_A, AUTHORIZATIONS_A); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(3, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_A))); assertEquals(2, count(v1.getEdges(Direction.OUT, AUTHORIZATIONS_A))); assertEquals(1, count(v1.getEdges(Direction.IN, AUTHORIZATIONS_A))); assertEquals(3, count(v1.getEdges(v2, Direction.BOTH, 
AUTHORIZATIONS_A))); assertEquals(2, count(v1.getEdges(v2, Direction.OUT, AUTHORIZATIONS_A))); assertEquals(1, count(v1.getEdges(v2, Direction.IN, AUTHORIZATIONS_A))); assertEquals(2, count(v1.getEdges(v2, Direction.BOTH, "label1", AUTHORIZATIONS_A))); assertEquals(1, count(v1.getEdges(v2, Direction.OUT, "label1", AUTHORIZATIONS_A))); assertEquals(1, count(v1.getEdges(v2, Direction.IN, "label1", AUTHORIZATIONS_A))); assertEquals(3, count(v1.getEdges(v2, Direction.BOTH, new String[]{"label1", "label2"}, AUTHORIZATIONS_A))); assertEquals(2, count(v1.getEdges(v2, Direction.OUT, new String[]{"label1", "label2"}, AUTHORIZATIONS_A))); assertEquals(1, count(v1.getEdges(v2, Direction.IN, new String[]{"label1", "label2"}, AUTHORIZATIONS_A))); } @Test public void testAddEdgeWithProperties() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); graph.prepareEdge("e1", v1, v2, "label1", VISIBILITY_A) .setProperty("propA", "valueA", VISIBILITY_A) .setProperty("propB", "valueB", VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); Edge e = graph.getEdge("e1", AUTHORIZATIONS_A); assertEquals(1, count(e.getProperties())); assertEquals("valueA", e.getPropertyValues("propA").iterator().next()); assertEquals(0, count(e.getPropertyValues("propB"))); e = graph.getEdge("e1", AUTHORIZATIONS_A_AND_B); assertEquals(2, count(e.getProperties())); assertEquals("valueA", e.getPropertyValues("propA").iterator().next()); assertEquals("valueB", e.getPropertyValues("propB").iterator().next()); assertEquals("valueA", e.getPropertyValue("propA")); assertEquals("valueB", e.getPropertyValue("propB")); } @Test public void testRemoveEdge() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e1", v1, v2, "label1", VISIBILITY_A, AUTHORIZATIONS_A); assertEquals(1, count(graph.getEdges(AUTHORIZATIONS_A))); try { graph.removeEdge("e1", 
AUTHORIZATIONS_B); } catch (IllegalArgumentException e) { // expected } assertEquals(1, count(graph.getEdges(AUTHORIZATIONS_A))); graph.removeEdge("e1", AUTHORIZATIONS_A); assertEquals(0, count(graph.getEdges(AUTHORIZATIONS_A))); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(0, count(v1.getVertices(Direction.BOTH, AUTHORIZATIONS_A))); v2 = graph.getVertex("v2", AUTHORIZATIONS_A); assertEquals(0, count(v2.getVertices(Direction.BOTH, AUTHORIZATIONS_A))); } @Test public void testAddEdgeWithVisibility() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e1", v1, v2, "edgeA", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e2", v1, v2, "edgeB", VISIBILITY_B, AUTHORIZATIONS_B); Iterable<Edge> aEdges = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B).getEdges(Direction.BOTH, AUTHORIZATIONS_A); assertEquals(1, count(aEdges)); Edge e1 = aEdges.iterator().next(); assertNotNull(e1); assertEquals("edgeA", e1.getLabel()); Iterable<Edge> bEdges = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B).getEdges(Direction.BOTH, AUTHORIZATIONS_B); assertEquals(1, count(bEdges)); Edge e2 = bEdges.iterator().next(); assertNotNull(e2); assertEquals("edgeB", e2.getLabel()); Iterable<Edge> allEdges = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B).getEdges(Direction.BOTH, AUTHORIZATIONS_A_AND_B); assertEquals(2, count(allEdges)); } @Test public void testGraphQuery() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e1", v1, v2, "edgeA", VISIBILITY_A, AUTHORIZATIONS_A); Iterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A).vertices(); assertEquals(2, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A).skip(1).vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A).limit(1).vertices(); assertEquals(1, count(vertices)); vertices = 
graph.query(AUTHORIZATIONS_A).skip(1).limit(1).vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A).skip(2).vertices(); assertEquals(0, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A).skip(1).limit(2).vertices(); assertEquals(1, count(vertices)); Iterable<Edge> edges = graph.query(AUTHORIZATIONS_A).edges(); assertEquals(1, count(edges)); } @Test public void testGraphQueryWithQueryString() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); v1.setProperty("description", "This is vertex 1 - dog.", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); v2.setProperty("description", "This is vertex 2 - cat.", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); Iterable<Vertex> vertices = graph.query("vertex", AUTHORIZATIONS_A).vertices(); assertEquals(2, count(vertices)); vertices = graph.query("dog", AUTHORIZATIONS_A).vertices(); assertEquals(1, count(vertices)); vertices = graph.query("dog", AUTHORIZATIONS_B).vertices(); assertEquals(0, count(vertices)); } @Test public void testFacetedResults() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("gender", "male", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("gender", "male", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v3", VISIBILITY_A) .setProperty("gender", "female", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v4", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.flush(); Query q = graph.query(AUTHORIZATIONS_A); if (q instanceof QuerySupportingFacetedResults) { ((QuerySupportingFacetedResults) q).addFacet(new TermFacet("f1", "gender")); Iterable<Vertex> results = q.vertices(); assertEquals(4, count(results)); assertTrue("results was not of type IterableWithFacetedResults: " + results.getClass().getName(), results instanceof IterableWithFacetedResults); FacetedResult facetedResult = ((IterableWithFacetedResults) 
results).getFacetedResult("f1"); assertNotNull("facetedResults was null for name 'f1'", facetedResult); assertEquals(2, count(facetedResult.getTerms())); for (FacetedTerm facetedTerm : facetedResult.getTerms()) { if (facetedTerm.getTerm().equals("male")) { assertEquals(2, facetedTerm.getCount()); } else if (facetedTerm.getTerm().equals("female")) { assertEquals(1, facetedTerm.getCount()); } else { throw new RuntimeException("Unexpected FacetedTerm: " + facetedTerm.getTerm()); } } } else { LOGGER.warn("query does not support faceted results: " + q.getClass().getName()); } } @Test public void testGraphQueryHas() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_A) .setProperty("birthDate", new DateOnly(1989, 1, 5), VISIBILITY_A) .setProperty("lastAccessed", createDate(2014, 2, 24, 13, 0, 5), VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("age", 30, VISIBILITY_A) .setProperty("birthDate", new DateOnly(1984, 1, 5), VISIBILITY_A) .setProperty("lastAccessed", createDate(2014, 2, 25, 13, 0, 5), VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Iterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.EQUAL, 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("birthDate", Compare.EQUAL, createDate(1989, 1, 5)) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("lastAccessed", Compare.EQUAL, createDate(2014, 2, 24, 13, 0, 5)) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.GREATER_THAN_EQUAL, 25) .vertices(); assertEquals(2, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.IN, new Integer[]{25}) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.IN, 
new Integer[]{25, 30}) .vertices(); assertEquals(2, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.GREATER_THAN, 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.LESS_THAN, 26) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.LESS_THAN_EQUAL, 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.NOT_EQUAL, 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("lastAccessed", Compare.EQUAL, new DateOnly(2014, 2, 24)) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query("*", AUTHORIZATIONS_A) .has("age", Compare.IN, new Integer[]{25, 30}) .vertices(); assertEquals(2, count(vertices)); } @Test public void testGraphQueryVertexHasWithSecurity() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); Iterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.EQUAL, 25) .vertices(); assertEquals(1, count(vertices)); } @Test public void testGraphQueryEdgeHasWithSecurity() { Vertex v1 = graph.prepareVertex("v1", VISIBILITY_A).save(AUTHORIZATIONS_A_AND_B); Vertex v2 = graph.prepareVertex("v2", VISIBILITY_A).save(AUTHORIZATIONS_A_AND_B); Vertex v3 = graph.prepareVertex("v3", VISIBILITY_A).save(AUTHORIZATIONS_A_AND_B); graph.prepareEdge("e1", v1, v2, "edge", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareEdge("e2", v1, v3, "edge", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); Iterable<Edge> edges = graph.query(AUTHORIZATIONS_A) .has("age", Compare.EQUAL, 25) .edges(); assertEquals(1, count(edges)); } @Test public void 
testGraphQueryHasWithSpaces() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("name", "Joe Ferner", VISIBILITY_A) .setProperty("propWithHyphen", "hyphen-word", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("name", "Joe Smith", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Iterable<Vertex> vertices = graph.query("Ferner", AUTHORIZATIONS_A) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query("joe", AUTHORIZATIONS_A) .vertices(); assertEquals(2, count(vertices)); if (!isUsingDefaultQuery(graph)) { vertices = graph.query("joe AND ferner", AUTHORIZATIONS_A) .vertices(); assertEquals(1, count(vertices)); } if (!isUsingDefaultQuery(graph)) { vertices = graph.query("joe smith", AUTHORIZATIONS_A) .vertices(); List<Vertex> verticesList = toList(vertices); assertEquals(2, verticesList.size()); assertEquals("v2", verticesList.get(0).getId()); assertEquals("v1", verticesList.get(1).getId()); } vertices = graph.query(AUTHORIZATIONS_A) .has("name", TextPredicate.CONTAINS, "Ferner") .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("name", TextPredicate.CONTAINS, "Joe") .has("name", TextPredicate.CONTAINS, "Ferner") .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("name", TextPredicate.CONTAINS, "Joe Ferner") .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("propWithHyphen", TextPredicate.CONTAINS, "hyphen-word") .vertices(); assertEquals(1, count(vertices)); } @Test public void testGraphQueryHasWithSpacesAndFieldedQueryString() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("name", "Joe Ferner", VISIBILITY_A) .setProperty("propWithHyphen", "hyphen-word", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("name", "Joe Smith", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); if (!isUsingDefaultQuery(graph)) { Iterable<Vertex> 
vertices = graph.query("name:\"joe ferner\"", AUTHORIZATIONS_A) .vertices(); assertEquals(1, count(vertices)); } } protected boolean isUsingDefaultQuery(Graph graph) { return graph.query(AUTHORIZATIONS_A) instanceof DefaultGraphQuery; } @Test public void testGraphQueryGeoPoint() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("location", new GeoPoint(38.9186, -77.2297, "Reston, VA"), VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("location", new GeoPoint(38.9544, -77.3464, "Reston, VA"), VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); List<Vertex> vertices = toList(graph.query(AUTHORIZATIONS_A) .has("location", GeoCompare.WITHIN, new GeoCircle(38.9186, -77.2297, 1)) .vertices()); assertEquals(1, count(vertices)); GeoPoint geoPoint = (GeoPoint) vertices.get(0).getPropertyValue("location"); assertEquals(38.9186, geoPoint.getLatitude(), 0.001); assertEquals(-77.2297, geoPoint.getLongitude(), 0.001); assertEquals("Reston, VA", geoPoint.getDescription()); vertices = toList(graph.query(AUTHORIZATIONS_A) .has("location", GeoCompare.WITHIN, new GeoCircle(38.9186, -77.2297, 25)) .vertices()); assertEquals(2, count(vertices)); } private Date createDate(int year, int month, int day) { return new GregorianCalendar(year, month, day).getTime(); } private Date createDate(int year, int month, int day, int hour, int min, int sec) { return new GregorianCalendar(year, month, day, hour, min, sec).getTime(); } @Test public void testGraphQueryRange() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("age", 30, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Iterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A) .range("age", 25, 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .range("age", 20, 29) .vertices(); assertEquals(1, count(vertices)); vertices = 
graph.query(AUTHORIZATIONS_A) .range("age", 25, 30) .vertices(); assertEquals(2, count(vertices)); } @Test public void testVertexQuery() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); v1.setProperty("prop1", "value1", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); v2.setProperty("prop1", "value2", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A); v3.setProperty("prop1", "value3", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); Edge ev1v2 = graph.addEdge("e v1->v2", v1, v2, "edgeA", VISIBILITY_A, AUTHORIZATIONS_A); Edge ev1v3 = graph.addEdge("e v1->v3", v1, v3, "edgeA", VISIBILITY_A, AUTHORIZATIONS_A); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); Iterable<Vertex> vertices = v1.query(AUTHORIZATIONS_A).vertices(); assertEquals(2, count(vertices)); assertContains(v2, vertices); assertContains(v3, vertices); vertices = v1.query(AUTHORIZATIONS_A) .has("prop1", "value2") .vertices(); assertEquals(1, count(vertices)); assertContains(v2, vertices); Iterable<Edge> edges = v1.query(AUTHORIZATIONS_A).edges(); assertEquals(2, count(edges)); assertContains(ev1v2, edges); assertContains(ev1v3, edges); edges = v1.query(AUTHORIZATIONS_A).edges(Direction.OUT); assertEquals(2, count(edges)); assertContains(ev1v2, edges); assertContains(ev1v3, edges); } @Test public void testFindPaths() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v4 = graph.addVertex("v4", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge(v1, v2, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v1 -> v2 graph.addEdge(v2, v4, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v2 -> v4 graph.addEdge(v1, v3, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v1 -> v3 graph.addEdge(v3, v4, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v3 -> v4 v1 = 
                graph.getVertex("v1", AUTHORIZATIONS_A);
        v4 = graph.getVertex("v4", AUTHORIZATIONS_A);
        List<Path> paths = toList(graph.findPaths(v1, v4, 2, AUTHORIZATIONS_A));
        // v1 -> v2 -> v4
        // v1 -> v3 -> v4
        assertEquals(2, paths.size());
        // verify both midpoints (v2 and v3) appear across the returned paths
        boolean found2 = false;
        boolean found3 = false;
        for (Path path : paths) {
            assertEquals(3, path.length());
            int i = 0;
            for (Object id : path) {
                if (i == 0) {
                    assertEquals(id, v1.getId());
                } else if (i == 1) {
                    if (v2.getId().equals(id)) {
                        found2 = true;
                    } else if (v3.getId().equals(id)) {
                        found3 = true;
                    } else {
                        fail("center of path is neither v2 or v3 but found " + id);
                    }
                } else if (i == 2) {
                    assertEquals(id, v4.getId());
                }
                i++;
            }
        }
        assertTrue("v2 not found in path", found2);
        assertTrue("v3 not found in path", found3);

        // same diamond walked in the reverse direction: v4 back to v1
        v4 = graph.getVertex("v4", AUTHORIZATIONS_A);
        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        paths = toList(graph.findPaths(v4, v1, 2, AUTHORIZATIONS_A));
        // v4 -> v2 -> v1
        // v4 -> v3 -> v1
        assertEquals(2, paths.size());
        found2 = false;
        found3 = false;
        for (Path path : paths) {
            assertEquals(3, path.length());
            int i = 0;
            for (Object id : path) {
                if (i == 0) {
                    assertEquals(id, v4.getId());
                } else if (i == 1) {
                    if (v2.getId().equals(id)) {
                        found2 = true;
                    } else if (v3.getId().equals(id)) {
                        found3 = true;
                    } else {
                        fail("center of path is neither v2 or v3 but found " + id);
                    }
                } else if (i == 2) {
                    assertEquals(id, v1.getId());
                }
                i++;
            }
        }
        assertTrue("v2 not found in path", found2);
        assertTrue("v3 not found in path", found3);
    }

    // Path finding where multiple path lengths exist between the endpoints;
    // also verifies that too-small hop limits return no paths.
    @Test
    public void testFindPathsMultiplePaths() {
        Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A);
        Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A);
        Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A);
        Vertex v4 = graph.addVertex("v4", VISIBILITY_A, AUTHORIZATIONS_A);
        Vertex v5 = graph.addVertex("v5", VISIBILITY_A, AUTHORIZATIONS_A);

        graph.addEdge(v1, v4, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v1 -> v4
        graph.addEdge(v1, v3, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v1 -> v3
        graph.addEdge(v3, v4, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v3 -> v4
        graph.addEdge(v2, v3, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v2 -> v3
        graph.addEdge(v4, v2, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v4 -> v2
        graph.addEdge(v2, v5, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v2 -> v5

        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        v2 = graph.getVertex("v2", AUTHORIZATIONS_A);
        v5 = graph.getVertex("v5", AUTHORIZATIONS_A);

        List<Path> paths = toList(graph.findPaths(v1, v2, 2, AUTHORIZATIONS_A));
        // v1 -> v4 -> v2
        // v1 -> v3 -> v2
        assertEquals(2, paths.size());
        boolean found3 = false;
        boolean found4 = false;
        for (Path path : paths) {
            assertEquals(3, path.length());
            int i = 0;
            for (Object id : path) {
                if (i == 0) {
                    assertEquals(id, v1.getId());
                } else if (i == 1) {
                    if (v3.getId().equals(id)) {
                        found3 = true;
                    } else if (v4.getId().equals(id)) {
                        found4 = true;
                    } else {
                        fail("center of path is neither v2 or v3 but found " + id);
                    }
                } else if (i == 2) {
                    assertEquals(id, v2.getId());
                }
                i++;
            }
        }
        assertTrue("v3 not found in path", found3);
        assertTrue("v4 not found in path", found4);

        // raising the hop limit to 3 additionally surfaces the two 4-node paths
        paths = toList(graph.findPaths(v1, v2, 3, AUTHORIZATIONS_A));
        // v1 -> v4 -> v2
        // v1 -> v3 -> v2
        // v1 -> v3 -> v4 -> v2
        // v1 -> v4 -> v3 -> v2
        assertEquals(4, paths.size());
        found3 = false;
        found4 = false;
        for (Path path : paths) {
            if (path.length() == 3) {
                int i = 0;
                for (Object id : path) {
                    if (i == 0) {
                        assertEquals(id, v1.getId());
                    } else if (i == 1) {
                        if (v3.getId().equals(id)) {
                            found3 = true;
                        } else if (v4.getId().equals(id)) {
                            found4 = true;
                        } else {
                            fail("center of path is neither v2 or v3 but found " + id);
                        }
                    } else if (i == 2) {
                        assertEquals(id, v2.getId());
                    }
                    i++;
                }
            } else if (path.length() == 4) {
                // length-4 paths are expected but their interiors are not verified here
            } else {
                fail("Invalid path length " + path.length());
            }
        }
        assertTrue("v3 not found in path", found3);
        assertTrue("v4 not found in path", found4);

        // v5 is 3 hops away from v1, so a 2-hop search finds nothing
        paths = toList(graph.findPaths(v1, v5, 2, AUTHORIZATIONS_A));
        assertEquals(0, paths.size());
        paths = toList(graph.findPaths(v1, v5, 3, AUTHORIZATIONS_A));
        // v1 -> v4 -> v2 -> v5
        // v1 -> v3 -> v2 -> v5
        assertEquals(2, paths.size());
        found3 = false;
        found4 = false;
        for (Path path : paths) {
            assertEquals(4, path.length());
            int i = 0;
            for (Object id : path) {
                if (i == 0) {
                    assertEquals(id, v1.getId());
                } else if (i == 1) {
                    if (v3.getId().equals(id)) {
                        found3 = true;
                    } else if (v4.getId().equals(id)) {
                        found4 = true;
                    } else {
                        fail("center of path is neither v2 or v3 but found " + id);
                    }
                } else if (i == 2) {
                    assertEquals(id, v2.getId());
                } else if (i == 3) {
                    assertEquals(id, v5.getId());
                }
                i++;
            }
        }
        assertTrue("v3 not found in path", found3);
        assertTrue("v4 not found in path", found4);
    }

    // Directional neighbor counts: getVertices must respect IN/OUT/BOTH.
    @Test
    public void testGetVerticesFromVertex() {
        Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A);
        Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A);
        Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A);
        Vertex v4 = graph.addVertex("v4", VISIBILITY_A, AUTHORIZATIONS_A);
        graph.addEdge(v1, v2, "knows", VISIBILITY_A, AUTHORIZATIONS_A);
        graph.addEdge(v1, v3, "knows", VISIBILITY_A, AUTHORIZATIONS_A);
        graph.addEdge(v1, v4, "knows", VISIBILITY_A, AUTHORIZATIONS_A);
        graph.addEdge(v2, v3, "knows", VISIBILITY_A, AUTHORIZATIONS_A);

        // v1: three outgoing edges, none incoming
        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        assertEquals(3, count(v1.getVertices(Direction.BOTH, AUTHORIZATIONS_A)));
        assertEquals(3, count(v1.getVertices(Direction.OUT, AUTHORIZATIONS_A)));
        assertEquals(0, count(v1.getVertices(Direction.IN, AUTHORIZATIONS_A)));

        // v2: one in (from v1), one out (to v3)
        v2 = graph.getVertex("v2", AUTHORIZATIONS_A);
        assertEquals(2, count(v2.getVertices(Direction.BOTH, AUTHORIZATIONS_A)));
        assertEquals(1, count(v2.getVertices(Direction.OUT, AUTHORIZATIONS_A)));
        assertEquals(1, count(v2.getVertices(Direction.IN, AUTHORIZATIONS_A)));

        // v3: two incoming edges (from v1 and v2), none outgoing
        v3 = graph.getVertex("v3", AUTHORIZATIONS_A);
        assertEquals(2, count(v3.getVertices(Direction.BOTH, AUTHORIZATIONS_A)));
        assertEquals(0, count(v3.getVertices(Direction.OUT, AUTHORIZATIONS_A)));
        assertEquals(2, count(v3.getVertices(Direction.IN, AUTHORIZATIONS_A)));

        // v4: a single incoming edge (from v1)
        v4 = graph.getVertex("v4", AUTHORIZATIONS_A);
        assertEquals(1, count(v4.getVertices(Direction.BOTH, AUTHORIZATIONS_A)));
        assertEquals(0, count(v4.getVertices(Direction.OUT, AUTHORIZATIONS_A)));
        assertEquals(1, count(v4.getVertices(Direction.IN, AUTHORIZATIONS_A)));
    }

    // An empty visibility string is a valid visibility and round-trips.
    @Test
    public void testBlankVisibilityString() {
        Vertex v = graph.addVertex("v1", VISIBILITY_EMPTY, AUTHORIZATIONS_EMPTY);
        assertNotNull(v);
        assertEquals("v1", v.getId());

        v = graph.getVertex("v1", AUTHORIZATIONS_EMPTY);
        assertNotNull(v);
        assertEquals("v1", v.getId());
        assertEquals(VISIBILITY_EMPTY, v.getVisibility());
    }

    // A prepared mutation must not be visible on the in-memory element until
    // save() is called.
    @Test
    public void testElementMutationDoesntChangeObjectUntilSave() {
        Vertex v = graph.addVertex("v1", VISIBILITY_EMPTY, AUTHORIZATIONS_EMPTY);
        v.setProperty("prop1", "value1", VISIBILITY_A, AUTHORIZATIONS_A_AND_B);

        ElementMutation<Vertex> m = v.prepareMutation()
                .setProperty("prop1", "value2", VISIBILITY_A)
                .setProperty("prop2", "value2", VISIBILITY_A);
        // not yet saved: the vertex still shows the original single property
        assertEquals(1, count(v.getProperties()));
        assertEquals("value1", v.getPropertyValue("prop1"));

        m.save(AUTHORIZATIONS_A_AND_B);
        assertEquals(2, count(v.getProperties()));
        assertEquals("value2", v.getPropertyValue("prop1"));
        assertEquals("value2", v.getPropertyValue("prop2"));
    }

    // findRelatedEdges returns only edges whose BOTH endpoints are in the given
    // id set (the v3->v4 edge is excluded because v4 is not in the set).
    @Test
    public void testFindRelatedEdges() {
        Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A);
        Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A);
        Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A);
        Vertex v4 = graph.addVertex("v4", VISIBILITY_A, AUTHORIZATIONS_A);
        Edge ev1v2 = graph.addEdge("e v1->v2", v1, v2, "", VISIBILITY_A, AUTHORIZATIONS_A);
        Edge ev1v3 = graph.addEdge("e v1->v3", v1, v3, "", VISIBILITY_A, AUTHORIZATIONS_A);
        Edge ev2v3 = graph.addEdge("e v2->v3", v2, v3, "", VISIBILITY_A, AUTHORIZATIONS_A);
        Edge ev3v1 = graph.addEdge("e v3->v1", v3, v1, "", VISIBILITY_A, AUTHORIZATIONS_A);
        graph.addEdge("e v3->v4", v3, v4, "", VISIBILITY_A, AUTHORIZATIONS_A);

        List<Object> vertexIds = new ArrayList<Object>();
        vertexIds.add("v1");
        vertexIds.add("v2");
        vertexIds.add("v3");
        Iterable<Object> edges = toList(graph.findRelatedEdges(vertexIds, AUTHORIZATIONS_A));
        assertEquals(4, count(edges));
        assertContains(ev1v2.getId(), edges);
        assertContains(ev1v3.getId(), edges);
        assertContains(ev2v3.getId(), edges);
        assertContains(ev3v1.getId(), edges);
    }

    // Saving a mutation with no changes must be a harmless no-op.
    @Test
    public void testEmptyPropertyMutation() {
        Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A);
        v1.prepareMutation().save(AUTHORIZATIONS_A_AND_B);
    }

    // TextIndexHint controls which query predicates a property supports:
    // NONE (no index), FULL_TEXT (CONTAINS), EXACT_MATCH (equals), ALL (both).
    @Test
    public void testTextIndex() throws Exception {
        graph.defineProperty("none").dataType(String.class).textIndexHint(TextIndexHint.NONE).define();
        graph.defineProperty("none").dataType(String.class).textIndexHint(TextIndexHint.NONE).define(); // try calling define twice
        graph.defineProperty("both").dataType(String.class).textIndexHint(TextIndexHint.ALL).define();
        graph.defineProperty("fullText").dataType(String.class).textIndexHint(TextIndexHint.FULL_TEXT).define();
        graph.defineProperty("exactMatch").dataType(String.class).textIndexHint(TextIndexHint.EXACT_MATCH).define();

        graph.prepareVertex("v1", VISIBILITY_A)
                .setProperty("none", "Test Value", VISIBILITY_A)
                .setProperty("both", "Test Value", VISIBILITY_A)
                .setProperty("fullText", "Test Value", VISIBILITY_A)
                .setProperty("exactMatch", "Test Value", VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);

        // regardless of index hint, the stored value itself is always readable
        Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        assertEquals("Test Value", v1.getPropertyValue("none"));
        assertEquals("Test Value", v1.getPropertyValue("both"));
        assertEquals("Test Value", v1.getPropertyValue("fullText"));
        assertEquals("Test Value", v1.getPropertyValue("exactMatch"));

        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("both", TextPredicate.CONTAINS, "Test").vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("fullText", TextPredicate.CONTAINS, "Test").vertices()));
        assertEquals("exact match shouldn't match partials",
                0, count(graph.query(AUTHORIZATIONS_A).has("exactMatch", TextPredicate.CONTAINS, "Test").vertices()));
        assertEquals("unindexed property shouldn't match partials",
                0, count(graph.query(AUTHORIZATIONS_A).has("none", TextPredicate.CONTAINS, "Test").vertices()));

        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("both", "Test Value").vertices()));
        assertEquals("default has predicate is equals which shouldn't work for full text",
                0, count(graph.query(AUTHORIZATIONS_A).has("fullText", "Test Value").vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("exactMatch", "Test Value").vertices()));
        assertEquals("default has predicate is equals which shouldn't work for unindexed",
                0, count(graph.query(AUTHORIZATIONS_A).has("none", "Test Value").vertices()));
    }

    // A higher field boost ("b" boosted 2 vs "a" boosted 1) should rank v2
    // ahead of v1; skipped when the backend cannot boost fields.
    @Test
    public void testFieldBoost() throws Exception {
        if (!graph.isFieldBoostSupported()) {
            LOGGER.warn("Boost not supported");
            return;
        }

        graph.defineProperty("a")
                .dataType(String.class)
                .textIndexHint(TextIndexHint.ALL)
                .boost(1)
                .define();
        graph.defineProperty("b")
                .dataType(String.class)
                .textIndexHint(TextIndexHint.ALL)
                .boost(2)
                .define();

        graph.prepareVertex("v1", VISIBILITY_A)
                .setProperty("a", "Test Value", VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);
        graph.prepareVertex("v2", VISIBILITY_A)
                .setProperty("b", "Test Value", VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);

        assertVertexIds(graph.query("Test", AUTHORIZATIONS_A).vertices(), new String[]{"v2", "v1"});
    }

    // Vertices with more/edge connectivity should rank higher: v2 (has an
    // incoming edge) first, then v3 (has an outgoing edge), then v1.
    @Test
    public void testVertexBoost() throws Exception {
        Vertex v1 = graph.prepareVertex("v1", VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);
        Vertex v2 = graph.prepareVertex("v2", VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);
        Vertex v3 = graph.prepareVertex("v3", VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);

        graph.addEdge("e1", v3, v2, "link", VISIBILITY_A, AUTHORIZATIONS_A);

        assertVertexIds(graph.query(AUTHORIZATIONS_A).vertices(), new String[]{"v2", "v3", "v1"});
    }

    // Every supported property value type must round-trip through storage and
    // be queryable by equality (or geo containment for GeoPoint).
    @Test
    public void testValueTypes() throws Exception {
        Date date = createDate(2014, 2, 24, 13, 0, 5);

        graph.prepareVertex("v1", VISIBILITY_A)
                .setProperty("int", 5, VISIBILITY_A)
                .setProperty("bigDecimal", new BigDecimal(10), VISIBILITY_A)
                .setProperty("double", 5.6, VISIBILITY_A)
                .setProperty("float", 6.4f, VISIBILITY_A)
                .setProperty("string", "test", VISIBILITY_A)
                .setProperty("byte", (byte) 5, VISIBILITY_A)
                .setProperty("long", (long) 5, VISIBILITY_A)
                .setProperty("boolean", true, VISIBILITY_A)
                .setProperty("geopoint", new GeoPoint(77, -33), VISIBILITY_A)
                .setProperty("short", (short) 5, VISIBILITY_A)
                .setProperty("date", date, VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);

        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("int", 5).vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("double", 5.6).vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("float", 6.4f).vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("string", "test").vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("byte", 5).vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("long", 5).vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("boolean", true).vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("short", 5).vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("date", date).vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("bigDecimal", 10).vertices()));
        assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("geopoint", GeoCompare.WITHIN, new GeoCircle(77, -33, 1)).vertices()));
    }

    // Altering a vertex's element visibility moves it between authorization
    // scopes; re-applying the same visibility is a no-op.
    @Test
    public void testChangeVisibilityVertex() {
        graph.prepareVertex("v1", VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);
        Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        v1.prepareMutation()
                .alterElementVisibility(VISIBILITY_B)
                .save(AUTHORIZATIONS_A_AND_B);

        // now invisible to A, visible to B
        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        assertNull(v1);
        v1 = graph.getVertex("v1", AUTHORIZATIONS_B);
        assertNotNull(v1);

        // change to same visibility
        v1 = graph.getVertex("v1", AUTHORIZATIONS_B);
        v1.prepareMutation()
                .alterElementVisibility(VISIBILITY_B)
                .save(AUTHORIZATIONS_A_AND_B);

        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        assertNull(v1);
        v1 = graph.getVertex("v1", AUTHORIZATIONS_B);
        assertNotNull(v1);
    }

    // Altering a single property's visibility hides it from authorizations
    // that no longer match; also covers alter+set in one mutation and altering
    // to the property's current visibility.
    @Test
    public void testChangeVisibilityVertexProperties() {
        Map<String, Object> prop1Metadata = new HashMap<String, Object>();
        prop1Metadata.put("prop1_key1", "value1");

        Map<String, Object> prop2Metadata = new HashMap<String, Object>();
        prop2Metadata.put("prop2_key1", "value1");

        graph.prepareVertex("v1", VISIBILITY_A)
                .setProperty("prop1", "value1", prop1Metadata, VISIBILITY_EMPTY)
                .setProperty("prop2", "value2", prop2Metadata, VISIBILITY_EMPTY)
                .save(AUTHORIZATIONS_A_AND_B);

        Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        v1.prepareMutation()
                .alterPropertyVisibility("prop1", VISIBILITY_B)
                .save(AUTHORIZATIONS_A_AND_B);

        // prop1 is now visibility B: hidden from A-only, visible with A+B
        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        assertNull(v1.getProperty("prop1"));
        assertNotNull(v1.getProperty("prop2"));

        v1 = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B);
        assertNotNull(v1.getProperty("prop1"));
        assertNotNull(v1.getProperty("prop2"));

        // alter and set property in one mutation
        v1 = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B);
        v1.prepareMutation()
                .alterPropertyVisibility("prop1", VISIBILITY_A)
                .setProperty("prop1", "value1New", VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);

        v1 = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B);
        assertNotNull(v1.getProperty("prop1"));
        assertEquals("value1New", v1.getPropertyValue("prop1"));

        // alter visibility to the same visibility
        v1 = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B);
        v1.prepareMutation()
                .alterPropertyVisibility("prop1", VISIBILITY_A)
                .setProperty("prop1", "value1New2", VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);

        v1 = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B);
        assertNotNull(v1.getProperty("prop1"));
        assertEquals("value1New2", v1.getPropertyValue("prop1"));
    }

    // Altering an edge's visibility hides it from the old authorization and
    // exposes it to the new one; re-applying the same visibility is a no-op.
    @Test
    public void testChangeVisibilityEdge() {
        Vertex v1 = graph.prepareVertex("v1", VISIBILITY_EMPTY)
                .save(AUTHORIZATIONS_A_AND_B);
        Vertex v2 = graph.prepareVertex("v2", VISIBILITY_EMPTY)
                .save(AUTHORIZATIONS_A_AND_B);
        graph.prepareEdge("e1", v1, v2, "", VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);

        // test that we can see the edge with A and not B
        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        assertEquals(0, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_B)));
        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        assertEquals(1, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_A)));

        // change the edge
        Edge e1 = graph.getEdge("e1", AUTHORIZATIONS_A);
        e1.prepareMutation()
                .alterElementVisibility(VISIBILITY_B)
                .save(AUTHORIZATIONS_A_AND_B);

        // test that we can see the edge with B and not A
        v1 = graph.getVertex("v1", AUTHORIZATIONS_B);
        assertEquals(1, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_B)));
        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        assertEquals(0, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_A)));

        // change the edge visibility to same
        e1 = graph.getEdge("e1", AUTHORIZATIONS_B);
        e1.prepareMutation()
                .alterElementVisibility(VISIBILITY_B)
                .save(AUTHORIZATIONS_A_AND_B);

        // test that we can see the edge with B and not A
        v1 = graph.getVertex("v1", AUTHORIZATIONS_B);
        assertEquals(1, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_B)));
        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        assertEquals(0, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_A)));
    }

    // Altering the visibility of a property that does not exist must throw
    // a SecureGraphException rather than fail silently.
    @Test
    public void testChangeVisibilityOnBadPropertyName() {
        graph.prepareVertex("v1", VISIBILITY_A)
                .setProperty("prop1", "value1", VISIBILITY_EMPTY)
                .setProperty("prop2", "value2", VISIBILITY_B)
                .save(AUTHORIZATIONS_A_AND_B);

        try {
            graph.getVertex("v1", AUTHORIZATIONS_A)
                    .prepareMutation()
                    .alterPropertyVisibility("propBad", VISIBILITY_B)
                    .save(AUTHORIZATIONS_A_AND_B);
            fail("show throw");
        } catch (SecureGraphException ex) {
            assertNotNull(ex);
        }
    }

    // Visibility changes must also work for streaming (large / out-of-row)
    // property values, including property names with special characters.
    @Test
    public void testChangeVisibilityOnStreamingProperty() throws IOException {
        String expectedLargeValue =
                IOUtils.toString(new LargeStringInputStream(LARGE_PROPERTY_VALUE_SIZE));
        PropertyValue propSmall = new StreamingPropertyValue(new ByteArrayInputStream("value1".getBytes()), String.class);
        PropertyValue propLarge = new StreamingPropertyValue(new ByteArrayInputStream(expectedLargeValue.getBytes()), String.class);
        // deliberately awkward property name to exercise key escaping
        String largePropertyName = "propLarge/\\*!@#$%^&*()[]{}|";
        graph.prepareVertex("v1", VISIBILITY_A)
                .setProperty("propSmall", propSmall, VISIBILITY_A)
                .setProperty(largePropertyName, propLarge, VISIBILITY_A)
                .save(AUTHORIZATIONS_A_AND_B);
        assertEquals(2, count(graph.getVertex("v1", AUTHORIZATIONS_A).getProperties()));

        // moving each property to visibility B removes it from A's view
        graph.getVertex("v1", AUTHORIZATIONS_A)
                .prepareMutation()
                .alterPropertyVisibility("propSmall", VISIBILITY_B)
                .save(AUTHORIZATIONS_A_AND_B);
        assertEquals(1, count(graph.getVertex("v1", AUTHORIZATIONS_A).getProperties()));

        graph.getVertex("v1", AUTHORIZATIONS_A)
                .prepareMutation()
                .alterPropertyVisibility(largePropertyName, VISIBILITY_B)
                .save(AUTHORIZATIONS_A_AND_B);
        assertEquals(0, count(graph.getVertex("v1", AUTHORIZATIONS_A).getProperties()));

        assertEquals(2, count(graph.getVertex("v1", AUTHORIZATIONS_A_AND_B).getProperties()));
    }

    // Property metadata can be altered in place, both when the property was
    // saved with metadata and when it was saved with null metadata.
    @Test
    public void testChangePropertyMetadata() {
        Map<String, Object> prop1Metadata = new HashMap<String, Object>();
        prop1Metadata.put("prop1_key1", "valueOld");

        graph.prepareVertex("v1", VISIBILITY_A)
                .setProperty("prop1", "value1", prop1Metadata, VISIBILITY_EMPTY)
                .setProperty("prop2", "value2", null, VISIBILITY_EMPTY)
                .save(AUTHORIZATIONS_A_AND_B);

        Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        v1.prepareMutation()
                .alterPropertyMetadata("prop1", "prop1_key1", "valueNew")
                .save(AUTHORIZATIONS_A_AND_B);
        // visible both on the in-memory element and after a re-fetch
        assertEquals("valueNew", v1.getProperty("prop1").getMetadata().get("prop1_key1"));

        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        assertEquals("valueNew", v1.getProperty("prop1").getMetadata().get("prop1_key1"));

        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        v1.prepareMutation()
                .alterPropertyMetadata("prop2", "prop2_key1", "valueNew")
                .save(AUTHORIZATIONS_A_AND_B);
        assertEquals("valueNew", v1.getProperty("prop2").getMetadata().get("prop2_key1"));

        v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        assertEquals("valueNew", v1.getProperty("prop2").getMetadata().get("prop2_key1"));
    }

    // isVisibilityValid: a visibility is valid only when the authorizations
    // can satisfy it; the empty visibility is valid for everyone.
    @Test
    public void testIsVisibilityValid() {
        assertFalse(graph.isVisibilityValid(VISIBILITY_A, AUTHORIZATIONS_C));
        assertTrue(graph.isVisibilityValid(VISIBILITY_B, AUTHORIZATIONS_A_AND_B));
        assertTrue(graph.isVisibilityValid(VISIBILITY_B, AUTHORIZATIONS_B));
        assertTrue(graph.isVisibilityValid(VISIBILITY_EMPTY, AUTHORIZATIONS_A));
    }

    // Regression-style test: updating a property with a lower authorization
    // must not clobber sibling properties the writer cannot see (prop2/B must
    // still be queryable afterwards).
    @Test
    public void testModifyVertexWithLowerAuthorizationThenOtherProperties() {
        graph.prepareVertex("v1", VISIBILITY_A)
                .setProperty("prop1", "value1", VISIBILITY_A)
                .setProperty("prop2", "value2", VISIBILITY_B)
                .save(AUTHORIZATIONS_A_AND_B);
        graph.flush();

        Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A);
        v1.setProperty("prop1", "value1New", VISIBILITY_A, AUTHORIZATIONS_A);
        graph.flush();

        Iterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A_AND_B)
                .has("prop2", "value2")
                .vertices();
        assertVertexIds(vertices, new String[]{"v1"});
    }

    // Asserts that the iterable yields exactly the given vertex ids, in order.
    protected void assertVertexIds(Iterable<Vertex> vertices, String[] ids) {
        List<Vertex> verticesList = toList(vertices);
        assertEquals("ids length mismatch", ids.length, verticesList.size());
        for (int i = 0; i < ids.length; i++) {
            assertEquals("at offset: " + i, ids[i], verticesList.get(i).getId());
        }
    }
}
securegraph-test/src/main/java/org/securegraph/test/GraphTestBase.java
package org.securegraph.test; import org.apache.commons.io.IOUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; import org.securegraph.*; import org.securegraph.mutation.ElementMutation; import org.securegraph.property.PropertyValue; import org.securegraph.property.StreamingPropertyValue; import org.securegraph.query.*; import org.securegraph.test.util.LargeStringInputStream; import org.securegraph.type.GeoCircle; import org.securegraph.type.GeoPoint; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.ByteArrayInputStream; import java.io.IOException; import java.math.BigDecimal; import java.util.*; import static org.junit.Assert.*; import static org.securegraph.test.util.IterableUtils.assertContains; import static org.securegraph.util.IterableUtils.count; import static org.securegraph.util.IterableUtils.toList; @RunWith(JUnit4.class) public abstract class GraphTestBase { private static final Logger LOGGER = LoggerFactory.getLogger(GraphTestBase.class); public static final Visibility VISIBILITY_A = new Visibility("a"); public static final Visibility VISIBILITY_B = new Visibility("b"); public static final Visibility VISIBILITY_EMPTY = new Visibility(""); public final Authorizations AUTHORIZATIONS_A; public final Authorizations AUTHORIZATIONS_B; public final Authorizations AUTHORIZATIONS_C; public final Authorizations AUTHORIZATIONS_A_AND_B; public final Authorizations AUTHORIZATIONS_EMPTY; public static final int LARGE_PROPERTY_VALUE_SIZE = 1024 + 1; protected Graph graph; protected abstract Graph createGraph() throws Exception; public Graph getGraph() { return graph; } public GraphTestBase() { AUTHORIZATIONS_A = createAuthorizations("a"); AUTHORIZATIONS_B = createAuthorizations("b"); AUTHORIZATIONS_C = createAuthorizations("c"); AUTHORIZATIONS_A_AND_B = createAuthorizations("a", "b"); AUTHORIZATIONS_EMPTY = createAuthorizations(); } protected abstract 
Authorizations createAuthorizations(String... auths); @Before public void before() throws Exception { graph = createGraph(); } @After public void after() throws Exception { graph.shutdown(); graph = null; } @Test public void testAddVertexWithId() { Vertex v = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); assertNotNull(v); assertEquals("v1", v.getId()); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertNotNull(v); assertEquals("v1", v.getId()); assertEquals(VISIBILITY_A, v.getVisibility()); v = graph.getVertex("", AUTHORIZATIONS_A); assertNull(v); v = graph.getVertex(null, AUTHORIZATIONS_A); assertNull(v); } @Test public void testAddVertexWithoutId() { Vertex v = graph.addVertex(VISIBILITY_A, AUTHORIZATIONS_A); assertNotNull(v); Object vertexId = v.getId(); assertNotNull(vertexId); v = graph.getVertex(vertexId, AUTHORIZATIONS_A); assertNotNull(v); assertNotNull(vertexId); } @Test public void testAddStreamingPropertyValue() throws IOException, InterruptedException { String expectedLargeValue = IOUtils.toString(new LargeStringInputStream(LARGE_PROPERTY_VALUE_SIZE)); PropertyValue propSmall = new StreamingPropertyValue(new ByteArrayInputStream("value1".getBytes()), String.class); PropertyValue propLarge = new StreamingPropertyValue(new ByteArrayInputStream(expectedLargeValue.getBytes()), String.class); String largePropertyName = "propLarge/\\*!@#$%^&*()[]{}|"; Vertex v1 = graph.prepareVertex("v1", VISIBILITY_A) .setProperty("propSmall", propSmall, VISIBILITY_A) .setProperty(largePropertyName, propLarge, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Iterable<Object> propSmallValues = v1.getPropertyValues("propSmall"); assertEquals(1, count(propSmallValues)); Object propSmallValue = propSmallValues.iterator().next(); assertTrue("propSmallValue was " + propSmallValue.getClass().getName(), propSmallValue instanceof StreamingPropertyValue); StreamingPropertyValue value = (StreamingPropertyValue) propSmallValue; assertEquals(String.class, value.getValueType()); 
assertEquals("value1".getBytes().length, value.getLength()); assertEquals("value1", IOUtils.toString(value.getInputStream())); assertEquals("value1", IOUtils.toString(value.getInputStream())); Iterable<Object> propLargeValues = v1.getPropertyValues(largePropertyName); assertEquals(1, count(propLargeValues)); Object propLargeValue = propLargeValues.iterator().next(); assertTrue(largePropertyName + " was " + propLargeValue.getClass().getName(), propLargeValue instanceof StreamingPropertyValue); value = (StreamingPropertyValue) propLargeValue; assertEquals(String.class, value.getValueType()); assertEquals(expectedLargeValue.getBytes().length, value.getLength()); assertEquals(expectedLargeValue, IOUtils.toString(value.getInputStream())); assertEquals(expectedLargeValue, IOUtils.toString(value.getInputStream())); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); propSmallValues = v1.getPropertyValues("propSmall"); assertEquals(1, count(propSmallValues)); propSmallValue = propSmallValues.iterator().next(); assertTrue("propSmallValue was " + propSmallValue.getClass().getName(), propSmallValue instanceof StreamingPropertyValue); value = (StreamingPropertyValue) propSmallValue; assertEquals(String.class, value.getValueType()); assertEquals("value1".getBytes().length, value.getLength()); assertEquals("value1", IOUtils.toString(value.getInputStream())); assertEquals("value1", IOUtils.toString(value.getInputStream())); propLargeValues = v1.getPropertyValues(largePropertyName); assertEquals(1, count(propLargeValues)); propLargeValue = propLargeValues.iterator().next(); assertTrue(largePropertyName + " was " + propLargeValue.getClass().getName(), propLargeValue instanceof StreamingPropertyValue); value = (StreamingPropertyValue) propLargeValue; assertEquals(String.class, value.getValueType()); assertEquals(expectedLargeValue.getBytes().length, value.getLength()); assertEquals(expectedLargeValue, IOUtils.toString(value.getInputStream())); assertEquals(expectedLargeValue, 
IOUtils.toString(value.getInputStream())); } @Test public void testAddVertexPropertyWithMetadata() { Map<String, Object> prop1Metadata = new HashMap<String, Object>(); prop1Metadata.put("metadata1", "metadata1Value"); graph.prepareVertex("v1", VISIBILITY_A) .setProperty("prop1", "value1", prop1Metadata, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Vertex v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(1, count(v.getProperties("prop1"))); Property prop1 = v.getProperties("prop1").iterator().next(); prop1Metadata = prop1.getMetadata(); assertNotNull(prop1Metadata); assertEquals(1, prop1Metadata.keySet().size()); assertEquals("metadata1Value", prop1Metadata.get("metadata1")); prop1Metadata.put("metadata2", "metadata2Value"); v.prepareMutation() .setProperty("prop1", "value1", prop1Metadata, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(1, count(v.getProperties("prop1"))); prop1 = v.getProperties("prop1").iterator().next(); prop1Metadata = prop1.getMetadata(); assertEquals(2, prop1Metadata.keySet().size()); assertEquals("metadata1Value", prop1Metadata.get("metadata1")); assertEquals("metadata2Value", prop1Metadata.get("metadata2")); // make sure we clear out old values prop1Metadata = new HashMap<String, Object>(); v.setProperty("prop1", "value1", prop1Metadata, VISIBILITY_A, AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(1, count(v.getProperties("prop1"))); prop1 = v.getProperties("prop1").iterator().next(); prop1Metadata = prop1.getMetadata(); assertEquals(0, prop1Metadata.keySet().size()); } @Test public void testAddVertexWithProperties() { Vertex v = graph.prepareVertex("v1", VISIBILITY_A) .setProperty("prop1", "value1", VISIBILITY_A) .setProperty("prop2", "value2", VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); assertEquals(1, count(v.getProperties("prop1"))); assertEquals("value1", v.getPropertyValues("prop1").iterator().next()); assertEquals(1, 
count(v.getProperties("prop2"))); assertEquals("value2", v.getPropertyValues("prop2").iterator().next()); v = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B); assertEquals(1, count(v.getProperties("prop1"))); assertEquals("value1", v.getPropertyValues("prop1").iterator().next()); assertEquals(1, count(v.getProperties("prop2"))); assertEquals("value2", v.getPropertyValues("prop2").iterator().next()); } @Test public void testAddVertexWithPropertiesWithTwoDifferentVisibilities() { Vertex v = graph.prepareVertex("v1", VISIBILITY_EMPTY) .setProperty("prop1", "value1a", VISIBILITY_A) .setProperty("prop1", "value1b", VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); assertEquals(2, count(v.getProperties("prop1"))); v = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B); assertEquals(2, count(v.getProperties("prop1"))); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(1, count(v.getProperties("prop1"))); assertEquals("value1a", v.getPropertyValue("prop1")); v = graph.getVertex("v1", AUTHORIZATIONS_B); assertEquals(1, count(v.getProperties("prop1"))); assertEquals("value1b", v.getPropertyValue("prop1")); } @Test public void testMultivaluedProperties() { Vertex v = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); v.prepareMutation() .addPropertyValue("propid1a", "prop1", "value1a", VISIBILITY_A) .addPropertyValue("propid2a", "prop2", "value2a", VISIBILITY_A) .addPropertyValue("propid3a", "prop3", "value3a", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals("value1a", v.getPropertyValues("prop1").iterator().next()); assertEquals("value2a", v.getPropertyValues("prop2").iterator().next()); assertEquals("value3a", v.getPropertyValues("prop3").iterator().next()); assertEquals(3, count(v.getProperties())); v.prepareMutation() .addPropertyValue("propid1a", "prop1", "value1b", VISIBILITY_A) .addPropertyValue("propid2a", "prop2", "value2b", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); 
assertEquals(1, count(v.getPropertyValues("prop1"))); assertEquals("value1b", v.getPropertyValues("prop1").iterator().next()); assertEquals(1, count(v.getPropertyValues("prop2"))); assertEquals("value2b", v.getPropertyValues("prop2").iterator().next()); assertEquals(1, count(v.getPropertyValues("prop3"))); assertEquals("value3a", v.getPropertyValues("prop3").iterator().next()); assertEquals(3, count(v.getProperties())); v.addPropertyValue("propid1b", "prop1", "value1a-new", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertContains("value1b", v.getPropertyValues("prop1")); assertContains("value1a-new", v.getPropertyValues("prop1")); assertEquals(4, count(v.getProperties())); } @Test public void testMultivaluedPropertyOrder() { graph.prepareVertex("v1", VISIBILITY_A) .addPropertyValue("a", "prop", "a", VISIBILITY_A) .addPropertyValue("aa", "prop", "aa", VISIBILITY_A) .addPropertyValue("b", "prop", "b", VISIBILITY_A) .addPropertyValue("0", "prop", "0", VISIBILITY_A) .addPropertyValue("A", "prop", "A", VISIBILITY_A) .addPropertyValue("Z", "prop", "Z", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals("0", v1.getPropertyValue("prop", 0)); assertEquals("A", v1.getPropertyValue("prop", 1)); assertEquals("Z", v1.getPropertyValue("prop", 2)); assertEquals("a", v1.getPropertyValue("prop", 3)); assertEquals("aa", v1.getPropertyValue("prop", 4)); assertEquals("b", v1.getPropertyValue("prop", 5)); } @Test public void testRemoveProperty() { Vertex v = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); v.prepareMutation() .addPropertyValue("propid1a", "prop1", "value1a", VISIBILITY_A) .addPropertyValue("propid1b", "prop1", "value1b", VISIBILITY_A) .addPropertyValue("propid2a", "prop2", "value2a", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); v = graph.getVertex("v1", AUTHORIZATIONS_A); v.removeProperty("prop1", AUTHORIZATIONS_A_AND_B); assertEquals(1, 
count(v.getProperties())); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(1, count(v.getProperties())); v.removeProperty("propid2a", "prop2", AUTHORIZATIONS_A_AND_B); assertEquals(0, count(v.getProperties())); v = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(0, count(v.getProperties())); } @Test public void testAddVertexWithVisibility() { graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); graph.addVertex("v2", VISIBILITY_B, AUTHORIZATIONS_A); Iterable<Vertex> cVertices = graph.getVertices(AUTHORIZATIONS_C); assertEquals(0, count(cVertices)); Iterable<Vertex> aVertices = graph.getVertices(AUTHORIZATIONS_A); assertEquals(1, count(aVertices)); assertEquals("v1", aVertices.iterator().next().getId()); Iterable<Vertex> bVertices = graph.getVertices(AUTHORIZATIONS_B); assertEquals(1, count(bVertices)); assertEquals("v2", bVertices.iterator().next().getId()); Iterable<Vertex> allVertices = graph.getVertices(AUTHORIZATIONS_A_AND_B); assertEquals(2, count(allVertices)); } @Test public void testGetVerticesWithIds() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("prop1", "v1", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v1b", VISIBILITY_A) .setProperty("prop1", "v1b", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("prop1", "v2", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v3", VISIBILITY_A) .setProperty("prop1", "v3", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); List<Object> ids = new ArrayList<Object>(); ids.add("v2"); ids.add("v1"); Iterable<Vertex> vertices = graph.getVertices(ids, AUTHORIZATIONS_A); boolean foundV1 = false, foundV2 = false; for (Vertex v : vertices) { if (v.getId().equals("v1")) { assertEquals("v1", v.getPropertyValue("prop1")); foundV1 = true; } else if (v.getId().equals("v2")) { assertEquals("v2", v.getPropertyValue("prop1")); foundV2 = true; } else { assertTrue("Unexpected vertex id: " + v.getId(), false); } } assertTrue("v1 not 
found", foundV1); assertTrue("v2 not found", foundV2); List<Vertex> verticesInOrder = graph.getVerticesInOrder(ids, AUTHORIZATIONS_A); assertEquals(2, verticesInOrder.size()); assertEquals("v2", verticesInOrder.get(0).getId()); assertEquals("v1", verticesInOrder.get(1).getId()); } @Test public void testGetEdgesWithIds() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A); graph.prepareEdge("e1", v1, v2, "", VISIBILITY_A) .setProperty("prop1", "e1", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareEdge("e1a", v1, v2, "", VISIBILITY_A) .setProperty("prop1", "e1a", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareEdge("e2", v1, v3, "", VISIBILITY_A) .setProperty("prop1", "e2", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareEdge("e3", v2, v3, "", VISIBILITY_A) .setProperty("prop1", "e3", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); List<Object> ids = new ArrayList<Object>(); ids.add("e1"); ids.add("e2"); Iterable<Edge> edges = graph.getEdges(ids, AUTHORIZATIONS_A); boolean foundE1 = false, foundE2 = false; for (Edge e : edges) { if (e.getId().equals("e1")) { assertEquals("e1", e.getPropertyValue("prop1")); foundE1 = true; } else if (e.getId().equals("e2")) { assertEquals("e2", e.getPropertyValue("prop1")); foundE2 = true; } else { assertTrue("Unexpected vertex id: " + e.getId(), false); } } assertTrue("e1 not found", foundE1); assertTrue("e2 not found", foundE2); } @Test public void testRemoveVertex() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); assertEquals(1, count(graph.getVertices(AUTHORIZATIONS_A))); graph.removeVertex(v1, AUTHORIZATIONS_A); assertEquals(0, count(graph.getVertices(AUTHORIZATIONS_A))); } @Test public void testRemoveVertexWithProperties() { Vertex v1 = graph.prepareVertex("v1", VISIBILITY_A) .setProperty("prop1", "value1", VISIBILITY_B) 
.save(AUTHORIZATIONS_A_AND_B); assertEquals(1, count(graph.getVertices(AUTHORIZATIONS_A))); graph.removeVertex(v1, AUTHORIZATIONS_A); assertEquals(0, count(graph.getVertices(AUTHORIZATIONS_A_AND_B))); } @Test public void testAddEdge() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); Edge e = graph.addEdge("e1", v1, v2, "label1", VISIBILITY_A, AUTHORIZATIONS_A); assertNotNull(e); assertEquals("e1", e.getId()); assertEquals("label1", e.getLabel()); assertEquals("v1", e.getVertexId(Direction.OUT)); assertEquals(v1, e.getVertex(Direction.OUT, AUTHORIZATIONS_A)); assertEquals("v2", e.getVertexId(Direction.IN)); assertEquals(v2, e.getVertex(Direction.IN, AUTHORIZATIONS_A)); assertEquals(VISIBILITY_A, e.getVisibility()); e = graph.getEdge("e1", AUTHORIZATIONS_B); assertNull(e); e = graph.getEdge("e1", AUTHORIZATIONS_A); assertNotNull(e); assertEquals("e1", e.getId()); assertEquals("label1", e.getLabel()); assertEquals("v1", e.getVertexId(Direction.OUT)); assertEquals(v1, e.getVertex(Direction.OUT, AUTHORIZATIONS_A)); assertEquals("v2", e.getVertexId(Direction.IN)); assertEquals(v2, e.getVertex(Direction.IN, AUTHORIZATIONS_A)); assertEquals(VISIBILITY_A, e.getVisibility()); } @Test public void testGetEdge() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e1to2label1", v1, v2, "label1", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e1to2label2", v1, v2, "label2", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e2to1", v2, v1, "label1", VISIBILITY_A, AUTHORIZATIONS_A); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(3, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_A))); assertEquals(2, count(v1.getEdges(Direction.OUT, AUTHORIZATIONS_A))); assertEquals(1, count(v1.getEdges(Direction.IN, AUTHORIZATIONS_A))); assertEquals(3, count(v1.getEdges(v2, Direction.BOTH, 
AUTHORIZATIONS_A))); assertEquals(2, count(v1.getEdges(v2, Direction.OUT, AUTHORIZATIONS_A))); assertEquals(1, count(v1.getEdges(v2, Direction.IN, AUTHORIZATIONS_A))); assertEquals(2, count(v1.getEdges(v2, Direction.BOTH, "label1", AUTHORIZATIONS_A))); assertEquals(1, count(v1.getEdges(v2, Direction.OUT, "label1", AUTHORIZATIONS_A))); assertEquals(1, count(v1.getEdges(v2, Direction.IN, "label1", AUTHORIZATIONS_A))); assertEquals(3, count(v1.getEdges(v2, Direction.BOTH, new String[]{"label1", "label2"}, AUTHORIZATIONS_A))); assertEquals(2, count(v1.getEdges(v2, Direction.OUT, new String[]{"label1", "label2"}, AUTHORIZATIONS_A))); assertEquals(1, count(v1.getEdges(v2, Direction.IN, new String[]{"label1", "label2"}, AUTHORIZATIONS_A))); } @Test public void testAddEdgeWithProperties() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); graph.prepareEdge("e1", v1, v2, "label1", VISIBILITY_A) .setProperty("propA", "valueA", VISIBILITY_A) .setProperty("propB", "valueB", VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); Edge e = graph.getEdge("e1", AUTHORIZATIONS_A); assertEquals(1, count(e.getProperties())); assertEquals("valueA", e.getPropertyValues("propA").iterator().next()); assertEquals(0, count(e.getPropertyValues("propB"))); e = graph.getEdge("e1", AUTHORIZATIONS_A_AND_B); assertEquals(2, count(e.getProperties())); assertEquals("valueA", e.getPropertyValues("propA").iterator().next()); assertEquals("valueB", e.getPropertyValues("propB").iterator().next()); assertEquals("valueA", e.getPropertyValue("propA")); assertEquals("valueB", e.getPropertyValue("propB")); } @Test public void testRemoveEdge() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e1", v1, v2, "label1", VISIBILITY_A, AUTHORIZATIONS_A); assertEquals(1, count(graph.getEdges(AUTHORIZATIONS_A))); try { graph.removeEdge("e1", 
AUTHORIZATIONS_B); } catch (IllegalArgumentException e) { // expected } assertEquals(1, count(graph.getEdges(AUTHORIZATIONS_A))); graph.removeEdge("e1", AUTHORIZATIONS_A); assertEquals(0, count(graph.getEdges(AUTHORIZATIONS_A))); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(0, count(v1.getVertices(Direction.BOTH, AUTHORIZATIONS_A))); v2 = graph.getVertex("v2", AUTHORIZATIONS_A); assertEquals(0, count(v2.getVertices(Direction.BOTH, AUTHORIZATIONS_A))); } @Test public void testAddEdgeWithVisibility() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e1", v1, v2, "edgeA", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e2", v1, v2, "edgeB", VISIBILITY_B, AUTHORIZATIONS_B); Iterable<Edge> aEdges = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B).getEdges(Direction.BOTH, AUTHORIZATIONS_A); assertEquals(1, count(aEdges)); Edge e1 = aEdges.iterator().next(); assertNotNull(e1); assertEquals("edgeA", e1.getLabel()); Iterable<Edge> bEdges = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B).getEdges(Direction.BOTH, AUTHORIZATIONS_B); assertEquals(1, count(bEdges)); Edge e2 = bEdges.iterator().next(); assertNotNull(e2); assertEquals("edgeB", e2.getLabel()); Iterable<Edge> allEdges = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B).getEdges(Direction.BOTH, AUTHORIZATIONS_A_AND_B); assertEquals(2, count(allEdges)); } @Test public void testGraphQuery() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e1", v1, v2, "edgeA", VISIBILITY_A, AUTHORIZATIONS_A); Iterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A).vertices(); assertEquals(2, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A).skip(1).vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A).limit(1).vertices(); assertEquals(1, count(vertices)); vertices = 
graph.query(AUTHORIZATIONS_A).skip(1).limit(1).vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A).skip(2).vertices(); assertEquals(0, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A).skip(1).limit(2).vertices(); assertEquals(1, count(vertices)); Iterable<Edge> edges = graph.query(AUTHORIZATIONS_A).edges(); assertEquals(1, count(edges)); } @Test public void testGraphQueryWithQueryString() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); v1.setProperty("description", "This is vertex 1 - dog.", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); v2.setProperty("description", "This is vertex 2 - cat.", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); Iterable<Vertex> vertices = graph.query("vertex", AUTHORIZATIONS_A).vertices(); assertEquals(2, count(vertices)); vertices = graph.query("dog", AUTHORIZATIONS_A).vertices(); assertEquals(1, count(vertices)); vertices = graph.query("dog", AUTHORIZATIONS_B).vertices(); assertEquals(0, count(vertices)); } @Test public void testFacetedResults() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("gender", "male", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("gender", "male", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v3", VISIBILITY_A) .setProperty("gender", "female", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v4", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.flush(); Query q = graph.query(AUTHORIZATIONS_A); if (q instanceof QuerySupportingFacetedResults) { ((QuerySupportingFacetedResults) q).addFacet(new TermFacet("f1", "gender")); Iterable<Vertex> results = q.vertices(); assertEquals(4, count(results)); assertTrue("results was not of type IterableWithFacetedResults: " + results.getClass().getName(), results instanceof IterableWithFacetedResults); FacetedResult facetedResult = ((IterableWithFacetedResults) 
results).getFacetedResult("f1"); assertNotNull("facetedResults was null for name 'f1'", facetedResult); assertEquals(2, count(facetedResult.getTerms())); for (FacetedTerm facetedTerm : facetedResult.getTerms()) { if (facetedTerm.getTerm().equals("male")) { assertEquals(2, facetedTerm.getCount()); } else if (facetedTerm.getTerm().equals("female")) { assertEquals(1, facetedTerm.getCount()); } else { throw new RuntimeException("Unexpected FacetedTerm: " + facetedTerm.getTerm()); } } } else { LOGGER.warn("query does not support faceted results: " + q.getClass().getName()); } } @Test public void testGraphQueryHas() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_A) .setProperty("birthDate", new DateOnly(1989, 1, 5), VISIBILITY_A) .setProperty("lastAccessed", createDate(2014, 2, 24, 13, 0, 5), VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("age", 30, VISIBILITY_A) .setProperty("birthDate", new DateOnly(1984, 1, 5), VISIBILITY_A) .setProperty("lastAccessed", createDate(2014, 2, 25, 13, 0, 5), VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Iterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.EQUAL, 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("birthDate", Compare.EQUAL, createDate(1989, 1, 5)) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("lastAccessed", Compare.EQUAL, createDate(2014, 2, 24, 13, 0, 5)) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.GREATER_THAN_EQUAL, 25) .vertices(); assertEquals(2, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.IN, new Integer[]{25}) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.IN, 
new Integer[]{25, 30}) .vertices(); assertEquals(2, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.GREATER_THAN, 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.LESS_THAN, 26) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.LESS_THAN_EQUAL, 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.NOT_EQUAL, 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("lastAccessed", Compare.EQUAL, new DateOnly(2014, 2, 24)) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query("*", AUTHORIZATIONS_A) .has("age", Compare.IN, new Integer[]{25, 30}) .vertices(); assertEquals(2, count(vertices)); } @Test public void testGraphQueryVertexHasWithSecurity() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); Iterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A) .has("age", Compare.EQUAL, 25) .vertices(); assertEquals(1, count(vertices)); } @Test public void testGraphQueryEdgeHasWithSecurity() { Vertex v1 = graph.prepareVertex("v1", VISIBILITY_A).save(AUTHORIZATIONS_A_AND_B); Vertex v2 = graph.prepareVertex("v2", VISIBILITY_A).save(AUTHORIZATIONS_A_AND_B); Vertex v3 = graph.prepareVertex("v3", VISIBILITY_A).save(AUTHORIZATIONS_A_AND_B); graph.prepareEdge("e1", v1, v2, "edge", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareEdge("e2", v1, v3, "edge", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); Iterable<Edge> edges = graph.query(AUTHORIZATIONS_A) .has("age", Compare.EQUAL, 25) .edges(); assertEquals(1, count(edges)); } @Test public void 
testGraphQueryHasWithSpaces() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("name", "Joe Ferner", VISIBILITY_A) .setProperty("propWithHyphen", "hyphen-word", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("name", "Joe Smith", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Iterable<Vertex> vertices = graph.query("Ferner", AUTHORIZATIONS_A) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query("joe", AUTHORIZATIONS_A) .vertices(); assertEquals(2, count(vertices)); if (!isUsingDefaultQuery(graph)) { vertices = graph.query("joe AND ferner", AUTHORIZATIONS_A) .vertices(); assertEquals(1, count(vertices)); } if (!isUsingDefaultQuery(graph)) { vertices = graph.query("joe smith", AUTHORIZATIONS_A) .vertices(); List<Vertex> verticesList = toList(vertices); assertEquals(2, verticesList.size()); assertEquals("v2", verticesList.get(0).getId()); assertEquals("v1", verticesList.get(1).getId()); } vertices = graph.query(AUTHORIZATIONS_A) .has("name", TextPredicate.CONTAINS, "Ferner") .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("name", TextPredicate.CONTAINS, "Joe") .has("name", TextPredicate.CONTAINS, "Ferner") .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("name", TextPredicate.CONTAINS, "Joe Ferner") .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .has("propWithHyphen", TextPredicate.CONTAINS, "hyphen-word") .vertices(); assertEquals(1, count(vertices)); } @Test public void testGraphQueryHasWithSpacesAndFieldedQueryString() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("name", "Joe Ferner", VISIBILITY_A) .setProperty("propWithHyphen", "hyphen-word", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("name", "Joe Smith", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); if (!isUsingDefaultQuery(graph)) { Iterable<Vertex> 
vertices = graph.query("name:\"joe ferner\"", AUTHORIZATIONS_A) .vertices(); assertEquals(1, count(vertices)); } } protected boolean isUsingDefaultQuery(Graph graph) { return graph.query(AUTHORIZATIONS_A) instanceof DefaultGraphQuery; } @Test public void testGraphQueryGeoPoint() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("location", new GeoPoint(38.9186, -77.2297, "Reston, VA"), VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("location", new GeoPoint(38.9544, -77.3464, "Reston, VA"), VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); List<Vertex> vertices = toList(graph.query(AUTHORIZATIONS_A) .has("location", GeoCompare.WITHIN, new GeoCircle(38.9186, -77.2297, 1)) .vertices()); assertEquals(1, count(vertices)); GeoPoint geoPoint = (GeoPoint) vertices.get(0).getPropertyValue("location"); assertEquals(38.9186, geoPoint.getLatitude(), 0.001); assertEquals(-77.2297, geoPoint.getLongitude(), 0.001); assertEquals("Reston, VA", geoPoint.getDescription()); vertices = toList(graph.query(AUTHORIZATIONS_A) .has("location", GeoCompare.WITHIN, new GeoCircle(38.9186, -77.2297, 25)) .vertices()); assertEquals(2, count(vertices)); } private Date createDate(int year, int month, int day) { return new GregorianCalendar(year, month, day).getTime(); } private Date createDate(int year, int month, int day, int hour, int min, int sec) { return new GregorianCalendar(year, month, day, hour, min, sec).getTime(); } @Test public void testGraphQueryRange() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("age", 25, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("age", 30, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Iterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A) .range("age", 25, 25) .vertices(); assertEquals(1, count(vertices)); vertices = graph.query(AUTHORIZATIONS_A) .range("age", 20, 29) .vertices(); assertEquals(1, count(vertices)); vertices = 
graph.query(AUTHORIZATIONS_A) .range("age", 25, 30) .vertices(); assertEquals(2, count(vertices)); } @Test public void testVertexQuery() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); v1.setProperty("prop1", "value1", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); v2.setProperty("prop1", "value2", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A); v3.setProperty("prop1", "value3", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); Edge ev1v2 = graph.addEdge("e v1->v2", v1, v2, "edgeA", VISIBILITY_A, AUTHORIZATIONS_A); Edge ev1v3 = graph.addEdge("e v1->v3", v1, v3, "edgeA", VISIBILITY_A, AUTHORIZATIONS_A); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); Iterable<Vertex> vertices = v1.query(AUTHORIZATIONS_A).vertices(); assertEquals(2, count(vertices)); assertContains(v2, vertices); assertContains(v3, vertices); vertices = v1.query(AUTHORIZATIONS_A) .has("prop1", "value2") .vertices(); assertEquals(1, count(vertices)); assertContains(v2, vertices); Iterable<Edge> edges = v1.query(AUTHORIZATIONS_A).edges(); assertEquals(2, count(edges)); assertContains(ev1v2, edges); assertContains(ev1v3, edges); edges = v1.query(AUTHORIZATIONS_A).edges(Direction.OUT); assertEquals(2, count(edges)); assertContains(ev1v2, edges); assertContains(ev1v3, edges); } @Test public void testFindPaths() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v4 = graph.addVertex("v4", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge(v1, v2, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v1 -> v2 graph.addEdge(v2, v4, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v2 -> v4 graph.addEdge(v1, v3, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v1 -> v3 graph.addEdge(v3, v4, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v3 -> v4 v1 = 
graph.getVertex("v1", AUTHORIZATIONS_A); v4 = graph.getVertex("v4", AUTHORIZATIONS_A); List<Path> paths = toList(graph.findPaths(v1, v4, 2, AUTHORIZATIONS_A)); // v1 -> v2 -> v4 // v1 -> v3 -> v4 assertEquals(2, paths.size()); boolean found2 = false; boolean found3 = false; for (Path path : paths) { assertEquals(3, path.length()); int i = 0; for (Object id : path) { if (i == 0) { assertEquals(id, v1.getId()); } else if (i == 1) { if (v2.getId().equals(id)) { found2 = true; } else if (v3.getId().equals(id)) { found3 = true; } else { fail("center of path is neither v2 or v3 but found " + id); } } else if (i == 2) { assertEquals(id, v4.getId()); } i++; } } assertTrue("v2 not found in path", found2); assertTrue("v3 not found in path", found3); v4 = graph.getVertex("v4", AUTHORIZATIONS_A); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); paths = toList(graph.findPaths(v4, v1, 2, AUTHORIZATIONS_A)); // v4 -> v2 -> v1 // v4 -> v3 -> v1 assertEquals(2, paths.size()); found2 = false; found3 = false; for (Path path : paths) { assertEquals(3, path.length()); int i = 0; for (Object id : path) { if (i == 0) { assertEquals(id, v4.getId()); } else if (i == 1) { if (v2.getId().equals(id)) { found2 = true; } else if (v3.getId().equals(id)) { found3 = true; } else { fail("center of path is neither v2 or v3 but found " + id); } } else if (i == 2) { assertEquals(id, v1.getId()); } i++; } } assertTrue("v2 not found in path", found2); assertTrue("v3 not found in path", found3); } @Test public void testFindPathsMultiplePaths() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v4 = graph.addVertex("v4", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v5 = graph.addVertex("v5", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge(v1, v4, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v1 -> v4 graph.addEdge(v1, v3, "knows", VISIBILITY_A, 
AUTHORIZATIONS_A); // v1 -> v3 graph.addEdge(v3, v4, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v3 -> v4 graph.addEdge(v2, v3, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v2 -> v3 graph.addEdge(v4, v2, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v4 -> v2 graph.addEdge(v2, v5, "knows", VISIBILITY_A, AUTHORIZATIONS_A); // v2 -> v5 v1 = graph.getVertex("v1", AUTHORIZATIONS_A); v2 = graph.getVertex("v2", AUTHORIZATIONS_A); v5 = graph.getVertex("v5", AUTHORIZATIONS_A); List<Path> paths = toList(graph.findPaths(v1, v2, 2, AUTHORIZATIONS_A)); // v1 -> v4 -> v2 // v1 -> v3 -> v2 assertEquals(2, paths.size()); boolean found3 = false; boolean found4 = false; for (Path path : paths) { assertEquals(3, path.length()); int i = 0; for (Object id : path) { if (i == 0) { assertEquals(id, v1.getId()); } else if (i == 1) { if (v3.getId().equals(id)) { found3 = true; } else if (v4.getId().equals(id)) { found4 = true; } else { fail("center of path is neither v2 or v3 but found " + id); } } else if (i == 2) { assertEquals(id, v2.getId()); } i++; } } assertTrue("v3 not found in path", found3); assertTrue("v4 not found in path", found4); paths = toList(graph.findPaths(v1, v2, 3, AUTHORIZATIONS_A)); // v1 -> v4 -> v2 // v1 -> v3 -> v2 // v1 -> v3 -> v4 -> v2 // v1 -> v4 -> v3 -> v2 assertEquals(4, paths.size()); found3 = false; found4 = false; for (Path path : paths) { if (path.length() == 3) { int i = 0; for (Object id : path) { if (i == 0) { assertEquals(id, v1.getId()); } else if (i == 1) { if (v3.getId().equals(id)) { found3 = true; } else if (v4.getId().equals(id)) { found4 = true; } else { fail("center of path is neither v2 or v3 but found " + id); } } else if (i == 2) { assertEquals(id, v2.getId()); } i++; } } else if (path.length() == 4) { } else { fail("Invalid path length " + path.length()); } } assertTrue("v3 not found in path", found3); assertTrue("v4 not found in path", found4); paths = toList(graph.findPaths(v1, v5, 2, AUTHORIZATIONS_A)); assertEquals(0, paths.size()); 
paths = toList(graph.findPaths(v1, v5, 3, AUTHORIZATIONS_A)); // v1 -> v4 -> v2 -> v5 // v1 -> v3 -> v2 -> v5 assertEquals(2, paths.size()); found3 = false; found4 = false; for (Path path : paths) { assertEquals(4, path.length()); int i = 0; for (Object id : path) { if (i == 0) { assertEquals(id, v1.getId()); } else if (i == 1) { if (v3.getId().equals(id)) { found3 = true; } else if (v4.getId().equals(id)) { found4 = true; } else { fail("center of path is neither v2 or v3 but found " + id); } } else if (i == 2) { assertEquals(id, v2.getId()); } else if (i == 3) { assertEquals(id, v5.getId()); } i++; } } assertTrue("v3 not found in path", found3); assertTrue("v4 not found in path", found4); } @Test public void testGetVerticesFromVertex() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v4 = graph.addVertex("v4", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge(v1, v2, "knows", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge(v1, v3, "knows", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge(v1, v4, "knows", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge(v2, v3, "knows", VISIBILITY_A, AUTHORIZATIONS_A); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(3, count(v1.getVertices(Direction.BOTH, AUTHORIZATIONS_A))); assertEquals(3, count(v1.getVertices(Direction.OUT, AUTHORIZATIONS_A))); assertEquals(0, count(v1.getVertices(Direction.IN, AUTHORIZATIONS_A))); v2 = graph.getVertex("v2", AUTHORIZATIONS_A); assertEquals(2, count(v2.getVertices(Direction.BOTH, AUTHORIZATIONS_A))); assertEquals(1, count(v2.getVertices(Direction.OUT, AUTHORIZATIONS_A))); assertEquals(1, count(v2.getVertices(Direction.IN, AUTHORIZATIONS_A))); v3 = graph.getVertex("v3", AUTHORIZATIONS_A); assertEquals(2, count(v3.getVertices(Direction.BOTH, AUTHORIZATIONS_A))); assertEquals(0, count(v3.getVertices(Direction.OUT, AUTHORIZATIONS_A))); 
assertEquals(2, count(v3.getVertices(Direction.IN, AUTHORIZATIONS_A))); v4 = graph.getVertex("v4", AUTHORIZATIONS_A); assertEquals(1, count(v4.getVertices(Direction.BOTH, AUTHORIZATIONS_A))); assertEquals(0, count(v4.getVertices(Direction.OUT, AUTHORIZATIONS_A))); assertEquals(1, count(v4.getVertices(Direction.IN, AUTHORIZATIONS_A))); } @Test public void testBlankVisibilityString() { Vertex v = graph.addVertex("v1", VISIBILITY_EMPTY, AUTHORIZATIONS_EMPTY); assertNotNull(v); assertEquals("v1", v.getId()); v = graph.getVertex("v1", AUTHORIZATIONS_EMPTY); assertNotNull(v); assertEquals("v1", v.getId()); assertEquals(VISIBILITY_EMPTY, v.getVisibility()); } @Test public void testElementMutationDoesntChangeObjectUntilSave() { Vertex v = graph.addVertex("v1", VISIBILITY_EMPTY, AUTHORIZATIONS_EMPTY); v.setProperty("prop1", "value1", VISIBILITY_A, AUTHORIZATIONS_A_AND_B); ElementMutation<Vertex> m = v.prepareMutation() .setProperty("prop1", "value2", VISIBILITY_A) .setProperty("prop2", "value2", VISIBILITY_A); assertEquals(1, count(v.getProperties())); assertEquals("value1", v.getPropertyValue("prop1")); m.save(AUTHORIZATIONS_A_AND_B); assertEquals(2, count(v.getProperties())); assertEquals("value2", v.getPropertyValue("prop1")); assertEquals("value2", v.getPropertyValue("prop2")); } @Test public void testFindRelatedEdges() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v2 = graph.addVertex("v2", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v3 = graph.addVertex("v3", VISIBILITY_A, AUTHORIZATIONS_A); Vertex v4 = graph.addVertex("v4", VISIBILITY_A, AUTHORIZATIONS_A); Edge ev1v2 = graph.addEdge("e v1->v2", v1, v2, "", VISIBILITY_A, AUTHORIZATIONS_A); Edge ev1v3 = graph.addEdge("e v1->v3", v1, v3, "", VISIBILITY_A, AUTHORIZATIONS_A); Edge ev2v3 = graph.addEdge("e v2->v3", v2, v3, "", VISIBILITY_A, AUTHORIZATIONS_A); Edge ev3v1 = graph.addEdge("e v3->v1", v3, v1, "", VISIBILITY_A, AUTHORIZATIONS_A); graph.addEdge("e v3->v4", v3, v4, "", VISIBILITY_A, 
AUTHORIZATIONS_A); List<Object> vertexIds = new ArrayList<Object>(); vertexIds.add("v1"); vertexIds.add("v2"); vertexIds.add("v3"); Iterable<Object> edges = toList(graph.findRelatedEdges(vertexIds, AUTHORIZATIONS_A)); assertEquals(4, count(edges)); assertContains(ev1v2.getId(), edges); assertContains(ev1v3.getId(), edges); assertContains(ev2v3.getId(), edges); assertContains(ev3v1.getId(), edges); } @Test public void testEmptyPropertyMutation() { Vertex v1 = graph.addVertex("v1", VISIBILITY_A, AUTHORIZATIONS_A); v1.prepareMutation().save(AUTHORIZATIONS_A_AND_B); } @Test public void testTextIndex() throws Exception { graph.defineProperty("none").dataType(String.class).textIndexHint(TextIndexHint.NONE).define(); graph.defineProperty("none").dataType(String.class).textIndexHint(TextIndexHint.NONE).define(); // try calling define twice graph.defineProperty("both").dataType(String.class).textIndexHint(TextIndexHint.ALL).define(); graph.defineProperty("fullText").dataType(String.class).textIndexHint(TextIndexHint.FULL_TEXT).define(); graph.defineProperty("exactMatch").dataType(String.class).textIndexHint(TextIndexHint.EXACT_MATCH).define(); graph.prepareVertex("v1", VISIBILITY_A) .setProperty("none", "Test Value", VISIBILITY_A) .setProperty("both", "Test Value", VISIBILITY_A) .setProperty("fullText", "Test Value", VISIBILITY_A) .setProperty("exactMatch", "Test Value", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals("Test Value", v1.getPropertyValue("none")); assertEquals("Test Value", v1.getPropertyValue("both")); assertEquals("Test Value", v1.getPropertyValue("fullText")); assertEquals("Test Value", v1.getPropertyValue("exactMatch")); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("both", TextPredicate.CONTAINS, "Test").vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("fullText", TextPredicate.CONTAINS, "Test").vertices())); assertEquals("exact match shouldn't match partials", 
0, count(graph.query(AUTHORIZATIONS_A).has("exactMatch", TextPredicate.CONTAINS, "Test").vertices())); assertEquals("unindexed property shouldn't match partials", 0, count(graph.query(AUTHORIZATIONS_A).has("none", TextPredicate.CONTAINS, "Test").vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("both", "Test Value").vertices())); assertEquals("default has predicate is equals which shouldn't work for full text", 0, count(graph.query(AUTHORIZATIONS_A).has("fullText", "Test Value").vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("exactMatch", "Test Value").vertices())); assertEquals("default has predicate is equals which shouldn't work for unindexed", 0, count(graph.query(AUTHORIZATIONS_A).has("none", "Test Value").vertices())); } @Test public void testFieldBoost() throws Exception { if (!graph.isFieldBoostSupported()) { LOGGER.warn("Boost not supported"); return; } graph.defineProperty("a") .dataType(String.class) .textIndexHint(TextIndexHint.ALL) .boost(1) .define(); graph.defineProperty("b") .dataType(String.class) .textIndexHint(TextIndexHint.ALL) .boost(2) .define(); graph.prepareVertex("v1", VISIBILITY_A) .setProperty("a", "Test Value", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.prepareVertex("v2", VISIBILITY_A) .setProperty("b", "Test Value", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); assertVertexIds(graph.query("Test", AUTHORIZATIONS_A).vertices(), new String[]{"v2", "v1"}); } @Test public void testVertexBoost() throws Exception { Vertex v1 = graph.prepareVertex("v1", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Vertex v2 = graph.prepareVertex("v2", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Vertex v3 = graph.prepareVertex("v3", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); graph.addEdge("e1", v3, v2, "link", VISIBILITY_A, AUTHORIZATIONS_A); assertVertexIds(graph.query(AUTHORIZATIONS_A).vertices(), new String[]{"v2", "v3", "v1"}); } @Test public void testValueTypes() throws Exception { Date date = 
createDate(2014, 2, 24, 13, 0, 5); graph.prepareVertex("v1", VISIBILITY_A) .setProperty("int", 5, VISIBILITY_A) .setProperty("bigDecimal", new BigDecimal(10), VISIBILITY_A) .setProperty("double", 5.6, VISIBILITY_A) .setProperty("float", 6.4f, VISIBILITY_A) .setProperty("string", "test", VISIBILITY_A) .setProperty("byte", (byte) 5, VISIBILITY_A) .setProperty("long", (long) 5, VISIBILITY_A) .setProperty("boolean", true, VISIBILITY_A) .setProperty("geopoint", new GeoPoint(77, -33), VISIBILITY_A) .setProperty("short", (short) 5, VISIBILITY_A) .setProperty("date", date, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("int", 5).vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("double", 5.6).vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("float", 6.4f).vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("string", "test").vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("byte", 5).vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("long", 5).vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("boolean", true).vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("short", 5).vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("date", date).vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("bigDecimal", 10).vertices())); assertEquals(1, count(graph.query(AUTHORIZATIONS_A).has("geopoint", GeoCompare.WITHIN, new GeoCircle(77, -33, 1)).vertices())); } @Test public void testChangeVisibilityVertex() { graph.prepareVertex("v1", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A); v1.prepareMutation() .alterElementVisibility(VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertNull(v1); v1 = graph.getVertex("v1", AUTHORIZATIONS_B); assertNotNull(v1); // change to same 
visibility v1 = graph.getVertex("v1", AUTHORIZATIONS_B); v1.prepareMutation() .alterElementVisibility(VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertNull(v1); v1 = graph.getVertex("v1", AUTHORIZATIONS_B); assertNotNull(v1); } @Test public void testChangeVisibilityVertexProperties() { Map<String, Object> prop1Metadata = new HashMap<String, Object>(); prop1Metadata.put("prop1_key1", "value1"); Map<String, Object> prop2Metadata = new HashMap<String, Object>(); prop2Metadata.put("prop2_key1", "value1"); graph.prepareVertex("v1", VISIBILITY_A) .setProperty("prop1", "value1", prop1Metadata, VISIBILITY_EMPTY) .setProperty("prop2", "value2", prop2Metadata, VISIBILITY_EMPTY) .save(AUTHORIZATIONS_A_AND_B); Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A); v1.prepareMutation() .alterPropertyVisibility("prop1", VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertNull(v1.getProperty("prop1")); assertNotNull(v1.getProperty("prop2")); v1 = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B); assertNotNull(v1.getProperty("prop1")); assertNotNull(v1.getProperty("prop2")); // alter and set property in one mutation v1 = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B); v1.prepareMutation() .alterPropertyVisibility("prop1", VISIBILITY_A) .setProperty("prop1", "value1New", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); v1 = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B); assertNotNull(v1.getProperty("prop1")); assertEquals("value1New", v1.getPropertyValue("prop1")); // alter visibility to the same visibility v1 = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B); v1.prepareMutation() .alterPropertyVisibility("prop1", VISIBILITY_A) .setProperty("prop1", "value1New2", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); v1 = graph.getVertex("v1", AUTHORIZATIONS_A_AND_B); assertNotNull(v1.getProperty("prop1")); assertEquals("value1New2", v1.getPropertyValue("prop1")); } @Test public void testChangeVisibilityEdge() { 
Vertex v1 = graph.prepareVertex("v1", VISIBILITY_EMPTY) .save(AUTHORIZATIONS_A_AND_B); Vertex v2 = graph.prepareVertex("v2", VISIBILITY_EMPTY) .save(AUTHORIZATIONS_A_AND_B); graph.prepareEdge("e1", v1, v2, "", VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); // test that we can see the edge with A and not B v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(0, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_B))); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(1, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_A))); // change the edge Edge e1 = graph.getEdge("e1", AUTHORIZATIONS_A); e1.prepareMutation() .alterElementVisibility(VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); // test that we can see the edge with B and not A v1 = graph.getVertex("v1", AUTHORIZATIONS_B); assertEquals(1, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_B))); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(0, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_A))); // change the edge visibility to same e1 = graph.getEdge("e1", AUTHORIZATIONS_B); e1.prepareMutation() .alterElementVisibility(VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); // test that we can see the edge with B and not A v1 = graph.getVertex("v1", AUTHORIZATIONS_B); assertEquals(1, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_B))); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals(0, count(v1.getEdges(Direction.BOTH, AUTHORIZATIONS_A))); } @Test public void testChangeVisibilityOnBadPropertyName() { graph.prepareVertex("v1", VISIBILITY_A) .setProperty("prop1", "value1", VISIBILITY_EMPTY) .setProperty("prop2", "value2", VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); try { graph.getVertex("v1", AUTHORIZATIONS_A) .prepareMutation() .alterPropertyVisibility("propBad", VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); fail("show throw"); } catch (SecureGraphException ex) { assertNotNull(ex); } } @Test public void testChangeVisibilityOnStreamingProperty() throws IOException { String expectedLargeValue = 
IOUtils.toString(new LargeStringInputStream(LARGE_PROPERTY_VALUE_SIZE)); PropertyValue propSmall = new StreamingPropertyValue(new ByteArrayInputStream("value1".getBytes()), String.class); PropertyValue propLarge = new StreamingPropertyValue(new ByteArrayInputStream(expectedLargeValue.getBytes()), String.class); String largePropertyName = "propLarge/\\*!@#$%^&*()[]{}|"; graph.prepareVertex("v1", VISIBILITY_A) .setProperty("propSmall", propSmall, VISIBILITY_A) .setProperty(largePropertyName, propLarge, VISIBILITY_A) .save(AUTHORIZATIONS_A_AND_B); assertEquals(2, count(graph.getVertex("v1", AUTHORIZATIONS_A).getProperties())); graph.getVertex("v1", AUTHORIZATIONS_A) .prepareMutation() .alterPropertyVisibility("propSmall", VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); assertEquals(1, count(graph.getVertex("v1", AUTHORIZATIONS_A).getProperties())); graph.getVertex("v1", AUTHORIZATIONS_A) .prepareMutation() .alterPropertyVisibility(largePropertyName, VISIBILITY_B) .save(AUTHORIZATIONS_A_AND_B); assertEquals(0, count(graph.getVertex("v1", AUTHORIZATIONS_A).getProperties())); assertEquals(2, count(graph.getVertex("v1", AUTHORIZATIONS_A_AND_B).getProperties())); } @Test public void testChangePropertyMetadata() { Map<String, Object> prop1Metadata = new HashMap<String, Object>(); prop1Metadata.put("prop1_key1", "valueOld"); graph.prepareVertex("v1", VISIBILITY_A) .setProperty("prop1", "value1", prop1Metadata, VISIBILITY_EMPTY) .setProperty("prop2", "value2", null, VISIBILITY_EMPTY) .save(AUTHORIZATIONS_A_AND_B); Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A); v1.prepareMutation() .alterPropertyMetadata("prop1", "prop1_key1", "valueNew") .save(AUTHORIZATIONS_A_AND_B); assertEquals("valueNew", v1.getProperty("prop1").getMetadata().get("prop1_key1")); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals("valueNew", v1.getProperty("prop1").getMetadata().get("prop1_key1")); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); v1.prepareMutation() .alterPropertyMetadata("prop2", 
"prop2_key1", "valueNew") .save(AUTHORIZATIONS_A_AND_B); assertEquals("valueNew", v1.getProperty("prop2").getMetadata().get("prop2_key1")); v1 = graph.getVertex("v1", AUTHORIZATIONS_A); assertEquals("valueNew", v1.getProperty("prop2").getMetadata().get("prop2_key1")); } @Test public void testIsVisibilityValid() { assertFalse(graph.isVisibilityValid(VISIBILITY_A, AUTHORIZATIONS_C)); assertTrue(graph.isVisibilityValid(VISIBILITY_B, AUTHORIZATIONS_A_AND_B)); assertTrue(graph.isVisibilityValid(VISIBILITY_B, AUTHORIZATIONS_B)); assertTrue(graph.isVisibilityValid(VISIBILITY_EMPTY, AUTHORIZATIONS_A)); } protected void assertVertexIds(Iterable<Vertex> vertices, String[] ids) { List<Vertex> verticesList = toList(vertices); assertEquals(ids.length, verticesList.size()); for (int i = 0; i < ids.length; i++) { assertEquals("at offset: " + i, ids[i], verticesList.get(i).getId()); } } }
write unit test that fails testModifyVertexWithLowerAuthorizationThenOtherProperties
securegraph-test/src/main/java/org/securegraph/test/GraphTestBase.java
write unit test that fails testModifyVertexWithLowerAuthorizationThenOtherProperties
<ide><path>ecuregraph-test/src/main/java/org/securegraph/test/GraphTestBase.java <ide> assertTrue(graph.isVisibilityValid(VISIBILITY_EMPTY, AUTHORIZATIONS_A)); <ide> } <ide> <add> @Test <add> public void testModifyVertexWithLowerAuthorizationThenOtherProperties() { <add> graph.prepareVertex("v1", VISIBILITY_A) <add> .setProperty("prop1", "value1", VISIBILITY_A) <add> .setProperty("prop2", "value2", VISIBILITY_B) <add> .save(AUTHORIZATIONS_A_AND_B); <add> graph.flush(); <add> <add> Vertex v1 = graph.getVertex("v1", AUTHORIZATIONS_A); <add> v1.setProperty("prop1", "value1New", VISIBILITY_A, AUTHORIZATIONS_A); <add> graph.flush(); <add> <add> Iterable<Vertex> vertices = graph.query(AUTHORIZATIONS_A_AND_B) <add> .has("prop2", "value2") <add> .vertices(); <add> assertVertexIds(vertices, new String[]{"v1"}); <add> } <add> <ide> protected void assertVertexIds(Iterable<Vertex> vertices, String[] ids) { <ide> List<Vertex> verticesList = toList(vertices); <del> assertEquals(ids.length, verticesList.size()); <add> assertEquals("ids length mismatch", ids.length, verticesList.size()); <ide> for (int i = 0; i < ids.length; i++) { <ide> assertEquals("at offset: " + i, ids[i], verticesList.get(i).getId()); <ide> }
Java
lgpl-2.1
fa6525f6e99f444464b8eee2a75f1429c0769786
0
MichaelMcDonnell/vrjuggler,vrjuggler/vrjuggler,LiuKeHua/vrjuggler,vancegroup-mirrors/vrjuggler,godbyk/vrjuggler-upstream-old,LiuKeHua/vrjuggler,godbyk/vrjuggler-upstream-old,godbyk/vrjuggler-upstream-old,vrjuggler/vrjuggler,LiuKeHua/vrjuggler,vancegroup-mirrors/vrjuggler,LiuKeHua/vrjuggler,MichaelMcDonnell/vrjuggler,vancegroup-mirrors/vrjuggler,vrjuggler/vrjuggler,MichaelMcDonnell/vrjuggler,vrjuggler/vrjuggler,MichaelMcDonnell/vrjuggler,vrjuggler/vrjuggler,vrjuggler/vrjuggler,godbyk/vrjuggler-upstream-old,vancegroup-mirrors/vrjuggler,LiuKeHua/vrjuggler,MichaelMcDonnell/vrjuggler,MichaelMcDonnell/vrjuggler,LiuKeHua/vrjuggler,vrjuggler/vrjuggler,godbyk/vrjuggler-upstream-old,vrjuggler/vrjuggler,vancegroup-mirrors/vrjuggler,MichaelMcDonnell/vrjuggler,MichaelMcDonnell/vrjuggler,LiuKeHua/vrjuggler,vancegroup-mirrors/vrjuggler,godbyk/vrjuggler-upstream-old,LiuKeHua/vrjuggler
/*************** <auto-copyright.pl BEGIN do not edit this line> ************** * * VR Juggler is (C) Copyright 1998-2003 by Iowa State University * * Original Authors: * Allen Bierbaum, Christopher Just, * Patrick Hartling, Kevin Meinert, * Carolina Cruz-Neira, Albert Baker * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public * License along with this library; if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. * * ----------------------------------------------------------------- * File: $RCSfile$ * Date modified: $Date$ * Version: $Revision$ * ----------------------------------------------------------------- * *************** <auto-copyright.pl END do not edit this line> ***************/ package org.vrjuggler.jccl.config.io; import java.io.*; import java.util.*; import org.jdom.*; import org.vrjuggler.jccl.config.*; /** * This object knows how to create a configuration from an XML document and a * configuration definition repository. */ public class ConfigurationParser implements ConfigIOConstants { public ConfigurationParser() { mDefinitionRepos = null; } /** * Sets the repository from which configuration definitions shall be located. */ public void setRepository(ConfigDefinitionRepository repos) { mDefinitionRepos = repos; } /** * Parses the configuration out of the given XML document. 
*/ public Configuration parse(Document doc) throws ParseException { return parseConfiguration(doc.getRootElement()); } /** * Parses a configuration node in the DOM tree. This will return a * Configuration object. */ public Configuration parseConfiguration(Element root) throws ParseException { // Verify that the root element is a configuration if (!root.getName().equals(CONFIGURATION)) { throw new ParseException("Root element must be: "+CONFIGURATION); } // Verify that this configuration has a name String name = root.getAttributeValue(NAME); if (name == null) { throw new MissingAttributeException(NAME); } // Create the configuration that we will build up Configuration config = new Configuration(name); // Get the included configurations for (Iterator itr = root.getChildren(INCLUDE, CFG_NS).iterator(); itr.hasNext(); ) { // Parse the given include Element elt = (Element)itr.next(); config.addInclude(elt.getText()); } // Get the configuration definition lookup path /* List path = parseDefinitionPath(root.getChild(DEFINITION_PATH, CFG_NS)); for (Iterator itr = path.iterator(); itr.hasNext(); ) { // Add the configuration definition lookup path String dir = (String)itr.next(); config.addDefinitionPath(dir); } */ // Get the list of elements in this configuration List config_elts = parseElements(root.getChild(ELEMENTS, CFG_NS)/*, path*/); for (Iterator itr = config_elts.iterator(); itr.hasNext(); ) { // Add the given configuration element to the configuration ConfigElement config_elt = (ConfigElement)itr.next(); config.addElement(config_elt); } return config; } /** * Parses a definition path node in the DOM tree. 
*/ private List parseDefinitionPath(Element root) throws ParseException { // Verify that the root element is a definition path node if (!root.getName().equals(DEFINITION_PATH)) { throw new ParseException("Root element must be: "+DEFINITION_PATH); } // Parse each dir child of the definition path node List path = new ArrayList(); for (Iterator itr = root.getChildren(DIR, CFG_NS).iterator(); itr.hasNext(); ) { // Parse the directory addition to the path Element elt = (Element)itr.next(); String dir = elt.getTextTrim(); path.add(dir); } return path; } /** * Parses an elements node in the DOM tree. The result is a list of the * configuration elements that are children of the node. */ private List parseElements(Element root/*, List searchPath*/) throws ParseException { // Verify that the root element is an elements node if (!root.getName().equals(ELEMENTS)) { throw new ParseException("Root element must be: "+ELEMENTS); } ConfigElementParser parser = new ConfigElementParser(mDefinitionRepos/*, searchPath*/); List config_elts = new ArrayList(); // Parse each child of the elements node for a configuration element for (Iterator itr = root.getChildren().iterator(); itr.hasNext(); ) { Element elt = (Element)itr.next(); config_elts.add(parser.parse(elt)); } return config_elts; } private static final String CONFIGURATION = "configuration"; private static final String DEFINITION_PATH = "definition_path"; private static final String DIR = "dir"; private static final String ELEMENTS = "elements"; private static final String FILE = "file"; private static final String INCLUDE = "include"; private static final String NAME = "name"; /** The repository from which configuration definitions are retrieved. */ private ConfigDefinitionRepository mDefinitionRepos; }
modules/jackal/config/org/vrjuggler/jccl/config/io/ConfigurationParser.java
/*************** <auto-copyright.pl BEGIN do not edit this line> ************** * * VR Juggler is (C) Copyright 1998-2003 by Iowa State University * * Original Authors: * Allen Bierbaum, Christopher Just, * Patrick Hartling, Kevin Meinert, * Carolina Cruz-Neira, Albert Baker * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public * License along with this library; if not, write to the * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. * * ----------------------------------------------------------------- * File: $RCSfile$ * Date modified: $Date$ * Version: $Revision$ * ----------------------------------------------------------------- * *************** <auto-copyright.pl END do not edit this line> ***************/ package org.vrjuggler.jccl.config.io; import java.io.*; import java.util.*; import org.jdom.*; import org.vrjuggler.jccl.config.*; /** * This object knows how to create a configuration from an XML document and a * configuration definition repository. */ public class ConfigurationParser implements ConfigIOConstants { public ConfigurationParser() { mDefinitionRepos = null; } /** * Sets the repository from which configuration definitions shall be located. */ public void setRepository(ConfigDefinitionRepository repos) { mDefinitionRepos = repos; } /** * Parses the configuration out of the given XML document. 
*/ public Configuration parse(Document doc) throws ParseException { return parseConfiguration(doc.getRootElement()); } /** * Parses a configuration node in the DOM tree. This will return a * Configuration object. */ public Configuration parseConfiguration(Element root) throws ParseException { // Verify that the root element is a configuration if (!root.getName().equals(CONFIGURATION)) { throw new ParseException("Root element must be: "+CONFIGURATION); } // Verify that this configuration has a name String name = root.getAttributeValue(NAME); if (name == null) { throw new MissingAttributeException(NAME); } // Create the configuration that we will build up Configuration config = new Configuration(name); // Get the included configurations for (Iterator itr = root.getChildren(INCLUDE, CFG_NS).iterator(); itr.hasNext(); ) { // Parse the given include Element elt = (Element)itr.next(); config.addInclude(elt.getText()); } // Get the configuration definition lookup path List path = parseDefinitionPath(root.getChild(DEFINITION_PATH, CFG_NS)); for (Iterator itr = path.iterator(); itr.hasNext(); ) { // Add the configuration definition lookup path String dir = (String)itr.next(); config.addDefinitionPath(dir); } // Get the list of elements in this configuration List config_elts = parseElements(root.getChild(ELEMENTS, CFG_NS)/*, path*/); for (Iterator itr = config_elts.iterator(); itr.hasNext(); ) { // Add the given configuration element to the configuration ConfigElement config_elt = (ConfigElement)itr.next(); config.addElement(config_elt); } return config; } /** * Parses a definition path node in the DOM tree. 
*/ private List parseDefinitionPath(Element root) throws ParseException { // Verify that the root element is a definition path node if (!root.getName().equals(DEFINITION_PATH)) { throw new ParseException("Root element must be: "+DEFINITION_PATH); } // Parse each dir child of the definition path node List path = new ArrayList(); for (Iterator itr = root.getChildren(DIR, CFG_NS).iterator(); itr.hasNext(); ) { // Parse the directory addition to the path Element elt = (Element)itr.next(); String dir = elt.getTextTrim(); path.add(dir); } return path; } /** * Parses an elements node in the DOM tree. The result is a list of the * configuration elements that are children of the node. */ private List parseElements(Element root/*, List searchPath*/) throws ParseException { // Verify that the root element is an elements node if (!root.getName().equals(ELEMENTS)) { throw new ParseException("Root element must be: "+ELEMENTS); } ConfigElementParser parser = new ConfigElementParser(mDefinitionRepos/*, searchPath*/); List config_elts = new ArrayList(); // Parse each child of the elements node for a configuration element for (Iterator itr = root.getChildren().iterator(); itr.hasNext(); ) { Element elt = (Element)itr.next(); config_elts.add(parser.parse(elt)); } return config_elts; } private static final String CONFIGURATION = "configuration"; private static final String DEFINITION_PATH = "definition_path"; private static final String DIR = "dir"; private static final String ELEMENTS = "elements"; private static final String FILE = "file"; private static final String INCLUDE = "include"; private static final String NAME = "name"; /** The repository from which configuration definitions are retrieved. */ private ConfigDefinitionRepository mDefinitionRepos; }
Don't try to read the definition path stuff from the configuration file. That is supposed to come from an environment variable, I believe. git-svn-id: 769d22dfa2d22aad706b9a451492fb87c0735f19@13243 08b38cba-cd3b-11de-854e-f91c5b6e4272
modules/jackal/config/org/vrjuggler/jccl/config/io/ConfigurationParser.java
Don't try to read the definition path stuff from the configuration file. That is supposed to come from an environment variable, I believe.
<ide><path>odules/jackal/config/org/vrjuggler/jccl/config/io/ConfigurationParser.java <ide> } <ide> <ide> // Get the configuration definition lookup path <add>/* <ide> List path = parseDefinitionPath(root.getChild(DEFINITION_PATH, CFG_NS)); <ide> for (Iterator itr = path.iterator(); itr.hasNext(); ) <ide> { <ide> String dir = (String)itr.next(); <ide> config.addDefinitionPath(dir); <ide> } <add>*/ <ide> <ide> // Get the list of elements in this configuration <ide> List config_elts = parseElements(root.getChild(ELEMENTS, CFG_NS)/*, path*/);
Java
apache-2.0
08e1c87d4f89a1509511b51a0f8ee2443f04833a
0
svenkubiak/mangooio,svenkubiak/mangooio,svenkubiak/mangooio
package io.mangoo.models; import java.security.Principal; import java.util.Collections; import java.util.Objects; import java.util.Set; import io.mangoo.enums.Required; import io.undertow.security.idm.Account; import io.undertow.security.idm.Credential; import io.undertow.security.idm.IdentityManager; import io.undertow.security.idm.PasswordCredential; /** * * A simple IdentityManager implementation * * @author svenkubiak * */ public class Identity implements IdentityManager { private String username; private String password; public Identity(String username, String password) { this.username = Objects.requireNonNull(username, Required.USERNAME.toString()); this.password = Objects.requireNonNull(password, Required.PASSWORD.toString()); } @Override public Account verify(Account account) { return null; } @Override public Account verify(Credential credential) { return null; } @Override public Account verify(String username, Credential credential) { Account account = null; if (this.username.equals(username) && verifyCredential(credential)) { account = getAccount(username); } return account; } private static Account getAccount(String username) { return new Account() { private static final long serialVersionUID = 5311970975103831035L; private transient Principal principal = () -> username; @Override public Principal getPrincipal() { return principal; } @Override public Set<String> getRoles() { return Collections.emptySet(); } }; } private boolean verifyCredential(Credential credential) { if (credential instanceof PasswordCredential) { return ((PasswordCredential) credential).getPassword().equals(this.password.toCharArray()); } return false; } }
mangooio-core/src/main/java/io/mangoo/models/Identity.java
package io.mangoo.models; import java.security.Principal; import java.util.Collections; import java.util.Objects; import java.util.Set; import io.mangoo.enums.Required; import io.undertow.security.idm.Account; import io.undertow.security.idm.Credential; import io.undertow.security.idm.IdentityManager; import io.undertow.security.idm.PasswordCredential; /** * * A simple IdentityManager implementation * * @author svenkubiak * */ public class Identity implements IdentityManager { private String username; private String password; public Identity(String username, String password) { this.username = Objects.requireNonNull(username, Required.USERNAME.toString()); this.password = Objects.requireNonNull(password, Required.PASSWORD.toString()); } @Override public Account verify(Account account) { return null; } @Override public Account verify(Credential credential) { return null; } @Override public Account verify(String username, Credential credential) { Account account = null; if (this.username.equals(username) && verifyCredential(credential)) { account = getAccount(username); } return account; } private static Account getAccount(String username) { return new Account() { private static final long serialVersionUID = 5311970975103831035L; private transient Principal principal = () -> username; @Override public Principal getPrincipal() { return principal; } @Override public Set<String> getRoles() { return Collections.emptySet(); } }; } private boolean verifyCredential(Credential credential) { if (credential instanceof PasswordCredential) { return new String (((PasswordCredential) credential).getPassword()).equals(this.password); } return false; } }
Sonar refactorings
mangooio-core/src/main/java/io/mangoo/models/Identity.java
Sonar refactorings
<ide><path>angooio-core/src/main/java/io/mangoo/models/Identity.java <ide> <ide> private boolean verifyCredential(Credential credential) { <ide> if (credential instanceof PasswordCredential) { <del> return new String (((PasswordCredential) credential).getPassword()).equals(this.password); <add> return ((PasswordCredential) credential).getPassword().equals(this.password.toCharArray()); <ide> } <ide> <ide> return false;
Java
apache-2.0
2cd4c7cc5b4404ea4da564223ab5576e32462a21
0
garpinc/pac4j,jkacer/pac4j,jkacer/pac4j,mmoayyed/pac4j,garpinc/pac4j,mmoayyed/pac4j,zawn/pac4j,topicusonderwijs/pac4j,zawn/pac4j,topicusonderwijs/pac4j
package org.pac4j.core.profile.service; import org.pac4j.core.context.WebContext; import org.pac4j.core.credentials.UsernamePasswordCredentials; import org.pac4j.core.credentials.authenticator.Authenticator; import org.pac4j.core.credentials.password.PasswordEncoder; import org.pac4j.core.exception.*; import org.pac4j.core.profile.CommonProfile; import org.pac4j.core.profile.definition.ProfileDefinitionAware; import org.pac4j.core.util.JavaSerializationHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import static org.pac4j.core.context.Pac4jConstants.*; import static org.pac4j.core.util.CommonHelper.*; /** * Abstract implementation of the {@link ProfileService} for the storage: LDAP, SQL and MongoDB. * * @author Jerome Leleu * @since 2.0.0 */ public abstract class AbstractProfileService<U extends CommonProfile> extends ProfileDefinitionAware<U> implements ProfileService<U>, Authenticator<UsernamePasswordCredentials> { public static final String ID = "id"; public static final String LINKEDID = "linkedid"; public static final String SERIALIZED_PROFILE = "serializedprofile"; protected final Logger logger = LoggerFactory.getLogger(getClass()); private String usernameAttribute = USERNAME; private String passwordAttribute = PASSWORD; private String idAttribute = ID; private PasswordEncoder passwordEncoder; private JavaSerializationHelper javaSerializationHelper = new JavaSerializationHelper(); private String attributes; protected String[] attributeNames; @Override protected void internalInit(final WebContext context) { assertNotNull("profileDefinition", getProfileDefinition()); assertNotBlank("usernameAttribute", this.usernameAttribute); assertNotBlank("passwordAttribute", this.passwordAttribute); assertNotBlank("idAttribute", this.idAttribute); if (isNotBlank(attributes)) { attributeNames = attributes.split(","); for (final String attributeName : attributeNames) { if (getIdAttribute().equalsIgnoreCase(attributeName) || 
LINKEDID.equalsIgnoreCase(attributeName) || getUsernameAttribute().equalsIgnoreCase(attributeName) || getPasswordAttribute().equalsIgnoreCase(attributeName) || SERIALIZED_PROFILE.equalsIgnoreCase(attributeName)) { throw new TechnicalException("The 'getIdAttribute()', linkedid, 'getUsernameAttribute()', 'getPasswordAttribute()' and serializedprofile attributes are not allowed"); } } } else { attributeNames = new String[0]; } } @Override public void create(final U profile, final String password) { init(null); assertNotNull("profile", profile); assertNotBlank(PASSWORD, password); assertNotBlank(ID, profile.getId()); assertNotBlank(USERNAME, profile.getUsername()); final Map<String, Object> attributes = convertProfileAndPasswordToAttributes(profile, password); insert(attributes); } @Override public void update(final U profile, final String password) { init(null); assertNotNull("profile", profile); assertNotBlank(ID, profile.getId()); assertNotBlank(USERNAME, profile.getUsername()); final Map<String, Object> attributes = convertProfileAndPasswordToAttributes(profile, password); update(attributes); } @Override public void remove(final U profile) { init(null); assertNotNull("profile", profile); removeById(profile.getId()); } @Override public void removeById(final String id) { init(null); assertNotBlank(ID, id); deleteById(id); } /** * Convert a profile and a password into a map of attributes for the storage. 
* * @param profile the profile * @param password the password * @return the attributes */ protected Map<String, Object> convertProfileAndPasswordToAttributes(final U profile, final String password) { final Map<String, Object> storageAttributes = new HashMap<>(); storageAttributes.put(getIdAttribute(), profile.getId()); storageAttributes.put(LINKEDID, profile.getLinkedId()); storageAttributes.put(getUsernameAttribute(), profile.getUsername()); // if a password has been provided, encode it if (isNotBlank(password)) { final String encodedPassword; // encode password if we have a passwordEncoder (MongoDB, SQL but not for LDAP) if (passwordEncoder != null) { encodedPassword = passwordEncoder.encode(password); } else { encodedPassword = password; } storageAttributes.put(getPasswordAttribute(), encodedPassword); } // legacy mode: save the defined attributes if (isLegacyMode()) { for (final String attributeName : attributeNames) { storageAttributes.put(attributeName, profile.getAttribute(attributeName)); } } else { // new behaviour (>= v2.0): save the serialized profile storageAttributes.put(SERIALIZED_PROFILE, javaSerializationHelper.serializeToBase64(profile)); } return storageAttributes; } /** * Insert the attributes in the storage. * * @param attributes the attributes */ protected abstract void insert(final Map<String, Object> attributes); /** * Update the attributes in the storage. * * @param attributes the attributes */ protected abstract void update(final Map<String, Object> attributes); /** * Delete a profile by its identifier in the storage. 
* * @param id the identifier */ protected abstract void deleteById(final String id); @Override public U findById(final String id) { init(null); assertNotBlank(getIdAttribute(), id); final List<Map<String, Object>> listAttributes = read(defineAttributesToRead(), getIdAttribute(), id); return convertAttributesToProfile(listAttributes); } @Override public U findByLinkedId(final String linkedId) { init(null); assertNotBlank(LINKEDID, linkedId); final List<Map<String, Object>> listAttributes = read(defineAttributesToRead(), LINKEDID, linkedId); return convertAttributesToProfile(listAttributes); } /** * Define the attributes to read in the storage. * * @return the attributes */ protected List<String> defineAttributesToRead() { final List<String> names = new ArrayList<>(); names.add(getIdAttribute()); names.add(LINKEDID); // legacy mode: 'getIdAttribute()' + linkedid + username + attributes if (isLegacyMode()) { names.add(getUsernameAttribute()); names.addAll(Arrays.asList(attributeNames)); } else { // new beahviour (>= v2.0): 'getIdAttribute()' + linkedid + serializedprofile names.add(SERIALIZED_PROFILE); } return names; } /** * Convert the list of map of attributes from the storage into a profile. 
* * @param listStorageAttributes the list of map of attributes * @return the profile */ protected U convertAttributesToProfile(final List<Map<String, Object>> listStorageAttributes) { if (listStorageAttributes == null || listStorageAttributes.size() == 0) { return null; } final Map<String, Object> storageAttributes = listStorageAttributes.get(0); logger.debug("Attributes retrieved from the store: {}", storageAttributes); final String linkedId = (String) storageAttributes.get(LINKEDID); // legacy mode: only read the defined attributes if (isLegacyMode()) { final U profile = getProfileDefinition().newProfile(); for (final String attributeName : attributeNames) { getProfileDefinition().convertAndAdd(profile, attributeName, storageAttributes.get(attributeName)); } profile.setId(storageAttributes.get(getUsernameAttribute())); if (isNotBlank(linkedId)) { profile.setLinkedId(linkedId); } return profile; } else { // new behaviour (>= v2.0): read the serialized profile final U profile = (U) javaSerializationHelper.unserializeFromBase64((String) storageAttributes.get(SERIALIZED_PROFILE)); final Object id = storageAttributes.get(getIdAttribute()); if (isBlank(profile.getId()) && id != null) { profile.setId(id); } if (isBlank(profile.getLinkedId()) && isNotBlank(linkedId)) { profile.setLinkedId(linkedId); } return profile; } } /** * Read the list of defined attributes in the storage for key=value query. 
* * @param names the attribute names to read * @param key the key for the query * @param value the value for the query * @return the list of map of attributes */ protected abstract List<Map<String, Object>> read(final List<String> names, final String key, final String value); @Override public void validate(final UsernamePasswordCredentials credentials, final WebContext context) throws HttpAction, CredentialsException { init(context); assertNotNull("credentials", credentials); final String username = credentials.getUsername(); final String password = credentials.getPassword(); assertNotBlank(USERNAME, username); assertNotBlank(PASSWORD, password); final List<String> attributesToRead = defineAttributesToRead(); // + password to check attributesToRead.add(PASSWORD); try { final List<Map<String, Object>> listAttributes = read(attributesToRead, getUsernameAttribute(), username); if (listAttributes == null || listAttributes.isEmpty()) { throw new AccountNotFoundException("No account found for: " + username); } else if (listAttributes.size() > 1) { throw new MultipleAccountsFoundException("Too many accounts found for: " + username); } else { final String retrievedPassword = (String) listAttributes.get(0).get(getPasswordAttribute()); // check password if (!passwordEncoder.matches(password, retrievedPassword)) { throw new BadCredentialsException("Bad credentials for: " + username); } else { final U profile = convertAttributesToProfile(listAttributes); credentials.setUserProfile(profile); } } } catch (final TechnicalException e) { logger.debug("Authentication error", e); throw e; } } protected boolean isLegacyMode() { return attributes != null; } public PasswordEncoder getPasswordEncoder() { return passwordEncoder; } public void setPasswordEncoder(final PasswordEncoder passwordEncoder) { this.passwordEncoder = passwordEncoder; } public String getAttributes() { return attributes; } /** * <p>Since version 2.0 of pac4j, the profile can be saved, updated and deleted in the 
storage * by serializing the profile (in the <code>serializedprofile</code> attribute).</p> * <p>In addition to what existed in previous versions, the profile was built from existing attributes. * Setting this attribute with a list of attributes separated by commas (no aliasing) allows you * to use different attributes of the storage instead of the <code>serializedprofile</code> attribute.</p> * * @param attributes the attributes */ public void setAttributes(final String attributes) { this.attributes = attributes; } public JavaSerializationHelper getJavaSerializationHelper() { return javaSerializationHelper; } public void setJavaSerializationHelper(final JavaSerializationHelper javaSerializationHelper) { this.javaSerializationHelper = javaSerializationHelper; } public String getUsernameAttribute() { return usernameAttribute; } public void setUsernameAttribute(final String usernameAttribute) { this.usernameAttribute = usernameAttribute; } public String getPasswordAttribute() { return passwordAttribute; } public void setPasswordAttribute(final String passwordAttribute) { this.passwordAttribute = passwordAttribute; } public String getIdAttribute() { return idAttribute; } public void setIdAttribute(final String idAttribute) { this.idAttribute = idAttribute; } }
pac4j-core/src/main/java/org/pac4j/core/profile/service/AbstractProfileService.java
package org.pac4j.core.profile.service; import org.pac4j.core.context.WebContext; import org.pac4j.core.credentials.UsernamePasswordCredentials; import org.pac4j.core.credentials.authenticator.Authenticator; import org.pac4j.core.credentials.password.PasswordEncoder; import org.pac4j.core.exception.*; import org.pac4j.core.profile.CommonProfile; import org.pac4j.core.profile.definition.ProfileDefinitionAware; import org.pac4j.core.util.JavaSerializationHelper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import static org.pac4j.core.context.Pac4jConstants.*; import static org.pac4j.core.util.CommonHelper.*; /** * Abstract implementation of the {@link ProfileService} for the storage: LDAP, SQL and MongoDB. * * @author Jerome Leleu * @since 2.0.0 */ public abstract class AbstractProfileService<U extends CommonProfile> extends ProfileDefinitionAware<U> implements ProfileService<U>, Authenticator<UsernamePasswordCredentials> { public static final String ID = "id"; public static final String LINKEDID = "linkedid"; public static final String SERIALIZED_PROFILE = "serializedprofile"; protected final Logger logger = LoggerFactory.getLogger(getClass()); private String usernameAttribute = USERNAME; private String passwordAttribute = PASSWORD; private String idAttribute = ID; private PasswordEncoder passwordEncoder; private JavaSerializationHelper javaSerializationHelper = new JavaSerializationHelper(); private String attributes; protected String[] attributeNames; @Override protected void internalInit(final WebContext context) { assertNotNull("profileDefinition", getProfileDefinition()); assertNotBlank("usernameAttribute", this.usernameAttribute); assertNotBlank("passwordAttribute", this.passwordAttribute); assertNotBlank("idAttribute", this.idAttribute); if (isNotBlank(attributes)) { attributeNames = attributes.split(","); for (final String attributeName : attributeNames) { if (getIdAttribute().equalsIgnoreCase(attributeName) || 
LINKEDID.equalsIgnoreCase(attributeName) || getUsernameAttribute().equalsIgnoreCase(attributeName) || getPasswordAttribute().equalsIgnoreCase(attributeName) || SERIALIZED_PROFILE.equalsIgnoreCase(attributeName)) { throw new TechnicalException("The 'getIdAttribute()', linkedid, 'getUsernameAttribute()', 'getPasswordAttribute()' and serializedprofile attributes are not allowed"); } } } else { attributeNames = new String[0]; } } @Override public void create(final U profile, final String password) { init(null); assertNotNull("profile", profile); assertNotBlank(PASSWORD, password); assertNotBlank(ID, profile.getId()); assertNotBlank(USERNAME, profile.getUsername()); final Map<String, Object> attributes = convertProfileAndPasswordToAttributes(profile, password); insert(attributes); } @Override public void update(final U profile, final String password) { init(null); assertNotNull("profile", profile); assertNotBlank(ID, profile.getId()); assertNotBlank(USERNAME, profile.getUsername()); final Map<String, Object> attributes = convertProfileAndPasswordToAttributes(profile, password); update(attributes); } @Override public void remove(final U profile) { init(null); assertNotNull("profile", profile); removeById(profile.getId()); } @Override public void removeById(final String id) { init(null); assertNotBlank(ID, id); deleteById(id); } /** * Convert a profile and a password into a map of attributes for the storage. 
* * @param profile the profile * @param password the password * @return the attributes */ protected Map<String, Object> convertProfileAndPasswordToAttributes(final U profile, final String password) { final Map<String, Object> storageAttributes = new HashMap<>(); storageAttributes.put(getIdAttribute(), profile.getId()); storageAttributes.put(LINKEDID, profile.getLinkedId()); storageAttributes.put(getUsernameAttribute(), profile.getUsername()); // if a password has been provided, encode it if (isNotBlank(password)) { final String encodedPassword; // encode password if we have a passwordEncoder (MongoDB, SQL but not for LDAP) if (passwordEncoder != null) { encodedPassword = passwordEncoder.encode(password); } else { encodedPassword = password; } storageAttributes.put(getPasswordAttribute(), encodedPassword); } // legacy mode: save the defined attributes if (isLegacyMode()) { for (final String attributeName : attributeNames) { storageAttributes.put(attributeName, profile.getAttribute(attributeName)); } } else { // new behaviour (>= v2.0): save the serialized profile storageAttributes.put(SERIALIZED_PROFILE, javaSerializationHelper.serializeToBase64(profile)); } return storageAttributes; } /** * Insert the attributes in the storage. * * @param attributes the attributes */ protected abstract void insert(final Map<String, Object> attributes); /** * Update the attributes in the storage. * * @param attributes the attributes */ protected abstract void update(final Map<String, Object> attributes); /** * Delete a profile by its identifier in the storage. 
* * @param id the identifier */ protected abstract void deleteById(final String id); @Override public U findById(final String id) { init(null); assertNotBlank(getIdAttribute(), id); final List<Map<String, Object>> listAttributes = read(defineAttributesToRead(), getIdAttribute(), id); return convertAttributesToProfile(listAttributes); } @Override public U findByLinkedId(final String linkedId) { init(null); assertNotBlank(LINKEDID, linkedId); final List<Map<String, Object>> listAttributes = read(defineAttributesToRead(), LINKEDID, linkedId); return convertAttributesToProfile(listAttributes); } /** * Define the attributes to read in the storage. * * @return the attributes */ protected List<String> defineAttributesToRead() { final List<String> names = new ArrayList<>(); names.add(getIdAttribute()); names.add(LINKEDID); // legacy mode: 'getIdAttribute()' + linkedid + username + attributes if (isLegacyMode()) { names.add(getUsernameAttribute()); names.addAll(Arrays.asList(attributeNames)); } else { // new beahviour (>= v2.0): 'getIdAttribute()' + linkedid + serializedprofile names.add(SERIALIZED_PROFILE); } return names; } /** * Convert the list of map of attributes from the storage into a profile. 
* * @param listStorageAttributes the list of map of attributes * @return the profile */ protected U convertAttributesToProfile(final List<Map<String, Object>> listStorageAttributes) { if (listStorageAttributes == null || listStorageAttributes.size() == 0) { return null; } final Map<String, Object> storageAttributes = listStorageAttributes.get(0); final String linkedId = (String) storageAttributes.get(LINKEDID); // legacy mode: only read the defined attributes if (isLegacyMode()) { final U profile = getProfileDefinition().newProfile(); for (final String attributeName : attributeNames) { getProfileDefinition().convertAndAdd(profile, attributeName, storageAttributes.get(attributeName)); } profile.setId(storageAttributes.get(getUsernameAttribute())); if (isNotBlank(linkedId)) { profile.setLinkedId(linkedId); } return profile; } else { // new behaviour (>= v2.0): read the serialized profile final U profile = (U) javaSerializationHelper.unserializeFromBase64((String) storageAttributes.get(SERIALIZED_PROFILE)); final Object id = storageAttributes.get(getIdAttribute()); if (isBlank(profile.getId()) && id != null) { profile.setId(id); } if (isBlank(profile.getLinkedId()) && isNotBlank(linkedId)) { profile.setLinkedId(linkedId); } return profile; } } /** * Read the list of defined attributes in the storage for key=value query. 
* * @param names the attribute names to read * @param key the key for the query * @param value the value for the query * @return the list of map of attributes */ protected abstract List<Map<String, Object>> read(final List<String> names, final String key, final String value); @Override public void validate(final UsernamePasswordCredentials credentials, final WebContext context) throws HttpAction, CredentialsException { init(context); assertNotNull("credentials", credentials); final String username = credentials.getUsername(); final String password = credentials.getPassword(); assertNotBlank(USERNAME, username); assertNotBlank(PASSWORD, password); final List<String> attributesToRead = defineAttributesToRead(); // + password to check attributesToRead.add(PASSWORD); try { final List<Map<String, Object>> listAttributes = read(attributesToRead, getUsernameAttribute(), username); if (listAttributes == null || listAttributes.isEmpty()) { throw new AccountNotFoundException("No account found for: " + username); } else if (listAttributes.size() > 1) { throw new MultipleAccountsFoundException("Too many accounts found for: " + username); } else { final String retrievedPassword = (String) listAttributes.get(0).get(getPasswordAttribute()); // check password if (!passwordEncoder.matches(password, retrievedPassword)) { throw new BadCredentialsException("Bad credentials for: " + username); } else { final U profile = convertAttributesToProfile(listAttributes); credentials.setUserProfile(profile); } } } catch (final TechnicalException e) { logger.debug("Authentication error", e); throw e; } } protected boolean isLegacyMode() { return attributes != null; } public PasswordEncoder getPasswordEncoder() { return passwordEncoder; } public void setPasswordEncoder(final PasswordEncoder passwordEncoder) { this.passwordEncoder = passwordEncoder; } public String getAttributes() { return attributes; } /** * <p>Since version 2.0 of pac4j, the profile can be saved, updated and deleted in the 
storage * by serializing the profile (in the <code>serializedprofile</code> attribute).</p> * <p>In addition to what existed in previous versions, the profile was built from existing attributes. * Setting this attribute with a list of attributes separated by commas (no aliasing) allows you * to use different attributes of the storage instead of the <code>serializedprofile</code> attribute.</p> * * @param attributes the attributes */ public void setAttributes(final String attributes) { this.attributes = attributes; } public JavaSerializationHelper getJavaSerializationHelper() { return javaSerializationHelper; } public void setJavaSerializationHelper(final JavaSerializationHelper javaSerializationHelper) { this.javaSerializationHelper = javaSerializationHelper; } public String getUsernameAttribute() { return usernameAttribute; } public void setUsernameAttribute(final String usernameAttribute) { this.usernameAttribute = usernameAttribute; } public String getPasswordAttribute() { return passwordAttribute; } public void setPasswordAttribute(final String passwordAttribute) { this.passwordAttribute = passwordAttribute; } public String getIdAttribute() { return idAttribute; } public void setIdAttribute(final String idAttribute) { this.idAttribute = idAttribute; } }
add debug logs
pac4j-core/src/main/java/org/pac4j/core/profile/service/AbstractProfileService.java
add debug logs
<ide><path>ac4j-core/src/main/java/org/pac4j/core/profile/service/AbstractProfileService.java <ide> } <ide> final Map<String, Object> storageAttributes = listStorageAttributes.get(0); <ide> <add> logger.debug("Attributes retrieved from the store: {}", storageAttributes); <ide> final String linkedId = (String) storageAttributes.get(LINKEDID); <ide> // legacy mode: only read the defined attributes <ide> if (isLegacyMode()) {
Java
mit
a1b1f048e63daaa5c581fa7c357b6bd99111fdf5
0
shrayasr/PCache
package com.pcache.exceptions; public class PCacheException extends Exception { public PCacheException(String message) { super(message); } public PCacheException(String message, Exception ex) { super(message, ex); } }
src/com/pcache/exceptions/PCacheException.java
package com.pcache.exceptions; public class PCacheException extends Exception { public PCacheException(String message) { super(message); } }
PCacheException can now pass along the exception Adding new constructor that takes the exception that was raised (if caught) so that it can be bubbled up along with the custom message
src/com/pcache/exceptions/PCacheException.java
PCacheException can now pass along the exception
<ide><path>rc/com/pcache/exceptions/PCacheException.java <ide> super(message); <ide> } <ide> <add> public PCacheException(String message, Exception ex) { <add> super(message, ex); <add> } <add> <ide> }
Java
apache-2.0
5ab6f9a8bc5d1166368ede7581479453bc9d7839
0
killbill/killbill-stripe-plugin,killbill/killbill-stripe-plugin
/* * Copyright 2020-2020 Equinix, Inc * Copyright 2014-2020 The Billing Project, LLC * * The Billing Project licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.killbill.billing.plugin.stripe; import java.math.BigDecimal; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.stream.StreamSupport; import javax.annotation.Nullable; import org.joda.time.DateTime; import org.killbill.billing.ObjectType; import org.killbill.billing.account.api.Account; import org.killbill.billing.catalog.api.Currency; import org.killbill.billing.osgi.libs.killbill.OSGIConfigPropertiesService; import org.killbill.billing.osgi.libs.killbill.OSGIKillbillAPI; import org.killbill.billing.payment.api.PaymentApiException; import org.killbill.billing.payment.api.PaymentMethodPlugin; import org.killbill.billing.payment.api.PluginProperty; import org.killbill.billing.payment.api.TransactionType; import org.killbill.billing.payment.plugin.api.GatewayNotification; import org.killbill.billing.payment.plugin.api.HostedPaymentPageFormDescriptor; import org.killbill.billing.payment.plugin.api.PaymentMethodInfoPlugin; import org.killbill.billing.payment.plugin.api.PaymentPluginApiException; import org.killbill.billing.payment.plugin.api.PaymentPluginStatus; import org.killbill.billing.payment.plugin.api.PaymentTransactionInfoPlugin; 
import org.killbill.billing.plugin.api.PluginProperties; import org.killbill.billing.plugin.api.core.PluginCustomField; import org.killbill.billing.plugin.api.payment.PluginHostedPaymentPageFormDescriptor; import org.killbill.billing.plugin.api.payment.PluginPaymentPluginApi; import org.killbill.billing.plugin.stripe.dao.StripeDao; import org.killbill.billing.plugin.stripe.dao.gen.tables.StripePaymentMethods; import org.killbill.billing.plugin.stripe.dao.gen.tables.StripeResponses; import org.killbill.billing.plugin.stripe.dao.gen.tables.records.StripeHppRequestsRecord; import org.killbill.billing.plugin.stripe.dao.gen.tables.records.StripePaymentMethodsRecord; import org.killbill.billing.plugin.stripe.dao.gen.tables.records.StripeResponsesRecord; import org.killbill.billing.plugin.util.KillBillMoney; import org.killbill.billing.util.api.CustomFieldApiException; import org.killbill.billing.util.callcontext.CallContext; import org.killbill.billing.util.callcontext.TenantContext; import org.killbill.billing.util.customfield.CustomField; import org.killbill.clock.Clock; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.stripe.exception.CardException; import com.stripe.exception.StripeException; import com.stripe.model.Charge; import com.stripe.model.Customer; import com.stripe.model.HasId; import com.stripe.model.PaymentIntent; import com.stripe.model.PaymentMethod; import com.stripe.model.PaymentSource; import com.stripe.model.PaymentSourceCollection; import com.stripe.model.Refund; import com.stripe.model.SetupIntent; import com.stripe.model.Source; import com.stripe.model.Token; import com.stripe.model.checkout.Session; import com.stripe.net.RequestOptions; import com.stripe.param.PaymentIntentCancelParams; public class StripePaymentPluginApi extends 
PluginPaymentPluginApi<StripeResponsesRecord, StripeResponses, StripePaymentMethodsRecord, StripePaymentMethods> { private enum CaptureMethod { AUTOMATIC("automatic"), MANUAL("manual"); public final String value; CaptureMethod(String value) { this.value = value; } } private static final Logger logger = LoggerFactory.getLogger(StripePaymentPluginApi.class); public static final String PROPERTY_FROM_HPP = "fromHPP"; public static final String PROPERTY_HPP_COMPLETION = "fromHPPCompletion"; public static final String PROPERTY_OVERRIDDEN_TRANSACTION_STATUS = "overriddenTransactionStatus"; private final StripeConfigPropertiesConfigurationHandler stripeConfigPropertiesConfigurationHandler; private final StripeDao dao; static final List<String> metadataFilter = ImmutableList.of("payment_method_types"); // needed for API calls to expand the response to contain the 'Sources' // https://stripe.com/docs/api/expanding_objects?lang=java private final Map<String, Object> expandSourcesParams; public StripePaymentPluginApi(final StripeConfigPropertiesConfigurationHandler stripeConfigPropertiesConfigurationHandler, final OSGIKillbillAPI killbillAPI, final OSGIConfigPropertiesService configProperties, final Clock clock, final StripeDao dao) { super(killbillAPI, configProperties, clock, dao); this.stripeConfigPropertiesConfigurationHandler = stripeConfigPropertiesConfigurationHandler; this.dao = dao; expandSourcesParams = new HashMap<String, Object>(); expandSourcesParams.put("expand", ImmutableList.of("sources")); } @Override public List<PaymentTransactionInfoPlugin> getPaymentInfo(final UUID kbAccountId, final UUID kbPaymentId, final Iterable<PluginProperty> properties, final TenantContext context) throws PaymentPluginApiException { final List<PaymentTransactionInfoPlugin> transactions = super.getPaymentInfo(kbAccountId, kbPaymentId, properties, context); if (transactions.isEmpty()) { // We don't know about this payment (maybe it was aborted in a control plugin) return transactions; 
} // Check if a HPP payment needs to be canceled final ExpiredPaymentPolicy expiredPaymentPolicy = new ExpiredPaymentPolicy(clock, stripeConfigPropertiesConfigurationHandler.getConfigurable(context.getTenantId())); final StripePaymentTransactionInfoPlugin transactionToExpire = expiredPaymentPolicy.isExpired(transactions); if (transactionToExpire != null) { logger.info("Canceling expired Stripe transaction {} (created {})", transactionToExpire.getStripeResponseRecord().getStripeId(), transactionToExpire.getStripeResponseRecord().getCreatedDate()); final Map additionalMetadata = ImmutableMap.builder() .put(PROPERTY_OVERRIDDEN_TRANSACTION_STATUS, PaymentPluginStatus.CANCELED.toString()) .put("message", "Payment Expired - Cancelled by Janitor") .build(); try { dao.updateResponse(transactionToExpire.getStripeResponseRecord(), additionalMetadata); } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to update expired payment", e); } // Reload payment return super.getPaymentInfo(kbAccountId, kbPaymentId, properties, context); } // Refresh, if needed boolean wasRefreshed = false; final RequestOptions requestOptions = buildRequestOptions(context); for (final PaymentTransactionInfoPlugin transaction : transactions) { if (transaction.getStatus() == PaymentPluginStatus.PENDING) { final String paymentIntentId = PluginProperties.findPluginPropertyValue("id", transaction.getProperties()); try { PaymentIntent intent = PaymentIntent.retrieve(paymentIntentId, requestOptions); // 3DS validated: must confirm the PaymentIntent if ("requires_confirmation".equals(intent.getStatus())) { logger.info("Confirming Stripe transaction {}", intent.getId()); intent = intent.confirm(requestOptions); } // 3DS authorization failure - Fail payment according to property else if (stripeConfigPropertiesConfigurationHandler.getConfigurable(context.getTenantId()).isCancelOn3DSAuthorizationFailure() && "requires_payment_method".equals(intent.getStatus()) && 
intent.getLastPaymentError() != null && "payment_intent_authentication_failure".equals(intent.getLastPaymentError().getCode())) { logger.info("Cancelling Stripe PaymentIntent after 3DS authorization failure {}", intent.getId()); intent = intent.cancel( PaymentIntentCancelParams.builder() .setCancellationReason(PaymentIntentCancelParams.CancellationReason.ABANDONED) .build(), requestOptions ); } dao.updateResponse(transaction.getKbTransactionPaymentId(), intent, context.getTenantId()); wasRefreshed = true; } catch (final StripeException e) { logger.warn("Unable to fetch latest payment state in Stripe, data might be stale", e); } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to refresh payment", e); } } } return wasRefreshed ? super.getPaymentInfo(kbAccountId, kbPaymentId, properties, context) : transactions; } @Override protected PaymentTransactionInfoPlugin buildPaymentTransactionInfoPlugin(final StripeResponsesRecord record) { return StripePaymentTransactionInfoPlugin.build(record); } @Override public PaymentMethodPlugin getPaymentMethodDetail(final UUID kbAccountId, final UUID kbPaymentMethodId, final Iterable<PluginProperty> properties, final TenantContext context) throws PaymentPluginApiException { final StripePaymentMethodsRecord record; try { record = dao.getPaymentMethod(kbPaymentMethodId, context.getTenantId()); } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to retrieve payment method for kbPaymentMethodId " + kbPaymentMethodId, e); } if (record == null) { // Known in KB but deleted in Stripe? 
return new StripePaymentMethodPlugin(kbPaymentMethodId,
                                                 null,
                                                 false,
                                                 ImmutableList.<PluginProperty>of());
        } else {
            return buildPaymentMethodPlugin(record);
        }
    }

    @Override
    protected PaymentMethodPlugin buildPaymentMethodPlugin(final StripePaymentMethodsRecord record) {
        return StripePaymentMethodPlugin.build(record);
    }

    @Override
    protected PaymentMethodInfoPlugin buildPaymentMethodInfoPlugin(final StripePaymentMethodsRecord record) {
        return StripePaymentMethodInfoPlugin.build(record);
    }

    /**
     * Registers a payment method with the plugin. The Stripe object can be identified in four ways:
     * an external payment method id (with an optional "object" property naming its type:
     * payment_method, token, source or bank_account), a "source" property, a "token" property,
     * or a "sessionId" property for the Checkout flow (the payment method is then read from the
     * completed SetupIntent). Depending on the type, a Stripe Customer may be created and mapped
     * to the account via the STRIPE_CUSTOMER_ID custom field (see createStripeCustomer).
     */
    @Override
    public void addPaymentMethod(final UUID kbAccountId, final UUID kbPaymentMethodId, final PaymentMethodPlugin paymentMethodProps, final boolean setDefault, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        final RequestOptions requestOptions = buildRequestOptions(context);

        // Support both body and query parameters based plugin properties
        final Iterable<PluginProperty> allProperties = PluginProperties.merge(paymentMethodProps.getProperties(), properties);

        String paymentMethodIdInStripe = paymentMethodProps.getExternalPaymentMethodId();
        String objectType = PluginProperties.getValue("object", "payment_method", allProperties);
        if (paymentMethodIdInStripe == null) {
            // Support also a source plugin property as it is a easier to pass it in cURLs and recommended by Stripe
            paymentMethodIdInStripe = PluginProperties.findPluginPropertyValue("source", allProperties);
            if (paymentMethodIdInStripe != null) {
                objectType = "source";
            } else {
                // Support also a token plugin property as it is a bit easier to pass it in cURLs (also sent by kbcmd in the body)
                paymentMethodIdInStripe = PluginProperties.findPluginPropertyValue("token", allProperties);
                if (paymentMethodIdInStripe != null) {
                    objectType = "token";
                }
            }
            // Otherwise, defaults to payment_method (session flow)
        }

        final String sessionId = PluginProperties.findPluginPropertyValue("sessionId", allProperties);
        if (sessionId != null) {
            // Checkout flow: resolve the payment method from the SetupIntent recorded for this session
            try {
                final StripeHppRequestsRecord hppRecord = dao.getHppRequest(sessionId, context.getTenantId().toString());
                if (hppRecord == null) {
                    throw new PaymentPluginApiException("INTERNAL", "Unable to add payment method: missing StripeHppRequestsRecord for sessionId " + sessionId);
                }
                final String setupIntentId = (String) StripeDao.fromAdditionalData(hppRecord.getAdditionalData()).get("setup_intent_id");
                final SetupIntent setupIntent = SetupIntent.retrieve(setupIntentId, requestOptions);
                if ("succeeded".equals(setupIntent.getStatus())) {
                    final String existingCustomerId = getCustomerIdNoException(kbAccountId, context);
                    if (existingCustomerId == null) {
                        // Add magic custom field
                        logger.info("Mapping kbAccountId {} to Stripe customer {}", kbAccountId, setupIntent.getCustomer());
                        killbillAPI.getCustomFieldUserApi().addCustomFields(ImmutableList.of(new PluginCustomField(kbAccountId,
                                                                                                                   ObjectType.ACCOUNT,
                                                                                                                   "STRIPE_CUSTOMER_ID",
                                                                                                                   setupIntent.getCustomer(),
                                                                                                                   clock.getUTCNow())), context);
                    } else if (!existingCustomerId.equals(setupIntent.getCustomer())) {
                        // Refuse to silently re-map the account to a different Stripe customer
                        throw new PaymentPluginApiException("USER", "Unable to add payment method : setupIntent customerId is " + setupIntent.getCustomer() + " but account already mapped to " + existingCustomerId);
                    }
                    // Used below to create the row in the plugin
                    // TODO This implicitly assumes the payment method type if "payment_method", is this always true?
                    paymentMethodIdInStripe = setupIntent.getPaymentMethod();
                } else {
                    throw new PaymentPluginApiException("EXTERNAL", "Unable to add payment method: setupIntent status is: " + setupIntent.getStatus());
                }
            } catch (final SQLException e) {
                throw new PaymentPluginApiException("Unable to add payment method", e);
            } catch (final CustomFieldApiException e) {
                throw new PaymentPluginApiException("Unable to add custom field", e);
            } catch (final StripeException e) {
                throw new PaymentPluginApiException("Error calling Stripe while adding payment method", e);
            }
        }

        final Map<String, Object> additionalDataMap;
        final String stripeId;
        if (paymentMethodIdInStripe != null) {
            if ("payment_method".equals(objectType)) {
                try {
                    final PaymentMethod stripePaymentMethod = PaymentMethod.retrieve(paymentMethodIdInStripe, requestOptions);
                    additionalDataMap = StripePluginProperties.toAdditionalDataMap(stripePaymentMethod);
                    ImmutableMap<String, Object> defaultPaymentMethod = ImmutableMap.of("default_payment_method", paymentMethodIdInStripe);
                    ImmutableMap<String, Object> params = ImmutableMap.of("payment_method", paymentMethodIdInStripe,
                                                                          "invoice_settings", setDefault ? defaultPaymentMethod : ImmutableMap.of());
                    stripeId = createStripeCustomer(kbAccountId, paymentMethodIdInStripe, stripePaymentMethod.getId(), params, requestOptions, allProperties, context);
                } catch (final StripeException e) {
                    throw new PaymentPluginApiException("Error calling Stripe while adding payment method", e);
                }
            } else if ("token".equals(objectType)) {
                try {
                    final Token stripeToken = Token.retrieve(paymentMethodIdInStripe, requestOptions);
                    additionalDataMap = StripePluginProperties.toAdditionalDataMap(stripeToken);
                    stripeId = createStripeCustomer(kbAccountId, paymentMethodIdInStripe, stripeToken.getId(), ImmutableMap.of("source", paymentMethodIdInStripe), requestOptions, allProperties, context);
                } catch (final StripeException e) {
                    throw new PaymentPluginApiException("Error calling Stripe while adding payment method", e);
                }
            } else if ("source".equals(objectType)) {
                try {
                    // The Stripe sourceId must be passed as the PaymentMethodPlugin#getExternalPaymentMethodId
                    final Source stripeSource = Source.retrieve(paymentMethodIdInStripe, requestOptions);
                    additionalDataMap = StripePluginProperties.toAdditionalDataMap(stripeSource);
                    stripeId = createStripeCustomer(kbAccountId, paymentMethodIdInStripe, stripeSource.getId(), ImmutableMap.of("source", paymentMethodIdInStripe), requestOptions, allProperties, context);
                } catch (final StripeException e) {
                    throw new PaymentPluginApiException("Error calling Stripe while adding payment method", e);
                }
            } else if ("bank_account".equals(objectType)) {
                try {
                    // The Stripe bankAccountId must be passed as the PaymentMethodPlugin#getExternalPaymentMethodId
                    final String existingCustomerId = getCustomerId(kbAccountId, context);
                    final PaymentSource paymentSource = Customer.retrieve(existingCustomerId, expandSourcesParams, requestOptions)
                                                               .getSources()
                                                               .retrieve(paymentMethodIdInStripe, requestOptions);
                    additionalDataMap = StripePluginProperties.toAdditionalDataMap(paymentSource);
                    stripeId = paymentSource.getId();
                } catch (final StripeException e) {
                    throw new PaymentPluginApiException("Error calling Stripe while adding payment method", e);
                }
            } else {
                throw new UnsupportedOperationException("Payment Method type not yet supported: " + objectType);
            }
        } else {
            throw new PaymentPluginApiException("USER", "PaymentMethodPlugin#getExternalPaymentMethodId or sessionId plugin property must be passed");
        }

        final DateTime utcNow = clock.getUTCNow();
        try {
            dao.addPaymentMethod(kbAccountId, kbPaymentMethodId, additionalDataMap, stripeId, utcNow, context.getTenantId());
        } catch (final SQLException e) {
            throw new PaymentPluginApiException("Unable to add payment method", e);
        }
    }

    /**
     * Creates a Stripe Customer for the account when none is mapped yet (and the
     * "createStripeCustomer" plugin property does not disable it), storing the mapping in the
     * STRIPE_CUSTOMER_ID custom field. Returns the Stripe id that should be charged.
     */
    private String createStripeCustomer(final UUID kbAccountId,
                                        final String paymentMethodIdInStripe,
                                        final String defaultStripeId,
                                        final ImmutableMap<String, Object> customerParams,
                                        final RequestOptions requestOptions,
                                        final Iterable<PluginProperty> allProperties,
                                        final CallContext context) throws StripeException, PaymentPluginApiException {
        final String stripeId;

        final String existingCustomerId = getCustomerIdNoException(kbAccountId, context);
        final String createStripeCustomerProperty = PluginProperties.findPluginPropertyValue("createStripeCustomer", allProperties);
        if (existingCustomerId == null && (createStripeCustomerProperty == null || Boolean.parseBoolean(createStripeCustomerProperty))) {
            final Account account = getAccount(kbAccountId, context);

            final Map<String, Object> params = new HashMap<>(customerParams);
            params.put("metadata", ImmutableMap.of("kbAccountId", kbAccountId,
                                                   "kbAccountExternalKey", account.getExternalKey()));

            logger.info("Creating customer in Stripe to be able to re-use the token");
            final Customer customer = Customer.create(params, requestOptions);
            // The id to charge now is the default source (e.g.
card), not the token if (customer.getDefaultSource() == null) { stripeId = paymentMethodIdInStripe; } else { stripeId = customer.getDefaultSource(); } // Add magic custom field logger.info("Mapping kbAccountId {} to Stripe customer {}", kbAccountId, customer.getId()); final CustomField customField = new PluginCustomField(kbAccountId, ObjectType.ACCOUNT, "STRIPE_CUSTOMER_ID", customer.getId(), clock.getUTCNow()); try { killbillAPI.getCustomFieldUserApi().addCustomFields(ImmutableList.<CustomField>of(customField), context); } catch (final CustomFieldApiException e) { throw new PaymentPluginApiException("Unable to add custom field", e); } } else { // Stripe Customer exists OR creation is disabled: in those cases use the default ID to charge stripeId = defaultStripeId; } return stripeId; } @Override protected String getPaymentMethodId(final StripePaymentMethodsRecord record) { return record.getKbPaymentMethodId(); } @Override public void deletePaymentMethod(final UUID kbAccountId, final UUID kbPaymentMethodId, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { // Retrieve our currently known payment method final StripePaymentMethodsRecord stripePaymentMethodsRecord; try { stripePaymentMethodsRecord = dao.getPaymentMethod(kbPaymentMethodId, context.getTenantId()); } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to retrieve payment method", e); } // Delete in Stripe final RequestOptions requestOptions = buildRequestOptions(context); try { PaymentMethod.retrieve(stripePaymentMethodsRecord.getStripeId(), requestOptions).detach(requestOptions); } catch (final StripeException e) { throw new PaymentPluginApiException("Unable to delete Stripe payment method", e); } super.deletePaymentMethod(kbAccountId, kbPaymentMethodId, properties, context); } @Override public List<PaymentMethodInfoPlugin> getPaymentMethods(final UUID kbAccountId, final boolean refreshFromGateway, final 
Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        // If refreshFromGateway isn't set, simply read our tables
        if (!refreshFromGateway) {
            return super.getPaymentMethods(kbAccountId, refreshFromGateway, properties, context);
        }

        // Retrieve our currently known payment methods
        final Map<String, StripePaymentMethodsRecord> existingPaymentMethodByStripeId = new HashMap<String, StripePaymentMethodsRecord>();
        try {
            final List<StripePaymentMethodsRecord> existingStripePaymentMethodRecords = dao.getPaymentMethods(kbAccountId, context.getTenantId());
            for (final StripePaymentMethodsRecord existingStripePaymentMethodRecord : existingStripePaymentMethodRecords) {
                existingPaymentMethodByStripeId.put(existingStripePaymentMethodRecord.getStripeId(), existingStripePaymentMethodRecord);
            }
        } catch (final SQLException e) {
            throw new PaymentPluginApiException("Unable to retrieve existing payment methods", e);
        }

        // To retrieve all payment methods in Stripe, retrieve the Stripe customer id (custom field on the account)
        final String stripeCustomerId = getCustomerId(kbAccountId, context);

        // Sync Stripe payment methods (source of truth)
        final RequestOptions requestOptions = buildRequestOptions(context);

        // Track the objects (the various Stripe APIs can return the same objects under a different type)
        final Set<String> stripeObjectsTreated = new HashSet<String>();
        try {
            // Start with PaymentMethod...
            final Map<String, Object> paymentMethodParams = new HashMap<String, Object>();
            paymentMethodParams.put("customer", stripeCustomerId);
            paymentMethodParams.put("type", "card");
            final Iterable<PaymentMethod> stripePaymentMethodsCard = PaymentMethod.list(paymentMethodParams, requestOptions).autoPagingIterable();
            syncPaymentMethods(kbAccountId, stripePaymentMethodsCard, existingPaymentMethodByStripeId, stripeObjectsTreated, context);

            // ...same call again for SEPA debit payment methods (the list API is per-type)
            paymentMethodParams.put("type", "sepa_debit");
            final Iterable<PaymentMethod> stripePaymentMethodsSepaDebit = PaymentMethod.list(paymentMethodParams, requestOptions).autoPagingIterable();
            syncPaymentMethods(kbAccountId, stripePaymentMethodsSepaDebit, existingPaymentMethodByStripeId, stripeObjectsTreated, context);

            // Then go through the sources
            final PaymentSourceCollection psc = Customer.retrieve(stripeCustomerId, expandSourcesParams, requestOptions).getSources();
            if (psc != null) {
                final Iterable<? extends HasId> stripeSources = psc.autoPagingIterable();
                syncPaymentMethods(kbAccountId, stripeSources, existingPaymentMethodByStripeId, stripeObjectsTreated, context);
            }
        } catch (final StripeException e) {
            throw new PaymentPluginApiException("Error connecting to Stripe", e);
        } catch (final PaymentApiException e) {
            throw new PaymentPluginApiException("Error creating payment method", e);
        } catch (final SQLException e) {
            throw new PaymentPluginApiException("Error creating payment method", e);
        }

        // Whatever is left in the map was not returned by Stripe: deactivate it locally
        for (final StripePaymentMethodsRecord stripePaymentMethodsRecord : existingPaymentMethodByStripeId.values()) {
            logger.info("Deactivating local Stripe payment method {} - not found in Stripe", stripePaymentMethodsRecord.getStripeId());
            super.deletePaymentMethod(kbAccountId, UUID.fromString(stripePaymentMethodsRecord.getKbPaymentMethodId()), properties, context);
        }

        // Refresh the state
        return super.getPaymentMethods(kbAccountId, false, properties, context);
    }

    /**
     * Upserts local rows for the given Stripe objects (PaymentMethod or PaymentSource).
     * Objects already seen (tracked in stripeObjectsTreated) are skipped; matched entries are
     * removed from existingPaymentMethodByStripeId so the caller can deactivate the leftovers.
     */
    private void syncPaymentMethods(final UUID kbAccountId, final Iterable<? extends HasId> stripeObjects, final Map<String, StripePaymentMethodsRecord> existingPaymentMethodByStripeId, final Set<String> stripeObjectsTreated, final CallContext context) throws PaymentApiException, SQLException {
        for (final HasId stripeObject : stripeObjects) {
            if (stripeObjectsTreated.contains(stripeObject.getId())) {
                continue;
            }
            stripeObjectsTreated.add(stripeObject.getId());

            final Map<String, Object> additionalDataMap;
            if (stripeObject instanceof PaymentMethod) {
                additionalDataMap = StripePluginProperties.toAdditionalDataMap((PaymentMethod) stripeObject);
            } else if (stripeObject instanceof PaymentSource) {
                additionalDataMap = StripePluginProperties.toAdditionalDataMap((PaymentSource) stripeObject);
            } else {
                throw new UnsupportedOperationException("Unsupported object: " + stripeObject);
            }

            // We remove it here to build the list of local payment methods to delete
            final StripePaymentMethodsRecord existingPaymentMethodRecord = existingPaymentMethodByStripeId.remove(stripeObject.getId());
            if (existingPaymentMethodRecord == null) {
                // We don't know about it yet, create it
                logger.info("Creating new local Stripe payment method {}", stripeObject.getId());
                final List<PluginProperty> properties = PluginProperties.buildPluginProperties(additionalDataMap);
                final StripePaymentMethodPlugin paymentMethodInfo = new StripePaymentMethodPlugin(null,
                                                                                                 stripeObject.getId(),
                                                                                                 false,
                                                                                                 properties);
                killbillAPI.getPaymentApi().addPaymentMethod(getAccount(kbAccountId, context),
                                                             stripeObject.getId(),
                                                             StripeActivator.PLUGIN_NAME,
                                                             false,
                                                             paymentMethodInfo,
                                                             ImmutableList.<PluginProperty>of(),
                                                             context);
            } else {
                logger.info("Updating existing local Stripe payment method {}", stripeObject.getId());
                dao.updatePaymentMethod(UUID.fromString(existingPaymentMethodRecord.getKbPaymentMethodId()),
                                        additionalDataMap,
                                        stripeObject.getId(),
                                        clock.getUTCNow(),
                                        context.getTenantId());
            }
        }
    }

    /**
     * Triggers (or completes, for the HPP flow) an authorization in Stripe.
     */
    @Override
    public PaymentTransactionInfoPlugin authorizePayment(final UUID kbAccountId, final UUID kbPaymentId, final
UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        final StripeResponsesRecord stripeResponsesRecord;
        try {
            stripeResponsesRecord = dao.getSuccessfulAuthorizationResponse(kbPaymentId, context.getTenantId());
        } catch (final SQLException e) {
            throw new PaymentPluginApiException("SQL exception when fetching response", e);
        }

        // A previous successful authorization flagged fromHPP means this call completes an HPP flow
        final boolean isHPPCompletion = stripeResponsesRecord != null &&
                                        Boolean.valueOf(MoreObjects.firstNonNull(StripeDao.fromAdditionalData(stripeResponsesRecord.getAdditionalData()).get(PROPERTY_FROM_HPP), false).toString());
        if (!isHPPCompletion) {
            updateResponseWithAdditionalProperties(kbTransactionId, properties, context.getTenantId());
            // We don't have any record for that payment: we want to trigger an actual authorization call (or complete a 3D-S authorization)
            return executeInitialTransaction(TransactionType.AUTHORIZE, kbAccountId, kbPaymentId, kbTransactionId, kbPaymentMethodId, amount, currency, properties, context);
        } else {
            // We already have a record for that payment transaction: we just update the response row with additional properties
            // (the API can be called for instance after the user is redirected back from the HPP)
            updateResponseWithAdditionalProperties(kbTransactionId, PluginProperties.merge(ImmutableMap.of(PROPERTY_HPP_COMPLETION, true), properties), context.getTenantId());
        }

        return buildPaymentTransactionInfoPlugin(stripeResponsesRecord);
    }

    // Persists the given plugin properties onto the stored response row for this transaction.
    private void updateResponseWithAdditionalProperties(final UUID kbTransactionId, final Iterable<PluginProperty> properties, final UUID tenantId) throws PaymentPluginApiException {
        try {
            dao.updateResponse(kbTransactionId, properties, tenantId);
        } catch (final SQLException e) {
            throw new PaymentPluginApiException("SQL exception when updating response", e);
        }
    }

    /**
     * Captures a previously authorized PaymentIntent (partial captures via amount_to_capture).
     */
    @Override
    public PaymentTransactionInfoPlugin capturePayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        return executeFollowUpTransaction(TransactionType.CAPTURE,
                                          new TransactionExecutor<PaymentIntent>() {
                                              @Override
                                              public PaymentIntent execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord, final StripeResponsesRecord previousResponse) throws StripeException {
                                                  final RequestOptions requestOptions = buildRequestOptions(context);
                                                  final PaymentIntent intent = PaymentIntent.retrieve((String) StripeDao.fromAdditionalData(previousResponse.getAdditionalData()).get("id"), requestOptions);
                                                  final Map<String, Object> paymentIntentParams = new HashMap<String, Object>();
                                                  paymentIntentParams.put("amount_to_capture", KillBillMoney.toMinorUnits(currency.toString(), amount));
                                                  return intent.capture(paymentIntentParams, requestOptions);
                                              }
                                          },
                                          kbAccountId,
                                          kbPaymentId,
                                          kbTransactionId,
                                          kbPaymentMethodId,
                                          amount,
                                          currency,
                                          properties,
                                          context);
    }

    /**
     * Triggers a purchase (auto-capture) in Stripe, unless an HPP notification already
     * recorded a response row for this transaction.
     */
    @Override
    public PaymentTransactionInfoPlugin purchasePayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        final StripeResponsesRecord stripeResponsesRecord;
        try {
            stripeResponsesRecord = dao.updateResponse(kbTransactionId, properties, context.getTenantId());
        } catch (final SQLException e) {
            throw new PaymentPluginApiException("HPP notification came through, but we encountered a database error", e);
        }

        if (stripeResponsesRecord == null) {
            // We don't have any record for that payment: we want to trigger an actual purchase (auto-capture) call
            return executeInitialTransaction(TransactionType.PURCHASE, kbAccountId, kbPaymentId, kbTransactionId, kbPaymentMethodId, amount, currency, properties, context);
        } else {
            // We already have a record for that payment transaction and we just updated the response row with additional properties
            // (the API can be called for instance after the user is redirected back from the HPP)
        }

        return buildPaymentTransactionInfoPlugin(stripeResponsesRecord);
    }

    /**
     * Cancels the PaymentIntent associated with the payment.
     */
    @Override
    public PaymentTransactionInfoPlugin voidPayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        return executeFollowUpTransaction(TransactionType.VOID,
                                          new TransactionExecutor<PaymentIntent>() {
                                              @Override
                                              public PaymentIntent execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord, final StripeResponsesRecord previousResponse) throws StripeException {
                                                  final RequestOptions requestOptions = buildRequestOptions(context);
                                                  final PaymentIntent intent = PaymentIntent.retrieve((String) StripeDao.fromAdditionalData(previousResponse.getAdditionalData()).get("id"), requestOptions);
                                                  return intent.cancel(requestOptions);
                                              }
                                          },
                                          kbAccountId,
                                          kbPaymentId,
                                          kbTransactionId,
                                          kbPaymentMethodId,
                                          null,
                                          null,
                                          properties,
                                          context);
    }

    @Override
    public PaymentTransactionInfoPlugin creditPayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException {
        throw new PaymentPluginApiException("INTERNAL", "#creditPayment not yet implemented, please contact [email protected]");
    }

    @Override
    public PaymentTransactionInfoPlugin refundPayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws
PaymentPluginApiException { return executeFollowUpTransaction(TransactionType.REFUND, new TransactionExecutor<PaymentIntent>() { @Override public PaymentIntent execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord, final StripeResponsesRecord previousResponse) throws StripeException { final RequestOptions requestOptions = buildRequestOptions(context); final Map additionalData = StripeDao.fromAdditionalData(previousResponse.getAdditionalData()); final String paymentIntent = (String) additionalData.get("id"); // The PaymentIntent API doesn't have a refund API - refund the charge created behind the scenes instead final String lastChargeId = (String) additionalData.get("last_charge_id"); final Map<String, Object> params = new HashMap<>(); params.put("charge", lastChargeId); params.put("amount", KillBillMoney.toMinorUnits(currency.toString(), amount)); Refund.create(params, requestOptions); return PaymentIntent.retrieve(paymentIntent, requestOptions); } }, kbAccountId, kbPaymentId, kbTransactionId, kbPaymentMethodId, amount, currency, properties, context); } @VisibleForTesting RequestOptions buildRequestOptions(final TenantContext context) { final StripeConfigProperties stripeConfigProperties = stripeConfigPropertiesConfigurationHandler.getConfigurable(context.getTenantId()); return RequestOptions.builder() .setConnectTimeout(Integer.parseInt(stripeConfigProperties.getConnectionTimeout())) .setReadTimeout(Integer.parseInt(stripeConfigProperties.getReadTimeout())) .setApiKey(stripeConfigProperties.getApiKey()) .build(); } @Override public HostedPaymentPageFormDescriptor buildFormDescriptor(final UUID kbAccountId, final Iterable<PluginProperty> customFields, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { final RequestOptions requestOptions = buildRequestOptions(context); final Account account = getAccount(kbAccountId, context); String stripeCustomerId = 
getCustomerIdNoException(kbAccountId, context); if (stripeCustomerId == null) { // add new customer to stripe account Map<String, Object> address = new HashMap<>(); address.put("city", account.getCity()); address.put("country", account.getCountry()); address.put("line1", account.getAddress1()); address.put("line2", account.getAddress2()); address.put("postal_code", account.getPostalCode()); address.put("state", account.getStateOrProvince()); Map<String, Object> params = new HashMap<>(); params.put("email", account.getEmail()); params.put("name", account.getName()); params.put("address", address); params.put("description", "created via KB"); try { Customer customer = Customer.create(params, requestOptions); stripeCustomerId = customer.getId(); } catch (StripeException e) { throw new PaymentPluginApiException("Unable to create Stripe customer", e); } } final Map<String, Object> params = new HashMap<String, Object>(); final Map<String, Object> metadata = new HashMap<String, Object>(); StreamSupport.stream(customFields.spliterator(), false) .filter(entry -> !metadataFilter.contains(entry.getKey())) .forEach(p -> metadata.put(p.getKey(), p.getValue())); params.put("metadata", metadata); params.put("customer", stripeCustomerId); final List<String> defaultPaymentMethodTypes = new ArrayList<String>(); defaultPaymentMethodTypes.add("card"); final PluginProperty customPaymentMethods = StreamSupport.stream(customFields.spliterator(), false) .filter(entry -> "payment_method_types".equals(entry.getKey())) .findFirst().orElse(null); params.put("payment_method_types", customPaymentMethods != null && customPaymentMethods.getValue() != null ? 
customPaymentMethods.getValue() : defaultPaymentMethodTypes); params.put("mode", "setup"); params.put("success_url", PluginProperties.getValue("success_url", "https://example.com/success?sessionId={CHECKOUT_SESSION_ID}", customFields)); params.put("cancel_url", PluginProperties.getValue("cancel_url", "https://example.com/cancel", customFields)); final StripeConfigProperties stripeConfigProperties = stripeConfigPropertiesConfigurationHandler.getConfigurable(context.getTenantId()); try { logger.info("Creating Stripe session"); final Session session = Session.create(params, requestOptions); dao.addHppRequest(kbAccountId, null, null, session, clock.getUTCNow(), context.getTenantId()); return new PluginHostedPaymentPageFormDescriptor(kbAccountId, null, PluginProperties.buildPluginProperties(StripePluginProperties.toAdditionalDataMap(session, stripeConfigProperties.getPublicKey()))); } catch (final StripeException e) { throw new PaymentPluginApiException("Unable to create Stripe session", e); } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to save Stripe session", e); } } @Override public GatewayNotification processNotification(final String notification, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { throw new PaymentPluginApiException("INTERNAL", "#processNotification not yet implemented, please contact [email protected]"); } private abstract static class TransactionExecutor<T> { public T execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord) throws StripeException { throw new UnsupportedOperationException(); } public T execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord, final StripeResponsesRecord previousResponse) throws StripeException { throw new UnsupportedOperationException(); } } private PaymentTransactionInfoPlugin executeInitialTransaction(final TransactionType transactionType, final UUID kbAccountId, final UUID 
kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { final String customerId = getCustomerIdNoException(kbAccountId, context); return executeInitialTransaction(transactionType, new TransactionExecutor<PaymentIntent>() { @Override public PaymentIntent execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord) throws StripeException { final RequestOptions requestOptions = buildRequestOptions(context); final CaptureMethod captureMethod = transactionType == TransactionType.AUTHORIZE ? CaptureMethod.MANUAL : CaptureMethod.AUTOMATIC; final Map<String, Object> paymentIntentParams = new HashMap<>(); paymentIntentParams.put("amount", KillBillMoney.toMinorUnits(currency.toString(), amount)); paymentIntentParams.put("currency", currency.toString()); paymentIntentParams.put("capture_method", captureMethod.value); // TODO Do we need to switch to manual confirmation to be able to set off_session=recurring? 
paymentIntentParams.put("confirm", true); // See https://stripe.com/docs/api/payment_intents/create#create_payment_intent-return_url final String returnUrl = PluginProperties.findPluginPropertyValue("return_url", properties); if (returnUrl != null) { paymentIntentParams.put("return_url", returnUrl); } // See https://groups.google.com/forum/?#!msg/killbilling-users/li3RNs-YmIA/oaUrBElMFQAJ paymentIntentParams.put("confirmation_method", "automatic"); if (customerId != null) { paymentIntentParams.put("customer", customerId); } paymentIntentParams.put("metadata", ImmutableMap.of("kbAccountId", kbAccountId, "kbPaymentId", kbPaymentId, "kbTransactionId", kbTransactionId, "kbPaymentMethodId", kbPaymentMethodId)); final Map additionalData = StripeDao.fromAdditionalData(paymentMethodsRecord.getAdditionalData()); if (paymentMethodsRecord.getStripeId().startsWith("tok")) { // https://github.com/stripe/stripe-java/issues/821 paymentIntentParams.put("payment_method_data", ImmutableMap.of("type", "card", "card", ImmutableMap.of("token", paymentMethodsRecord.getStripeId()))); } else { final String objectType = MoreObjects.firstNonNull((String) additionalData.get("object"), "payment_method"); if ("payment_method".equals(objectType)) { paymentIntentParams.put(objectType, paymentMethodsRecord.getStripeId()); } else { paymentIntentParams.put("source", paymentMethodsRecord.getStripeId()); } } final ImmutableList.Builder<String> paymentMethodTypesBuilder = ImmutableList.builder(); paymentMethodTypesBuilder.add("card"); if (captureMethod == CaptureMethod.AUTOMATIC && currency == Currency.EUR) { paymentMethodTypesBuilder.add("sepa_debit"); } if (transactionType == TransactionType.PURCHASE && currency == Currency.USD) { // See https://groups.google.com/forum/?#!msg/killbilling-users/li3RNs-YmIA/oaUrBElMFQAJ paymentMethodTypesBuilder.add("ach_debit"); } paymentIntentParams.put("payment_method_types", paymentMethodTypesBuilder.build()); final StripeConfigProperties stripeConfigProperties = 
stripeConfigPropertiesConfigurationHandler.getConfigurable(context.getTenantId()); paymentIntentParams.put("description", stripeConfigProperties.getChargeDescription()); paymentIntentParams.put("statement_descriptor", stripeConfigProperties.getChargeStatementDescriptor()); logger.info("Creating Stripe PaymentIntent"); return PaymentIntent.create(paymentIntentParams, requestOptions); } }, kbAccountId, kbPaymentId, kbTransactionId, kbPaymentMethodId, amount, currency, properties, context); } private PaymentTransactionInfoPlugin executeInitialTransaction(final TransactionType transactionType, final TransactionExecutor<PaymentIntent> transactionExecutor, final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final TenantContext context) throws PaymentPluginApiException { final Account account = getAccount(kbAccountId, context); final StripePaymentMethodsRecord nonNullPaymentMethodsRecord = getStripePaymentMethodsRecord(kbPaymentMethodId, context); final DateTime utcNow = clock.getUTCNow(); PaymentIntent response; if (shouldSkipStripe(properties)) { throw new UnsupportedOperationException("TODO"); } else { try { response = transactionExecutor.execute(account, nonNullPaymentMethodsRecord); } catch (final CardException e) { try { final RequestOptions requestOptions = buildRequestOptions(context); final Charge charge = Charge.retrieve(e.getCharge(), requestOptions); final String paymentIntentId = charge.getPaymentIntent(); final PaymentIntent paymentIntent = PaymentIntent.retrieve(paymentIntentId, requestOptions); response = paymentIntent; } catch (final StripeException e2) { throw new PaymentPluginApiException("Error getting card error details from Stripe", e2); } } catch (final StripeException e) { throw new PaymentPluginApiException("Error connecting to Stripe", e); } } try { final StripeResponsesRecord responsesRecord = 
dao.addResponse(kbAccountId, kbPaymentId, kbTransactionId, transactionType, amount, currency, response, utcNow, context.getTenantId()); return StripePaymentTransactionInfoPlugin.build(responsesRecord); } catch (final SQLException e) { throw new PaymentPluginApiException("Payment went through, but we encountered a database error. Payment details: " + response.toString(), e); } } private PaymentTransactionInfoPlugin executeFollowUpTransaction(final TransactionType transactionType, final TransactionExecutor<PaymentIntent> transactionExecutor, final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, @Nullable final BigDecimal amount, @Nullable final Currency currency, final Iterable<PluginProperty> properties, final TenantContext context) throws PaymentPluginApiException { final Account account = getAccount(kbAccountId, context); final StripePaymentMethodsRecord nonNullPaymentMethodsRecord = getStripePaymentMethodsRecord(kbPaymentMethodId, context); final StripeResponsesRecord previousResponse; try { previousResponse = dao.getSuccessfulAuthorizationResponse(kbPaymentId, context.getTenantId()); if (previousResponse == null) { throw new PaymentPluginApiException(null, "Unable to retrieve previous payment response for kbTransactionId " + kbTransactionId); } } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to retrieve previous payment response for kbTransactionId " + kbTransactionId, e); } final DateTime utcNow = clock.getUTCNow(); final PaymentIntent response; if (shouldSkipStripe(properties)) { throw new UnsupportedOperationException("TODO"); } else { try { response = transactionExecutor.execute(account, nonNullPaymentMethodsRecord, previousResponse); } catch (final StripeException e) { throw new PaymentPluginApiException("Error connecting to Stripe", e); } } try { final StripeResponsesRecord responsesRecord = dao.addResponse(kbAccountId, kbPaymentId, kbTransactionId, transactionType, amount, 
currency, response, utcNow, context.getTenantId()); return StripePaymentTransactionInfoPlugin.build(responsesRecord); } catch (final SQLException e) { throw new PaymentPluginApiException("Payment went through, but we encountered a database error. Payment details: " + (response.toString()), e); } } private String getCustomerId(final UUID kbAccountId, final CallContext context) throws PaymentPluginApiException { final String stripeCustomerId = getCustomerIdNoException(kbAccountId, context); if (stripeCustomerId == null) { throw new PaymentPluginApiException("INTERNAL", "Missing STRIPE_CUSTOMER_ID custom field"); } return stripeCustomerId; } private String getCustomerIdNoException(final UUID kbAccountId, final CallContext context) { final List<CustomField> customFields = killbillAPI.getCustomFieldUserApi().getCustomFieldsForAccountType(kbAccountId, ObjectType.ACCOUNT, context); String stripeCustomerId = null; for (final CustomField customField : customFields) { if (customField.getFieldName().equals("STRIPE_CUSTOMER_ID")) { stripeCustomerId = customField.getFieldValue(); break; } } return stripeCustomerId; } private StripePaymentMethodsRecord getStripePaymentMethodsRecord(@Nullable final UUID kbPaymentMethodId, final TenantContext context) throws PaymentPluginApiException { StripePaymentMethodsRecord paymentMethodsRecord = null; if (kbPaymentMethodId != null) { try { paymentMethodsRecord = dao.getPaymentMethod(kbPaymentMethodId, context.getTenantId()); } catch (final SQLException e) { throw new PaymentPluginApiException("Failed to retrieve payment method", e); } } return MoreObjects.firstNonNull(paymentMethodsRecord, emptyRecord(kbPaymentMethodId)); } private StripePaymentMethodsRecord emptyRecord(@Nullable final UUID kbPaymentMethodId) { final StripePaymentMethodsRecord record = new StripePaymentMethodsRecord(); if (kbPaymentMethodId != null) { record.setKbPaymentMethodId(kbPaymentMethodId.toString()); } return record; } private boolean shouldSkipStripe(final 
Iterable<PluginProperty> properties) { return "true".equals(PluginProperties.findPluginPropertyValue("skipGw", properties)) || "true".equals(PluginProperties.findPluginPropertyValue("skip_gw", properties)); } }
src/main/java/org/killbill/billing/plugin/stripe/StripePaymentPluginApi.java
/* * Copyright 2020-2020 Equinix, Inc * Copyright 2014-2020 The Billing Project, LLC * * The Billing Project licenses this file to you under the Apache License, version 2.0 * (the "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.killbill.billing.plugin.stripe; import java.math.BigDecimal; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.stream.StreamSupport; import javax.annotation.Nullable; import org.joda.time.DateTime; import org.killbill.billing.ObjectType; import org.killbill.billing.account.api.Account; import org.killbill.billing.catalog.api.Currency; import org.killbill.billing.osgi.libs.killbill.OSGIConfigPropertiesService; import org.killbill.billing.osgi.libs.killbill.OSGIKillbillAPI; import org.killbill.billing.payment.api.PaymentApiException; import org.killbill.billing.payment.api.PaymentMethodPlugin; import org.killbill.billing.payment.api.PluginProperty; import org.killbill.billing.payment.api.TransactionType; import org.killbill.billing.payment.plugin.api.GatewayNotification; import org.killbill.billing.payment.plugin.api.HostedPaymentPageFormDescriptor; import org.killbill.billing.payment.plugin.api.PaymentMethodInfoPlugin; import org.killbill.billing.payment.plugin.api.PaymentPluginApiException; import org.killbill.billing.payment.plugin.api.PaymentPluginStatus; import org.killbill.billing.payment.plugin.api.PaymentTransactionInfoPlugin; 
import org.killbill.billing.plugin.api.PluginProperties; import org.killbill.billing.plugin.api.core.PluginCustomField; import org.killbill.billing.plugin.api.payment.PluginHostedPaymentPageFormDescriptor; import org.killbill.billing.plugin.api.payment.PluginPaymentPluginApi; import org.killbill.billing.plugin.stripe.dao.StripeDao; import org.killbill.billing.plugin.stripe.dao.gen.tables.StripePaymentMethods; import org.killbill.billing.plugin.stripe.dao.gen.tables.StripeResponses; import org.killbill.billing.plugin.stripe.dao.gen.tables.records.StripeHppRequestsRecord; import org.killbill.billing.plugin.stripe.dao.gen.tables.records.StripePaymentMethodsRecord; import org.killbill.billing.plugin.stripe.dao.gen.tables.records.StripeResponsesRecord; import org.killbill.billing.plugin.util.KillBillMoney; import org.killbill.billing.util.api.CustomFieldApiException; import org.killbill.billing.util.callcontext.CallContext; import org.killbill.billing.util.callcontext.TenantContext; import org.killbill.billing.util.customfield.CustomField; import org.killbill.clock.Clock; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.MoreObjects; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.stripe.exception.CardException; import com.stripe.exception.StripeException; import com.stripe.model.Charge; import com.stripe.model.Customer; import com.stripe.model.HasId; import com.stripe.model.PaymentIntent; import com.stripe.model.PaymentMethod; import com.stripe.model.PaymentSource; import com.stripe.model.PaymentSourceCollection; import com.stripe.model.Refund; import com.stripe.model.SetupIntent; import com.stripe.model.Source; import com.stripe.model.Token; import com.stripe.model.checkout.Session; import com.stripe.net.RequestOptions; import com.stripe.param.PaymentIntentCancelParams; public class StripePaymentPluginApi extends 
PluginPaymentPluginApi<StripeResponsesRecord, StripeResponses, StripePaymentMethodsRecord, StripePaymentMethods> { private enum CaptureMethod { AUTOMATIC("automatic"), MANUAL("manual"); public final String value; CaptureMethod(String value) { this.value = value; } } private static final Logger logger = LoggerFactory.getLogger(StripePaymentPluginApi.class); public static final String PROPERTY_FROM_HPP = "fromHPP"; public static final String PROPERTY_HPP_COMPLETION = "fromHPPCompletion"; public static final String PROPERTY_OVERRIDDEN_TRANSACTION_STATUS = "overriddenTransactionStatus"; private final StripeConfigPropertiesConfigurationHandler stripeConfigPropertiesConfigurationHandler; private final StripeDao dao; static final List<String> metadataFilter = ImmutableList.of("payment_method_types"); // needed for API calls to expand the response to contain the 'Sources' // https://stripe.com/docs/api/expanding_objects?lang=java private final Map<String, Object> expandSourcesParams; public StripePaymentPluginApi(final StripeConfigPropertiesConfigurationHandler stripeConfigPropertiesConfigurationHandler, final OSGIKillbillAPI killbillAPI, final OSGIConfigPropertiesService configProperties, final Clock clock, final StripeDao dao) { super(killbillAPI, configProperties, clock, dao); this.stripeConfigPropertiesConfigurationHandler = stripeConfigPropertiesConfigurationHandler; this.dao = dao; expandSourcesParams = new HashMap<String, Object>(); expandSourcesParams.put("expand", ImmutableList.of("sources")); } @Override public List<PaymentTransactionInfoPlugin> getPaymentInfo(final UUID kbAccountId, final UUID kbPaymentId, final Iterable<PluginProperty> properties, final TenantContext context) throws PaymentPluginApiException { final List<PaymentTransactionInfoPlugin> transactions = super.getPaymentInfo(kbAccountId, kbPaymentId, properties, context); if (transactions.isEmpty()) { // We don't know about this payment (maybe it was aborted in a control plugin) return transactions; 
} // Check if a HPP payment needs to be canceled final ExpiredPaymentPolicy expiredPaymentPolicy = new ExpiredPaymentPolicy(clock, stripeConfigPropertiesConfigurationHandler.getConfigurable(context.getTenantId())); final StripePaymentTransactionInfoPlugin transactionToExpire = expiredPaymentPolicy.isExpired(transactions); if (transactionToExpire != null) { logger.info("Canceling expired Stripe transaction {} (created {})", transactionToExpire.getStripeResponseRecord().getStripeId(), transactionToExpire.getStripeResponseRecord().getCreatedDate()); final Map additionalMetadata = ImmutableMap.builder() .put(PROPERTY_OVERRIDDEN_TRANSACTION_STATUS, PaymentPluginStatus.CANCELED.toString()) .put("message", "Payment Expired - Cancelled by Janitor") .build(); try { dao.updateResponse(transactionToExpire.getStripeResponseRecord(), additionalMetadata); } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to update expired payment", e); } // Reload payment return super.getPaymentInfo(kbAccountId, kbPaymentId, properties, context); } // Refresh, if needed boolean wasRefreshed = false; final RequestOptions requestOptions = buildRequestOptions(context); for (final PaymentTransactionInfoPlugin transaction : transactions) { if (transaction.getStatus() == PaymentPluginStatus.PENDING) { final String paymentIntentId = PluginProperties.findPluginPropertyValue("id", transaction.getProperties()); try { PaymentIntent intent = PaymentIntent.retrieve(paymentIntentId, requestOptions); // 3DS validated: must confirm the PaymentIntent if ("requires_confirmation".equals(intent.getStatus())) { logger.info("Confirming Stripe transaction {}", intent.getId()); intent = intent.confirm(requestOptions); } // 3DS authorization failure - Fail payment according to property else if (stripeConfigPropertiesConfigurationHandler.getConfigurable(context.getTenantId()).isCancelOn3DSAuthorizationFailure() && "requires_payment_method".equals(intent.getStatus()) && 
intent.getLastPaymentError() != null && "payment_intent_authentication_failure".equals(intent.getLastPaymentError().getCode())) { logger.info("Cancelling Stripe PaymentIntent after 3DS authorization failure {}", intent.getId()); intent = intent.cancel( PaymentIntentCancelParams.builder() .setCancellationReason(PaymentIntentCancelParams.CancellationReason.ABANDONED) .build(), requestOptions ); } dao.updateResponse(transaction.getKbTransactionPaymentId(), intent, context.getTenantId()); wasRefreshed = true; } catch (final StripeException e) { logger.warn("Unable to fetch latest payment state in Stripe, data might be stale", e); } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to refresh payment", e); } } } return wasRefreshed ? super.getPaymentInfo(kbAccountId, kbPaymentId, properties, context) : transactions; } @Override protected PaymentTransactionInfoPlugin buildPaymentTransactionInfoPlugin(final StripeResponsesRecord record) { return StripePaymentTransactionInfoPlugin.build(record); } @Override public PaymentMethodPlugin getPaymentMethodDetail(final UUID kbAccountId, final UUID kbPaymentMethodId, final Iterable<PluginProperty> properties, final TenantContext context) throws PaymentPluginApiException { final StripePaymentMethodsRecord record; try { record = dao.getPaymentMethod(kbPaymentMethodId, context.getTenantId()); } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to retrieve payment method for kbPaymentMethodId " + kbPaymentMethodId, e); } if (record == null) { // Known in KB but deleted in Stripe? 
return new StripePaymentMethodPlugin(kbPaymentMethodId, null, false, ImmutableList.<PluginProperty>of()); } else { return buildPaymentMethodPlugin(record); } } @Override protected PaymentMethodPlugin buildPaymentMethodPlugin(final StripePaymentMethodsRecord record) { return StripePaymentMethodPlugin.build(record); } @Override protected PaymentMethodInfoPlugin buildPaymentMethodInfoPlugin(final StripePaymentMethodsRecord record) { return StripePaymentMethodInfoPlugin.build(record); } @Override public void addPaymentMethod(final UUID kbAccountId, final UUID kbPaymentMethodId, final PaymentMethodPlugin paymentMethodProps, final boolean setDefault, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { final RequestOptions requestOptions = buildRequestOptions(context); // Support both body and query parameters based plugin properties final Iterable<PluginProperty> allProperties = PluginProperties.merge(paymentMethodProps.getProperties(), properties); String paymentMethodIdInStripe = paymentMethodProps.getExternalPaymentMethodId(); String objectType = PluginProperties.getValue("object", "payment_method", allProperties); if (paymentMethodIdInStripe == null) { // Support also a source plugin property as it is a easier to pass it in cURLs and recommended by Stripe paymentMethodIdInStripe = PluginProperties.findPluginPropertyValue("source", allProperties); if (paymentMethodIdInStripe != null) { objectType = "source"; } else { // Support also a token plugin property as it is a bit easier to pass it in cURLs (also sent by kbcmd in the body) paymentMethodIdInStripe = PluginProperties.findPluginPropertyValue("token", allProperties); if (paymentMethodIdInStripe != null) { objectType = "token"; } } // Otherwise, defaults to payment_method (session flow) } final String sessionId = PluginProperties.findPluginPropertyValue("sessionId", allProperties); if (sessionId != null) { // Checkout flow try { final StripeHppRequestsRecord 
hppRecord = dao.getHppRequest(sessionId, context.getTenantId().toString()); if (hppRecord == null) { throw new PaymentPluginApiException("INTERNAL", "Unable to add payment method: missing StripeHppRequestsRecord for sessionId " + sessionId); } final String setupIntentId = (String) StripeDao.fromAdditionalData(hppRecord.getAdditionalData()).get("setup_intent_id"); final SetupIntent setupIntent = SetupIntent.retrieve(setupIntentId, requestOptions); if ("succeeded".equals(setupIntent.getStatus())) { final String existingCustomerId = getCustomerIdNoException(kbAccountId, context); if (existingCustomerId == null) { // Add magic custom field logger.info("Mapping kbAccountId {} to Stripe customer {}", kbAccountId, setupIntent.getCustomer()); killbillAPI.getCustomFieldUserApi().addCustomFields(ImmutableList.of(new PluginCustomField(kbAccountId, ObjectType.ACCOUNT, "STRIPE_CUSTOMER_ID", setupIntent.getCustomer(), clock.getUTCNow())), context); } else if (!existingCustomerId.equals(setupIntent.getCustomer())) { throw new PaymentPluginApiException("USER", "Unable to add payment method : setupIntent customerId is " + setupIntent.getCustomer() + " but account already mapped to " + existingCustomerId); } // Used below to create the row in the plugin // TODO This implicitly assumes the payment method type if "payment_method", is this always true? 
paymentMethodIdInStripe = setupIntent.getPaymentMethod(); } else { throw new PaymentPluginApiException("EXTERNAL", "Unable to add payment method: setupIntent status is: " + setupIntent.getStatus()); } } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to add payment method", e); } catch (final CustomFieldApiException e) { throw new PaymentPluginApiException("Unable to add custom field", e); } catch (final StripeException e) { throw new PaymentPluginApiException("Error calling Stripe while adding payment method", e); } } final Map<String, Object> additionalDataMap; final String stripeId; if (paymentMethodIdInStripe != null) { if ("payment_method".equals(objectType)) { try { final PaymentMethod stripePaymentMethod = PaymentMethod.retrieve(paymentMethodIdInStripe, requestOptions); additionalDataMap = StripePluginProperties.toAdditionalDataMap(stripePaymentMethod); ImmutableMap<String, Object> params = ImmutableMap.of("payment_method", paymentMethodIdInStripe, "invoice_settings", ImmutableMap.of("default_payment_method", paymentMethodIdInStripe)); stripeId = createStripeCustomer(kbAccountId, paymentMethodIdInStripe, stripePaymentMethod.getId(), params, requestOptions, allProperties, context); } catch (final StripeException e) { throw new PaymentPluginApiException("Error calling Stripe while adding payment method", e); } } else if ("token".equals(objectType)) { try { final Token stripeToken = Token.retrieve(paymentMethodIdInStripe, requestOptions); additionalDataMap = StripePluginProperties.toAdditionalDataMap(stripeToken); stripeId = createStripeCustomer(kbAccountId, paymentMethodIdInStripe, stripeToken.getId(), ImmutableMap.of("source", paymentMethodIdInStripe), requestOptions, allProperties, context); } catch (final StripeException e) { throw new PaymentPluginApiException("Error calling Stripe while adding payment method", e); } } else if ("source".equals(objectType)) { try { // The Stripe sourceId must be passed as the 
PaymentMethodPlugin#getExternalPaymentMethodId final Source stripeSource = Source.retrieve(paymentMethodIdInStripe, requestOptions); additionalDataMap = StripePluginProperties.toAdditionalDataMap(stripeSource); stripeId = createStripeCustomer(kbAccountId, paymentMethodIdInStripe, stripeSource.getId(), ImmutableMap.of("source", paymentMethodIdInStripe), requestOptions, allProperties, context); } catch (final StripeException e) { throw new PaymentPluginApiException("Error calling Stripe while adding payment method", e); } } else if ("bank_account".equals(objectType)) { try { // The Stripe bankAccountId must be passed as the PaymentMethodPlugin#getExternalPaymentMethodId final String existingCustomerId = getCustomerId(kbAccountId, context); final PaymentSource paymentSource = Customer.retrieve(existingCustomerId, expandSourcesParams, requestOptions) .getSources() .retrieve(paymentMethodIdInStripe, requestOptions); additionalDataMap = StripePluginProperties.toAdditionalDataMap(paymentSource); stripeId = paymentSource.getId(); } catch (final StripeException e) { throw new PaymentPluginApiException("Error calling Stripe while adding payment method", e); } } else { throw new UnsupportedOperationException("Payment Method type not yet supported: " + objectType); } } else { throw new PaymentPluginApiException("USER", "PaymentMethodPlugin#getExternalPaymentMethodId or sessionId plugin property must be passed"); } final DateTime utcNow = clock.getUTCNow(); try { dao.addPaymentMethod(kbAccountId, kbPaymentMethodId, additionalDataMap, stripeId, utcNow, context.getTenantId()); } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to add payment method", e); } } private String createStripeCustomer(final UUID kbAccountId, final String paymentMethodIdInStripe, final String defaultStripeId, final ImmutableMap<String, Object> customerParams, final RequestOptions requestOptions, final Iterable<PluginProperty> allProperties, final CallContext context) throws 
StripeException, PaymentPluginApiException { final String stripeId; final String existingCustomerId = getCustomerIdNoException(kbAccountId, context); final String createStripeCustomerProperty = PluginProperties.findPluginPropertyValue("createStripeCustomer", allProperties); if (existingCustomerId == null && (createStripeCustomerProperty == null || Boolean.parseBoolean(createStripeCustomerProperty))) { final Account account = getAccount(kbAccountId, context); final Map<String, Object> params = new HashMap<>(customerParams); params.put("metadata", ImmutableMap.of("kbAccountId", kbAccountId, "kbAccountExternalKey", account.getExternalKey())); logger.info("Creating customer in Stripe to be able to re-use the token"); final Customer customer = Customer.create(params, requestOptions); // The id to charge now is the default source (e.g. card), not the token if (customer.getDefaultSource() == null) { stripeId = paymentMethodIdInStripe; } else { stripeId = customer.getDefaultSource(); } // Add magic custom field logger.info("Mapping kbAccountId {} to Stripe customer {}", kbAccountId, customer.getId()); final CustomField customField = new PluginCustomField(kbAccountId, ObjectType.ACCOUNT, "STRIPE_CUSTOMER_ID", customer.getId(), clock.getUTCNow()); try { killbillAPI.getCustomFieldUserApi().addCustomFields(ImmutableList.<CustomField>of(customField), context); } catch (final CustomFieldApiException e) { throw new PaymentPluginApiException("Unable to add custom field", e); } } else { // Stripe Customer exists OR creation is disabled: in those cases use the default ID to charge stripeId = defaultStripeId; } return stripeId; } @Override protected String getPaymentMethodId(final StripePaymentMethodsRecord record) { return record.getKbPaymentMethodId(); } @Override public void deletePaymentMethod(final UUID kbAccountId, final UUID kbPaymentMethodId, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { // Retrieve our currently 
known payment method final StripePaymentMethodsRecord stripePaymentMethodsRecord; try { stripePaymentMethodsRecord = dao.getPaymentMethod(kbPaymentMethodId, context.getTenantId()); } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to retrieve payment method", e); } // Delete in Stripe final RequestOptions requestOptions = buildRequestOptions(context); try { PaymentMethod.retrieve(stripePaymentMethodsRecord.getStripeId(), requestOptions).detach(requestOptions); } catch (final StripeException e) { throw new PaymentPluginApiException("Unable to delete Stripe payment method", e); } super.deletePaymentMethod(kbAccountId, kbPaymentMethodId, properties, context); } @Override public List<PaymentMethodInfoPlugin> getPaymentMethods(final UUID kbAccountId, final boolean refreshFromGateway, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { // If refreshFromGateway isn't set, simply read our tables if (!refreshFromGateway) { return super.getPaymentMethods(kbAccountId, refreshFromGateway, properties, context); } // Retrieve our currently known payment methods final Map<String, StripePaymentMethodsRecord> existingPaymentMethodByStripeId = new HashMap<String, StripePaymentMethodsRecord>(); try { final List<StripePaymentMethodsRecord> existingStripePaymentMethodRecords = dao.getPaymentMethods(kbAccountId, context.getTenantId()); for (final StripePaymentMethodsRecord existingStripePaymentMethodRecord : existingStripePaymentMethodRecords) { existingPaymentMethodByStripeId.put(existingStripePaymentMethodRecord.getStripeId(), existingStripePaymentMethodRecord); } } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to retrieve existing payment methods", e); } // To retrieve all payment methods in Stripe, retrieve the Stripe customer id (custom field on the account) final String stripeCustomerId = getCustomerId(kbAccountId, context); // Sync Stripe payment methods (source of truth) 
final RequestOptions requestOptions = buildRequestOptions(context); // Track the objects (the various Stripe APIs can return the same objects under a different type) final Set<String> stripeObjectsTreated = new HashSet<String>(); try { // Start with PaymentMethod... final Map<String, Object> paymentMethodParams = new HashMap<String, Object>(); paymentMethodParams.put("customer", stripeCustomerId); paymentMethodParams.put("type", "card"); final Iterable<PaymentMethod> stripePaymentMethodsCard = PaymentMethod.list(paymentMethodParams, requestOptions).autoPagingIterable(); syncPaymentMethods(kbAccountId, stripePaymentMethodsCard, existingPaymentMethodByStripeId, stripeObjectsTreated, context); paymentMethodParams.put("type", "sepa_debit"); final Iterable<PaymentMethod> stripePaymentMethodsSepaDebit = PaymentMethod.list(paymentMethodParams, requestOptions).autoPagingIterable(); syncPaymentMethods(kbAccountId, stripePaymentMethodsSepaDebit, existingPaymentMethodByStripeId, stripeObjectsTreated, context); // Then go through the sources final PaymentSourceCollection psc = Customer.retrieve(stripeCustomerId, expandSourcesParams, requestOptions).getSources(); if (psc != null) { final Iterable<? 
extends HasId> stripeSources = psc.autoPagingIterable(); syncPaymentMethods(kbAccountId, stripeSources, existingPaymentMethodByStripeId, stripeObjectsTreated, context); } } catch (final StripeException e) { throw new PaymentPluginApiException("Error connecting to Stripe", e); } catch (final PaymentApiException e) { throw new PaymentPluginApiException("Error creating payment method", e); } catch (final SQLException e) { throw new PaymentPluginApiException("Error creating payment method", e); } for (final StripePaymentMethodsRecord stripePaymentMethodsRecord : existingPaymentMethodByStripeId.values()) { logger.info("Deactivating local Stripe payment method {} - not found in Stripe", stripePaymentMethodsRecord.getStripeId()); super.deletePaymentMethod(kbAccountId, UUID.fromString(stripePaymentMethodsRecord.getKbPaymentMethodId()), properties, context); } // Refresh the state return super.getPaymentMethods(kbAccountId, false, properties, context); } private void syncPaymentMethods(final UUID kbAccountId, final Iterable<? 
extends HasId> stripeObjects, final Map<String, StripePaymentMethodsRecord> existingPaymentMethodByStripeId, final Set<String> stripeObjectsTreated, final CallContext context) throws PaymentApiException, SQLException { for (final HasId stripeObject : stripeObjects) { if (stripeObjectsTreated.contains(stripeObject.getId())) { continue; } stripeObjectsTreated.add(stripeObject.getId()); final Map<String, Object> additionalDataMap; if (stripeObject instanceof PaymentMethod) { additionalDataMap = StripePluginProperties.toAdditionalDataMap((PaymentMethod) stripeObject); } else if (stripeObject instanceof PaymentSource) { additionalDataMap = StripePluginProperties.toAdditionalDataMap((PaymentSource) stripeObject); } else { throw new UnsupportedOperationException("Unsupported object: " + stripeObject); } // We remove it here to build the list of local payment methods to delete final StripePaymentMethodsRecord existingPaymentMethodRecord = existingPaymentMethodByStripeId.remove(stripeObject.getId()); if (existingPaymentMethodRecord == null) { // We don't know about it yet, create it logger.info("Creating new local Stripe payment method {}", stripeObject.getId()); final List<PluginProperty> properties = PluginProperties.buildPluginProperties(additionalDataMap); final StripePaymentMethodPlugin paymentMethodInfo = new StripePaymentMethodPlugin(null, stripeObject.getId(), false, properties); killbillAPI.getPaymentApi().addPaymentMethod(getAccount(kbAccountId, context), stripeObject.getId(), StripeActivator.PLUGIN_NAME, false, paymentMethodInfo, ImmutableList.<PluginProperty>of(), context); } else { logger.info("Updating existing local Stripe payment method {}", stripeObject.getId()); dao.updatePaymentMethod(UUID.fromString(existingPaymentMethodRecord.getKbPaymentMethodId()), additionalDataMap, stripeObject.getId(), clock.getUTCNow(), context.getTenantId()); } } } @Override public PaymentTransactionInfoPlugin authorizePayment(final UUID kbAccountId, final UUID kbPaymentId, final 
UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { final StripeResponsesRecord stripeResponsesRecord; try { stripeResponsesRecord = dao.getSuccessfulAuthorizationResponse(kbPaymentId, context.getTenantId()); } catch (final SQLException e) { throw new PaymentPluginApiException("SQL exception when fetching response", e); } final boolean isHPPCompletion = stripeResponsesRecord != null && Boolean.valueOf(MoreObjects.firstNonNull(StripeDao.fromAdditionalData(stripeResponsesRecord.getAdditionalData()).get(PROPERTY_FROM_HPP), false).toString()); if (!isHPPCompletion) { updateResponseWithAdditionalProperties(kbTransactionId, properties, context.getTenantId()); // We don't have any record for that payment: we want to trigger an actual authorization call (or complete a 3D-S authorization) return executeInitialTransaction(TransactionType.AUTHORIZE, kbAccountId, kbPaymentId, kbTransactionId, kbPaymentMethodId, amount, currency, properties, context); } else { // We already have a record for that payment transaction: we just update the response row with additional properties // (the API can be called for instance after the user is redirected back from the HPP) updateResponseWithAdditionalProperties(kbTransactionId, PluginProperties.merge(ImmutableMap.of(PROPERTY_HPP_COMPLETION, true), properties), context.getTenantId()); } return buildPaymentTransactionInfoPlugin(stripeResponsesRecord); } private void updateResponseWithAdditionalProperties(final UUID kbTransactionId, final Iterable<PluginProperty> properties, final UUID tenantId) throws PaymentPluginApiException { try { dao.updateResponse(kbTransactionId, properties, tenantId); } catch (final SQLException e) { throw new PaymentPluginApiException("SQL exception when updating response", e); } } @Override public PaymentTransactionInfoPlugin capturePayment(final UUID kbAccountId, 
final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { return executeFollowUpTransaction(TransactionType.CAPTURE, new TransactionExecutor<PaymentIntent>() { @Override public PaymentIntent execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord, final StripeResponsesRecord previousResponse) throws StripeException { final RequestOptions requestOptions = buildRequestOptions(context); final PaymentIntent intent = PaymentIntent.retrieve((String) StripeDao.fromAdditionalData(previousResponse.getAdditionalData()).get("id"), requestOptions); final Map<String, Object> paymentIntentParams = new HashMap<String, Object>(); paymentIntentParams.put("amount_to_capture", KillBillMoney.toMinorUnits(currency.toString(), amount)); return intent.capture(paymentIntentParams, requestOptions); } }, kbAccountId, kbPaymentId, kbTransactionId, kbPaymentMethodId, amount, currency, properties, context); } @Override public PaymentTransactionInfoPlugin purchasePayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { final StripeResponsesRecord stripeResponsesRecord; try { stripeResponsesRecord = dao.updateResponse(kbTransactionId, properties, context.getTenantId()); } catch (final SQLException e) { throw new PaymentPluginApiException("HPP notification came through, but we encountered a database error", e); } if (stripeResponsesRecord == null) { // We don't have any record for that payment: we want to trigger an actual purchase (auto-capture) call return executeInitialTransaction(TransactionType.PURCHASE, kbAccountId, kbPaymentId, kbTransactionId, kbPaymentMethodId, amount, 
currency, properties, context); } else { // We already have a record for that payment transaction and we just updated the response row with additional properties // (the API can be called for instance after the user is redirected back from the HPP) } return buildPaymentTransactionInfoPlugin(stripeResponsesRecord); } @Override public PaymentTransactionInfoPlugin voidPayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { return executeFollowUpTransaction(TransactionType.VOID, new TransactionExecutor<PaymentIntent>() { @Override public PaymentIntent execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord, final StripeResponsesRecord previousResponse) throws StripeException { final RequestOptions requestOptions = buildRequestOptions(context); final PaymentIntent intent = PaymentIntent.retrieve((String) StripeDao.fromAdditionalData(previousResponse.getAdditionalData()).get("id"), requestOptions); return intent.cancel(requestOptions); } }, kbAccountId, kbPaymentId, kbTransactionId, kbPaymentMethodId, null, null, properties, context); } @Override public PaymentTransactionInfoPlugin creditPayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { throw new PaymentPluginApiException("INTERNAL", "#creditPayment not yet implemented, please contact [email protected]"); } @Override public PaymentTransactionInfoPlugin refundPayment(final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws 
PaymentPluginApiException { return executeFollowUpTransaction(TransactionType.REFUND, new TransactionExecutor<PaymentIntent>() { @Override public PaymentIntent execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord, final StripeResponsesRecord previousResponse) throws StripeException { final RequestOptions requestOptions = buildRequestOptions(context); final Map additionalData = StripeDao.fromAdditionalData(previousResponse.getAdditionalData()); final String paymentIntent = (String) additionalData.get("id"); // The PaymentIntent API doesn't have a refund API - refund the charge created behind the scenes instead final String lastChargeId = (String) additionalData.get("last_charge_id"); final Map<String, Object> params = new HashMap<>(); params.put("charge", lastChargeId); params.put("amount", KillBillMoney.toMinorUnits(currency.toString(), amount)); Refund.create(params, requestOptions); return PaymentIntent.retrieve(paymentIntent, requestOptions); } }, kbAccountId, kbPaymentId, kbTransactionId, kbPaymentMethodId, amount, currency, properties, context); } @VisibleForTesting RequestOptions buildRequestOptions(final TenantContext context) { final StripeConfigProperties stripeConfigProperties = stripeConfigPropertiesConfigurationHandler.getConfigurable(context.getTenantId()); return RequestOptions.builder() .setConnectTimeout(Integer.parseInt(stripeConfigProperties.getConnectionTimeout())) .setReadTimeout(Integer.parseInt(stripeConfigProperties.getReadTimeout())) .setApiKey(stripeConfigProperties.getApiKey()) .build(); } @Override public HostedPaymentPageFormDescriptor buildFormDescriptor(final UUID kbAccountId, final Iterable<PluginProperty> customFields, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { final RequestOptions requestOptions = buildRequestOptions(context); final Account account = getAccount(kbAccountId, context); String stripeCustomerId = 
getCustomerIdNoException(kbAccountId, context); if (stripeCustomerId == null) { // add new customer to stripe account Map<String, Object> address = new HashMap<>(); address.put("city", account.getCity()); address.put("country", account.getCountry()); address.put("line1", account.getAddress1()); address.put("line2", account.getAddress2()); address.put("postal_code", account.getPostalCode()); address.put("state", account.getStateOrProvince()); Map<String, Object> params = new HashMap<>(); params.put("email", account.getEmail()); params.put("name", account.getName()); params.put("address", address); params.put("description", "created via KB"); try { Customer customer = Customer.create(params, requestOptions); stripeCustomerId = customer.getId(); } catch (StripeException e) { throw new PaymentPluginApiException("Unable to create Stripe customer", e); } } final Map<String, Object> params = new HashMap<String, Object>(); final Map<String, Object> metadata = new HashMap<String, Object>(); StreamSupport.stream(customFields.spliterator(), false) .filter(entry -> !metadataFilter.contains(entry.getKey())) .forEach(p -> metadata.put(p.getKey(), p.getValue())); params.put("metadata", metadata); params.put("customer", stripeCustomerId); final List<String> defaultPaymentMethodTypes = new ArrayList<String>(); defaultPaymentMethodTypes.add("card"); final PluginProperty customPaymentMethods = StreamSupport.stream(customFields.spliterator(), false) .filter(entry -> "payment_method_types".equals(entry.getKey())) .findFirst().orElse(null); params.put("payment_method_types", customPaymentMethods != null && customPaymentMethods.getValue() != null ? 
customPaymentMethods.getValue() : defaultPaymentMethodTypes); params.put("mode", "setup"); params.put("success_url", PluginProperties.getValue("success_url", "https://example.com/success?sessionId={CHECKOUT_SESSION_ID}", customFields)); params.put("cancel_url", PluginProperties.getValue("cancel_url", "https://example.com/cancel", customFields)); final StripeConfigProperties stripeConfigProperties = stripeConfigPropertiesConfigurationHandler.getConfigurable(context.getTenantId()); try { logger.info("Creating Stripe session"); final Session session = Session.create(params, requestOptions); dao.addHppRequest(kbAccountId, null, null, session, clock.getUTCNow(), context.getTenantId()); return new PluginHostedPaymentPageFormDescriptor(kbAccountId, null, PluginProperties.buildPluginProperties(StripePluginProperties.toAdditionalDataMap(session, stripeConfigProperties.getPublicKey()))); } catch (final StripeException e) { throw new PaymentPluginApiException("Unable to create Stripe session", e); } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to save Stripe session", e); } } @Override public GatewayNotification processNotification(final String notification, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { throw new PaymentPluginApiException("INTERNAL", "#processNotification not yet implemented, please contact [email protected]"); } private abstract static class TransactionExecutor<T> { public T execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord) throws StripeException { throw new UnsupportedOperationException(); } public T execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord, final StripeResponsesRecord previousResponse) throws StripeException { throw new UnsupportedOperationException(); } } private PaymentTransactionInfoPlugin executeInitialTransaction(final TransactionType transactionType, final UUID kbAccountId, final UUID 
kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final CallContext context) throws PaymentPluginApiException { final String customerId = getCustomerIdNoException(kbAccountId, context); return executeInitialTransaction(transactionType, new TransactionExecutor<PaymentIntent>() { @Override public PaymentIntent execute(final Account account, final StripePaymentMethodsRecord paymentMethodsRecord) throws StripeException { final RequestOptions requestOptions = buildRequestOptions(context); final CaptureMethod captureMethod = transactionType == TransactionType.AUTHORIZE ? CaptureMethod.MANUAL : CaptureMethod.AUTOMATIC; final Map<String, Object> paymentIntentParams = new HashMap<>(); paymentIntentParams.put("amount", KillBillMoney.toMinorUnits(currency.toString(), amount)); paymentIntentParams.put("currency", currency.toString()); paymentIntentParams.put("capture_method", captureMethod.value); // TODO Do we need to switch to manual confirmation to be able to set off_session=recurring? 
paymentIntentParams.put("confirm", true); // See https://stripe.com/docs/api/payment_intents/create#create_payment_intent-return_url final String returnUrl = PluginProperties.findPluginPropertyValue("return_url", properties); if (returnUrl != null) { paymentIntentParams.put("return_url", returnUrl); } // See https://groups.google.com/forum/?#!msg/killbilling-users/li3RNs-YmIA/oaUrBElMFQAJ paymentIntentParams.put("confirmation_method", "automatic"); if (customerId != null) { paymentIntentParams.put("customer", customerId); } paymentIntentParams.put("metadata", ImmutableMap.of("kbAccountId", kbAccountId, "kbPaymentId", kbPaymentId, "kbTransactionId", kbTransactionId, "kbPaymentMethodId", kbPaymentMethodId)); final Map additionalData = StripeDao.fromAdditionalData(paymentMethodsRecord.getAdditionalData()); if (paymentMethodsRecord.getStripeId().startsWith("tok")) { // https://github.com/stripe/stripe-java/issues/821 paymentIntentParams.put("payment_method_data", ImmutableMap.of("type", "card", "card", ImmutableMap.of("token", paymentMethodsRecord.getStripeId()))); } else { final String objectType = MoreObjects.firstNonNull((String) additionalData.get("object"), "payment_method"); if ("payment_method".equals(objectType)) { paymentIntentParams.put(objectType, paymentMethodsRecord.getStripeId()); } else { paymentIntentParams.put("source", paymentMethodsRecord.getStripeId()); } } final ImmutableList.Builder<String> paymentMethodTypesBuilder = ImmutableList.builder(); paymentMethodTypesBuilder.add("card"); if (captureMethod == CaptureMethod.AUTOMATIC && currency == Currency.EUR) { paymentMethodTypesBuilder.add("sepa_debit"); } if (transactionType == TransactionType.PURCHASE && currency == Currency.USD) { // See https://groups.google.com/forum/?#!msg/killbilling-users/li3RNs-YmIA/oaUrBElMFQAJ paymentMethodTypesBuilder.add("ach_debit"); } paymentIntentParams.put("payment_method_types", paymentMethodTypesBuilder.build()); final StripeConfigProperties stripeConfigProperties = 
stripeConfigPropertiesConfigurationHandler.getConfigurable(context.getTenantId()); paymentIntentParams.put("description", stripeConfigProperties.getChargeDescription()); paymentIntentParams.put("statement_descriptor", stripeConfigProperties.getChargeStatementDescriptor()); logger.info("Creating Stripe PaymentIntent"); return PaymentIntent.create(paymentIntentParams, requestOptions); } }, kbAccountId, kbPaymentId, kbTransactionId, kbPaymentMethodId, amount, currency, properties, context); } private PaymentTransactionInfoPlugin executeInitialTransaction(final TransactionType transactionType, final TransactionExecutor<PaymentIntent> transactionExecutor, final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, final BigDecimal amount, final Currency currency, final Iterable<PluginProperty> properties, final TenantContext context) throws PaymentPluginApiException { final Account account = getAccount(kbAccountId, context); final StripePaymentMethodsRecord nonNullPaymentMethodsRecord = getStripePaymentMethodsRecord(kbPaymentMethodId, context); final DateTime utcNow = clock.getUTCNow(); PaymentIntent response; if (shouldSkipStripe(properties)) { throw new UnsupportedOperationException("TODO"); } else { try { response = transactionExecutor.execute(account, nonNullPaymentMethodsRecord); } catch (final CardException e) { try { final RequestOptions requestOptions = buildRequestOptions(context); final Charge charge = Charge.retrieve(e.getCharge(), requestOptions); final String paymentIntentId = charge.getPaymentIntent(); final PaymentIntent paymentIntent = PaymentIntent.retrieve(paymentIntentId, requestOptions); response = paymentIntent; } catch (final StripeException e2) { throw new PaymentPluginApiException("Error getting card error details from Stripe", e2); } } catch (final StripeException e) { throw new PaymentPluginApiException("Error connecting to Stripe", e); } } try { final StripeResponsesRecord responsesRecord = 
dao.addResponse(kbAccountId, kbPaymentId, kbTransactionId, transactionType, amount, currency, response, utcNow, context.getTenantId()); return StripePaymentTransactionInfoPlugin.build(responsesRecord); } catch (final SQLException e) { throw new PaymentPluginApiException("Payment went through, but we encountered a database error. Payment details: " + response.toString(), e); } } private PaymentTransactionInfoPlugin executeFollowUpTransaction(final TransactionType transactionType, final TransactionExecutor<PaymentIntent> transactionExecutor, final UUID kbAccountId, final UUID kbPaymentId, final UUID kbTransactionId, final UUID kbPaymentMethodId, @Nullable final BigDecimal amount, @Nullable final Currency currency, final Iterable<PluginProperty> properties, final TenantContext context) throws PaymentPluginApiException { final Account account = getAccount(kbAccountId, context); final StripePaymentMethodsRecord nonNullPaymentMethodsRecord = getStripePaymentMethodsRecord(kbPaymentMethodId, context); final StripeResponsesRecord previousResponse; try { previousResponse = dao.getSuccessfulAuthorizationResponse(kbPaymentId, context.getTenantId()); if (previousResponse == null) { throw new PaymentPluginApiException(null, "Unable to retrieve previous payment response for kbTransactionId " + kbTransactionId); } } catch (final SQLException e) { throw new PaymentPluginApiException("Unable to retrieve previous payment response for kbTransactionId " + kbTransactionId, e); } final DateTime utcNow = clock.getUTCNow(); final PaymentIntent response; if (shouldSkipStripe(properties)) { throw new UnsupportedOperationException("TODO"); } else { try { response = transactionExecutor.execute(account, nonNullPaymentMethodsRecord, previousResponse); } catch (final StripeException e) { throw new PaymentPluginApiException("Error connecting to Stripe", e); } } try { final StripeResponsesRecord responsesRecord = dao.addResponse(kbAccountId, kbPaymentId, kbTransactionId, transactionType, amount, 
currency, response, utcNow, context.getTenantId()); return StripePaymentTransactionInfoPlugin.build(responsesRecord); } catch (final SQLException e) { throw new PaymentPluginApiException("Payment went through, but we encountered a database error. Payment details: " + (response.toString()), e); } } private String getCustomerId(final UUID kbAccountId, final CallContext context) throws PaymentPluginApiException { final String stripeCustomerId = getCustomerIdNoException(kbAccountId, context); if (stripeCustomerId == null) { throw new PaymentPluginApiException("INTERNAL", "Missing STRIPE_CUSTOMER_ID custom field"); } return stripeCustomerId; } private String getCustomerIdNoException(final UUID kbAccountId, final CallContext context) { final List<CustomField> customFields = killbillAPI.getCustomFieldUserApi().getCustomFieldsForAccountType(kbAccountId, ObjectType.ACCOUNT, context); String stripeCustomerId = null; for (final CustomField customField : customFields) { if (customField.getFieldName().equals("STRIPE_CUSTOMER_ID")) { stripeCustomerId = customField.getFieldValue(); break; } } return stripeCustomerId; } private StripePaymentMethodsRecord getStripePaymentMethodsRecord(@Nullable final UUID kbPaymentMethodId, final TenantContext context) throws PaymentPluginApiException { StripePaymentMethodsRecord paymentMethodsRecord = null; if (kbPaymentMethodId != null) { try { paymentMethodsRecord = dao.getPaymentMethod(kbPaymentMethodId, context.getTenantId()); } catch (final SQLException e) { throw new PaymentPluginApiException("Failed to retrieve payment method", e); } } return MoreObjects.firstNonNull(paymentMethodsRecord, emptyRecord(kbPaymentMethodId)); } private StripePaymentMethodsRecord emptyRecord(@Nullable final UUID kbPaymentMethodId) { final StripePaymentMethodsRecord record = new StripePaymentMethodsRecord(); if (kbPaymentMethodId != null) { record.setKbPaymentMethodId(kbPaymentMethodId.toString()); } return record; } private boolean shouldSkipStripe(final 
Iterable<PluginProperty> properties) { return "true".equals(PluginProperties.findPluginPropertyValue("skipGw", properties)) || "true".equals(PluginProperties.findPluginPropertyValue("skip_gw", properties)); } }
Set default Payment Method when creating Customer
src/main/java/org/killbill/billing/plugin/stripe/StripePaymentPluginApi.java
Set default Payment Method when creating Customer
<ide><path>src/main/java/org/killbill/billing/plugin/stripe/StripePaymentPluginApi.java <ide> try { <ide> final PaymentMethod stripePaymentMethod = PaymentMethod.retrieve(paymentMethodIdInStripe, requestOptions); <ide> additionalDataMap = StripePluginProperties.toAdditionalDataMap(stripePaymentMethod); <add> ImmutableMap<String, Object> defaultPaymentMethod = ImmutableMap.of("default_payment_method", paymentMethodIdInStripe); <ide> ImmutableMap<String, Object> params = ImmutableMap.of("payment_method", paymentMethodIdInStripe, <del> "invoice_settings", ImmutableMap.of("default_payment_method", paymentMethodIdInStripe)); <add> "invoice_settings", setDefault ? defaultPaymentMethod : ImmutableMap.of()); <ide> stripeId = createStripeCustomer(kbAccountId, paymentMethodIdInStripe, stripePaymentMethod.getId(), params, requestOptions, allProperties, context); <ide> } catch (final StripeException e) { <ide> throw new PaymentPluginApiException("Error calling Stripe while adding payment method", e);
Java
mit
11963eaf0125d087ecb488027dae9c64e5a2b7ac
0
lorisdanto/automatark,lorisdanto/automatark,lorisdanto/automatark
package testBenchmarks; import java.io.ByteArrayOutputStream; import java.io.FileDescriptor; import java.io.FileOutputStream; import java.io.PrintStream; import java.util.*; import static org.junit.Assert.*; import java.io.FileReader; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import org.junit.Test; import LTLparser.LTLNode; import LTLparser.LTLParserProvider; import RegexParser.RegexParserProvider; import RegexParser.RegexNode; import TreeAutomataParser.TreeNode; import TreeAutomataParser.TreeParserProvider; import MonaParser.MonaParserProvider; /* * This class parses and removes duplicate benchmarks, * The parser for regular expression currently does not support non regular operators * The MONA predicate features is not supported by the mona parser, thus the Regsy and STRAND set in WS1S are not tested here. * The above features will be implemented in the near future. * */ public class TestAllBenchmarks { @Test public void testLTL() { //redirect and gather error message to String for easier debugging ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; HashSet<String> set = new HashSet<>(); //this array will store duplicate LTLs ArrayList<String> dupFile = new ArrayList<>(); try { Files.walk(Paths.get("../LTL/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".ltl"))) { try { FileReader file = new FileReader(filePath.toFile()); List<LTLNode> nodes = LTLParserProvider.parse(file); for (LTLNode node : nodes) { String cur = node.toString(); if (set.contains(cur)) { dupFile.add(cur); } else { set.add(cur); } } System.out.println(filePath); } catch (Exception e1) { e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; }finally { //redirect system.err back System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFile.isEmpty()) { 
System.out.println("No duplicate formulas"); } else { System.err.println("Found duplicate formulas"); for (String s : dupFile) { System.out.println(s); } } assertTrue(noFail); } @Test public void testRegex() { ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; HashSet<String> set = new HashSet<>(); ArrayList<String> dupFile = new ArrayList<>(); try { Files.walk(Paths.get("../regex/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".re"))) { try { FileReader file = new FileReader(filePath.toFile()); List<RegexNode> nodes = RegexParserProvider.parse(file); for (RegexNode node : nodes) { String cur = node.toString(); if (set.contains(cur)) { dupFile.add(cur); } else { set.add(cur); } } System.out.println(filePath); } catch (Exception e1) { e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; } finally { System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFile.isEmpty()) { System.out.println("No duplicate formulas"); } else { System.err.println("Found duplicate formulas"); for (String s : dupFile) { System.out.println(s); } } String errors = errMsgs.toString(); System.err.println(errors); assertTrue(noFail); } @Test public void testNFA() { ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; HashSet<String> set = new HashSet<>(); ArrayList<String> dupFilePath = new ArrayList<>(); try { Files.walk(Paths.get("../NFA/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".timbuk"))) { try { FileReader file = new FileReader(filePath.toFile()); System.out.println(filePath); TreeNode node = TreeParserProvider.parse(file); System.out.println(filePath + " parsed correctly"); String cur = node.toString(); if (set.contains(cur)) { dupFilePath.add(filePath.toString()); } else { 
set.add(cur); } } catch (Exception e1) { System.err.println(filePath); e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; }finally { System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFilePath.isEmpty()) { System.out.println("No duplicate formulas"); } else { System.err.println("Found duplicate formulas"); for (String s : dupFilePath) { System.err.println("Duplicate found at: "+ s); Path p = Paths.get(s); try{ Files.delete(p); System.out.println("deleted "+ s); }catch(Exception e){ System.out.println("Error deleting " +s +": "+ e); } } } String errors = errMsgs.toString(); if(errors.length()>0){ System.err.println("Found errors!"); System.err.println(errors); } assertTrue(noFail); } @Test public void testTreeAutomata() { ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; HashSet<String> set = new HashSet<>(); ArrayList<String> dupFilePath = new ArrayList<>(); try { Files.walk(Paths.get("../tree-automata/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".timbuk"))) { try { FileReader file = new FileReader(filePath.toFile()); System.out.println(filePath); TreeNode node = TreeParserProvider.parse(file); System.out.println(filePath + "parsed correctly"); String cur = node.toString(); if (set.contains(cur)) { dupFilePath.add(filePath.toString()); } else { set.add(cur); } } catch (Exception e1) { System.err.println(filePath); e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); System.err.println(e); noFail = false; }finally { System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFilePath.isEmpty()) { System.out.println("No duplicate formulas"); } else { System.err.println("Found duplicate formulas"); for (String s : dupFilePath) { System.err.println("Duplicate found at: "+ s); Path p = Paths.get(s); try{ Files.delete(p); 
System.out.println("deleted "+ s); }catch(Exception e){ System.out.println("Error deleting " +s +": "+ e); } } } String errors = errMsgs.toString(); if(errors.length()>0){ System.err.println("Found errors!"); System.err.println(errors); } assertTrue(noFail); } @Test public void testM2L() { //redirect and gather error message to String for easier debugging ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; try { Files.walk(Paths.get("../m2l-str/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".mona"))) { try { FileReader file = new FileReader(filePath.toFile()); MonaParserProvider monaProvider= new MonaParserProvider(file); System.out.println("parsing "+ filePath); monaProvider.parseFormula(); System.out.println("successfully parsed "+ filePath); } catch (Exception e1) { e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; }finally { //redirect system.err back System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } String errors = errMsgs.toString(); if(errors.length()>0){ System.err.println("Found errors!"); System.err.println(errors); } assertTrue(noFail); } }
Parsers/src/testBenchmarks/TestAllBenchmarks.java
package testBenchmarks; import java.io.ByteArrayOutputStream; import java.io.FileDescriptor; import java.io.FileOutputStream; import java.io.PrintStream; import java.util.*; import static org.junit.Assert.*; import java.io.FileReader; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import org.junit.Test; import LTLparser.LTLNode; import LTLparser.LTLParserProvider; import RegexParser.RegexParserProvider; import RegexParser.RegexNode; import TreeAutomataParser.TreeNode; import TreeAutomataParser.TreeParserProvider; import MonaParser.MonaParserProvider; public class TestAllBenchmarks { @Test public void testLTL() { //redirect and gather error message to String for easier debugging ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; HashSet<String> set = new HashSet<>(); //this array will store duplicate LTLs ArrayList<String> dupFile = new ArrayList<>(); try { Files.walk(Paths.get("../LTL/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".ltl"))) { try { FileReader file = new FileReader(filePath.toFile()); List<LTLNode> nodes = LTLParserProvider.parse(file); for (LTLNode node : nodes) { String cur = node.toString(); if (set.contains(cur)) { dupFile.add(cur); } else { set.add(cur); } } System.out.println(filePath); } catch (Exception e1) { e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; }finally { //redirect system.err back System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFile.isEmpty()) { System.out.println("No duplicate formulas"); } else { System.err.println("Found duplicate formulas"); for (String s : dupFile) { System.out.println(s); } } assertTrue(noFail); } @Test public void testRegex() { ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; 
HashSet<String> set = new HashSet<>(); ArrayList<String> dupFile = new ArrayList<>(); try { Files.walk(Paths.get("../regex/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".re"))) { try { FileReader file = new FileReader(filePath.toFile()); List<RegexNode> nodes = RegexParserProvider.parse(file); for (RegexNode node : nodes) { String cur = node.toString(); if (set.contains(cur)) { dupFile.add(cur); } else { set.add(cur); } } System.out.println(filePath); } catch (Exception e1) { e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; } finally { System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFile.isEmpty()) { System.out.println("No duplicate formulas"); } else { System.err.println("Found duplicate formulas"); for (String s : dupFile) { System.out.println(s); } } String errors = errMsgs.toString(); System.err.println(errors); assertTrue(noFail); } @Test public void testNFA() { ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; HashSet<String> set = new HashSet<>(); ArrayList<String> dupFilePath = new ArrayList<>(); try { Files.walk(Paths.get("../NFA/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".timbuk"))) { try { FileReader file = new FileReader(filePath.toFile()); System.out.println(filePath); TreeNode node = TreeParserProvider.parse(file); System.out.println(filePath + " parsed correctly"); String cur = node.toString(); if (set.contains(cur)) { dupFilePath.add(filePath.toString()); } else { set.add(cur); } } catch (Exception e1) { System.err.println(filePath); e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; }finally { System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFilePath.isEmpty()) { System.out.println("No duplicate formulas"); } else { 
System.err.println("Found duplicate formulas"); for (String s : dupFilePath) { System.err.println("Duplicate found at: "+ s); Path p = Paths.get(s); try{ Files.delete(p); System.out.println("deleted "+ s); }catch(Exception e){ System.out.println("Error deleting " +s +": "+ e); } } } String errors = errMsgs.toString(); if(errors.length()>0){ System.err.println("Found errors!"); System.err.println(errors); } assertTrue(noFail); } @Test public void testTreeAutomata() { ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; HashSet<String> set = new HashSet<>(); ArrayList<String> dupFilePath = new ArrayList<>(); try { Files.walk(Paths.get("../tree-automata/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".timbuk"))) { try { FileReader file = new FileReader(filePath.toFile()); System.out.println(filePath); TreeNode node = TreeParserProvider.parse(file); System.out.println(filePath + "parsed correctly"); String cur = node.toString(); if (set.contains(cur)) { dupFilePath.add(filePath.toString()); } else { set.add(cur); } } catch (Exception e1) { System.err.println(filePath); e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); System.err.println(e); noFail = false; }finally { System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } if (dupFilePath.isEmpty()) { System.out.println("No duplicate formulas"); } else { System.err.println("Found duplicate formulas"); for (String s : dupFilePath) { System.err.println("Duplicate found at: "+ s); Path p = Paths.get(s); try{ Files.delete(p); System.out.println("deleted "+ s); }catch(Exception e){ System.out.println("Error deleting " +s +": "+ e); } } } String errors = errMsgs.toString(); if(errors.length()>0){ System.err.println("Found errors!"); System.err.println(errors); } assertTrue(noFail); } @Test public void testM2L() { //redirect and gather error message to String for 
easier debugging ByteArrayOutputStream errMsgs = new ByteArrayOutputStream(); System.setErr(new PrintStream(errMsgs)); boolean noFail = true; try { Files.walk(Paths.get("../m2l-str/")).forEach(filePath -> { if (Files.isRegularFile(filePath) && (filePath.toString().endsWith(".mona"))) { try { FileReader file = new FileReader(filePath.toFile()); MonaParserProvider monaProvider= new MonaParserProvider(file); System.out.println("parsing "+ filePath); monaProvider.parseFormula(); System.out.println("successfully parsed "+ filePath); } catch (Exception e1) { e1.printStackTrace(); } } }); } catch (Exception e) { e.printStackTrace(); noFail = false; }finally { //redirect system.err back System.setErr(new PrintStream(new FileOutputStream(FileDescriptor.err))); } String errors = errMsgs.toString(); if(errors.length()>0){ System.err.println("Found errors!"); System.err.println(errors); } assertTrue(noFail); } }
delete redundant print message
Parsers/src/testBenchmarks/TestAllBenchmarks.java
delete redundant print message
<ide><path>arsers/src/testBenchmarks/TestAllBenchmarks.java <ide> <ide> import MonaParser.MonaParserProvider; <ide> <add>/* <add> * This class parses and removes duplicate benchmarks, <add> * The parser for regular expression currently does not support non regular operators <add> * The MONA predicate features is not supported by the mona parser, thus the Regsy and STRAND set in WS1S are not tested here. <add> * The above features will be implemented in the near future. <add> * */ <ide> public class TestAllBenchmarks { <ide> @Test <ide> public void testLTL() {
Java
mit
227b6cdf8b42dc5db799ba4181f7b43a06962484
0
mstojcevich/CopperBot2015
package org.usfirst.frc.team2586.robot.subsystems; import org.usfirst.frc.team2586.robot.RobotMap; import edu.wpi.first.wpilibj.RobotDrive; import edu.wpi.first.wpilibj.SpeedController; import edu.wpi.first.wpilibj.Talon; import edu.wpi.first.wpilibj.command.Subsystem; /** * Controls the drive motors of the robot */ public class DriveTrain extends Subsystem { private final SpeedController leftRearDriveMotor; private final SpeedController leftFrontDriveMotor; private final SpeedController rightRearDriveMotor; private final SpeedController rightFrontDriveMotor; private final RobotDrive drive; /** * Create a DriveTrain using the default speed controller channels defined in RobotMap as Talons. */ public DriveTrain() { this(RobotMap.REAR_LEFT_DRIVE, RobotMap.REAR_RIGHT_DRIVE, RobotMap.FRONT_LEFT_DRIVE, RobotMap.FRONT_RIGHT_DRIVE); } /** * Create a DriveTrain using the specified speed controller channels using Talons * * @param rearLeft Channel for the rear left speed controller * @param rearRight Channel for the rear right speed controller * @param frontLeft Channel for the front left speed controller * @param frontRight Channel for the front right speed controller */ public DriveTrain(int rearLeft, int rearRight, int frontLeft, int frontRight) { this(new Talon(rearLeft), new Talon(rearRight), new Talon(frontLeft), new Talon(frontRight)); } /** * Create a DriveTrain using the spcified speed controllers * * @param rearLeft Rear left speed controller * @param rearRight Rear right speed controller * @param frontLeft Front left speed controller * @param frontRight Front right speed controller */ public DriveTrain(SpeedController rearLeft, SpeedController rearRight, SpeedController frontLeft, SpeedController frontRight) { super("Drive Train"); this.leftRearDriveMotor = rearLeft; this.rightRearDriveMotor = rearRight; this.leftFrontDriveMotor = frontLeft; this.rightFrontDriveMotor = frontRight; this.drive = new RobotDrive( leftFrontDriveMotor, leftRearDriveMotor, 
rightFrontDriveMotor, rightRearDriveMotor ); this.drive.setSafetyEnabled(true); } /** * Drive the robot with a left and right speed * @param leftSpeed Number between 0 and 1 representing the speed to drive the left side * @param rightSpeed Number between 0 and 1 representing the speed to drive the right side */ public void teleDrive(double leftSpeed, double rightSpeed) { this.drive.tankDrive(leftSpeed, rightSpeed); } @Override protected void initDefaultCommand() {} }
src/org/usfirst/frc/team2586/robot/subsystems/DriveTrain.java
package org.usfirst.frc.team2586.robot.subsystems; import org.usfirst.frc.team2586.robot.RobotMap; import edu.wpi.first.wpilibj.RobotDrive; import edu.wpi.first.wpilibj.Talon; import edu.wpi.first.wpilibj.command.Subsystem; /** * Controls the drive motors of the robot */ public class DriveTrain extends Subsystem { private final Talon leftRearDriveMotor; private final Talon leftFrontDriveMotor; private final Talon rightRearDriveMotor; private final Talon rightFrontDriveMotor; private final RobotDrive drive; public DriveTrain() { super("Drive Train"); this.leftRearDriveMotor = new Talon(RobotMap.REAR_LEFT_DRIVE); this.leftFrontDriveMotor = new Talon(RobotMap.FRONT_LEFT_DRIVE); this.rightRearDriveMotor = new Talon(RobotMap.REAR_RIGHT_DRIVE); this.rightFrontDriveMotor = new Talon(RobotMap.FRONT_RIGHT_DRIVE); this.drive = new RobotDrive( leftFrontDriveMotor, leftRearDriveMotor, rightFrontDriveMotor, rightRearDriveMotor ); this.drive.setSafetyEnabled(true); } /** * Drive the robot with a left and right speed * @param leftSpeed Number between 0 and 1 representing the speed to drive the left side * @param rightSpeed Number between 0 and 1 representing the speed to drive the right side */ public void teleDrive(double leftSpeed, double rightSpeed) { this.drive.tankDrive(leftSpeed, rightSpeed); } @Override protected void initDefaultCommand() { } }
Allow more abstract use of DriveTrain (different speed controllers, different channels
src/org/usfirst/frc/team2586/robot/subsystems/DriveTrain.java
Allow more abstract use of DriveTrain (different speed controllers, different channels
<ide><path>rc/org/usfirst/frc/team2586/robot/subsystems/DriveTrain.java <ide> import org.usfirst.frc.team2586.robot.RobotMap; <ide> <ide> import edu.wpi.first.wpilibj.RobotDrive; <add>import edu.wpi.first.wpilibj.SpeedController; <ide> import edu.wpi.first.wpilibj.Talon; <ide> import edu.wpi.first.wpilibj.command.Subsystem; <ide> <ide> */ <ide> public class DriveTrain extends Subsystem { <ide> <del> private final Talon leftRearDriveMotor; <del> private final Talon leftFrontDriveMotor; <del> private final Talon rightRearDriveMotor; <del> private final Talon rightFrontDriveMotor; <add> private final SpeedController leftRearDriveMotor; <add> private final SpeedController leftFrontDriveMotor; <add> private final SpeedController rightRearDriveMotor; <add> private final SpeedController rightFrontDriveMotor; <ide> <ide> private final RobotDrive drive; <ide> <add> /** <add> * Create a DriveTrain using the default speed controller channels defined in RobotMap as Talons. <add> */ <ide> public DriveTrain() { <add> this(RobotMap.REAR_LEFT_DRIVE, RobotMap.REAR_RIGHT_DRIVE, <add> RobotMap.FRONT_LEFT_DRIVE, RobotMap.FRONT_RIGHT_DRIVE); <add> } <add> <add> /** <add> * Create a DriveTrain using the specified speed controller channels using Talons <add> * <add> * @param rearLeft Channel for the rear left speed controller <add> * @param rearRight Channel for the rear right speed controller <add> * @param frontLeft Channel for the front left speed controller <add> * @param frontRight Channel for the front right speed controller <add> */ <add> public DriveTrain(int rearLeft, int rearRight, int frontLeft, int frontRight) { <add> this(new Talon(rearLeft), new Talon(rearRight), <add> new Talon(frontLeft), new Talon(frontRight)); <add> } <add> <add> /** <add> * Create a DriveTrain using the spcified speed controllers <add> * <add> * @param rearLeft Rear left speed controller <add> * @param rearRight Rear right speed controller <add> * @param frontLeft Front left speed controller <add> * 
@param frontRight Front right speed controller <add> */ <add> public DriveTrain(SpeedController rearLeft, SpeedController rearRight, <add> SpeedController frontLeft, SpeedController frontRight) { <ide> super("Drive Train"); <ide> <del> this.leftRearDriveMotor = new Talon(RobotMap.REAR_LEFT_DRIVE); <del> this.leftFrontDriveMotor = new Talon(RobotMap.FRONT_LEFT_DRIVE); <del> this.rightRearDriveMotor = new Talon(RobotMap.REAR_RIGHT_DRIVE); <del> this.rightFrontDriveMotor = new Talon(RobotMap.FRONT_RIGHT_DRIVE); <add> this.leftRearDriveMotor = rearLeft; <add> this.rightRearDriveMotor = rearRight; <add> this.leftFrontDriveMotor = frontLeft; <add> this.rightFrontDriveMotor = frontRight; <ide> <ide> this.drive = new RobotDrive( <ide> leftFrontDriveMotor, leftRearDriveMotor, <ide> } <ide> <ide> @Override <del> protected void initDefaultCommand() { <del> <del> } <add> protected void initDefaultCommand() {} <add> <ide> } <ide>
Java
apache-2.0
2f709b5092aa1bfec68773e7965f2cfb85afd236
0
dbflute-test/dbflute-test-active-hangar,dbflute-test/dbflute-test-active-hangar,dbflute-test/dbflute-test-active-hangar,dbflute-test/dbflute-test-active-hangar,dbflute-test/dbflute-test-active-hangar
package org.docksidestage.hangar.freeflute.dbcollabo; import java.io.Serializable; import java.text.MessageFormat; import java.util.ResourceBundle; /** * @author FreeGen */ public class MessagesAndDatabase implements Serializable { private static final long serialVersionUID = 1L; private static final ResourceBundle BUNDLE = ResourceBundle.getBundle("application"); // MEMBER // - MEMBER_ID // - MEMBER_NAME // - MEMBER_ACCOUNT // - MEMBER_STATUS_CODE // - FORMALIZED_DATETIME // - BIRTHDATE // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO // MEMBER_ADDRESS // - MEMBER_ADDRESS_ID // - MEMBER_ID // - VALID_BEGIN_DATE // - VALID_END_DATE // - ADDRESS // - REGION_ID // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO // MEMBER_FOLLOWING // - MEMBER_FOLLOWING_ID // - MY_MEMBER_ID // - YOUR_MEMBER_ID // - FOLLOW_DATETIME // MEMBER_LOGIN // - MEMBER_LOGIN_ID // - MEMBER_ID // - LOGIN_DATETIME // - MOBILE_LOGIN_FLG // - LOGIN_MEMBER_STATUS_CODE // MEMBER_SECURITY // - MEMBER_ID // - LOGIN_PASSWORD // - REMINDER_QUESTION // - REMINDER_ANSWER // - REMINDER_USE_COUNT // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO // MEMBER_SERVICE // - MEMBER_SERVICE_ID // - MEMBER_ID // - SERVICE_POINT_COUNT // - SERVICE_RANK_CODE // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO // MEMBER_STATUS // - MEMBER_STATUS_CODE // - MEMBER_STATUS_NAME // - DESCRIPTION // - DISPLAY_ORDER // MEMBER_WITHDRAWAL // - MEMBER_ID // - WITHDRAWAL_REASON_CODE // - WITHDRAWAL_REASON_INPUT_TEXT // - WITHDRAWAL_DATETIME // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // PRODUCT // - PRODUCT_ID // - PRODUCT_NAME // - PRODUCT_HANDLE_CODE // - PRODUCT_CATEGORY_CODE // - PRODUCT_STATUS_CODE // - REGULAR_PRICE // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO 
// PRODUCT_CATEGORY // - PRODUCT_CATEGORY_CODE // - PRODUCT_CATEGORY_NAME // - PARENT_CATEGORY_CODE // PRODUCT_STATUS // - PRODUCT_STATUS_CODE // - PRODUCT_STATUS_NAME // - DISPLAY_ORDER // PURCHASE // - PURCHASE_ID // - MEMBER_ID // - PRODUCT_ID // - PURCHASE_DATETIME // - PURCHASE_COUNT // - PURCHASE_PRICE // - PAYMENT_COMPLETE_FLG // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO // PURCHASE_PAYMENT // - PURCHASE_PAYMENT_ID // - PURCHASE_ID // - PAYMENT_AMOUNT // - PAYMENT_DATETIME // - PAYMENT_METHOD_CODE // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // REGION // - REGION_ID // - REGION_NAME // SERVICE_RANK // - SERVICE_RANK_CODE // - SERVICE_RANK_NAME // - SERVICE_POINT_INCIDENCE // - NEW_ACCEPTABLE_FLG // - DESCRIPTION // - DISPLAY_ORDER // SUMMARY_PRODUCT // - PRODUCT_ID // - PRODUCT_NAME // - PRODUCT_HANDLE_CODE // - PRODUCT_STATUS_CODE // - LATEST_PURCHASE_DATETIME // SUMMARY_WITHDRAWAL // - MEMBER_ID // - MEMBER_NAME // - WITHDRAWAL_REASON_CODE // - WITHDRAWAL_REASON_TEXT // - WITHDRAWAL_REASON_INPUT_TEXT // - WITHDRAWAL_DATETIME // - MEMBER_STATUS_CODE // - MEMBER_STATUS_NAME // - MAX_PURCHASE_PRICE // VENDOR_$_DOLLAR // - VENDOR_$_DOLLAR_ID // - VENDOR_$_DOLLAR_NAME // VENDOR_CHECK // - VENDOR_CHECK_ID // - TYPE_OF_CHAR // - TYPE_OF_VARCHAR // - TYPE_OF_CLOB // - TYPE_OF_TEXT // - TYPE_OF_NUMERIC_INTEGER // - TYPE_OF_NUMERIC_BIGINT // - TYPE_OF_NUMERIC_DECIMAL // - TYPE_OF_NUMERIC_INTEGER_MIN // - TYPE_OF_NUMERIC_INTEGER_MAX // - TYPE_OF_NUMERIC_BIGINT_MIN // - TYPE_OF_NUMERIC_BIGINT_MAX // - TYPE_OF_NUMERIC_SUPERINT_MIN // - TYPE_OF_NUMERIC_SUPERINT_MAX // - TYPE_OF_NUMERIC_MAXDECIMAL // - TYPE_OF_INTEGER // - TYPE_OF_BIGINT // - TYPE_OF_DATE // - TYPE_OF_TIMESTAMP // - TYPE_OF_TIME // - TYPE_OF_BOOLEAN // - TYPE_OF_BINARY // - TYPE_OF_BLOB // - TYPE_OF_UUID // - TYPE_OF_ARRAY // - TYPE_OF_OTHER // - J_A_V_A_BEANS_PROPERTY // - J_POP_BEANS_PROPERTY // VENDOR_IDENTITY_ONLY 
// - IDENTITY_ONLY_ID // VENDOR_PRIMARY_KEY_ONLY // - PRIMARY_KEY_ONLY_ID // VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_NAME // - SHORT_NAME // - SHORT_SIZE // VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE // - SHORT_DATE // WHITE_BASE // - BASE_ID // - BASE_NAME // - SEA_ID // - LAND_ID // - PIARI_ID // - BONVO_ID // - DSTORE_ID // - AMBA_ID // - MIRACO_ID // - DOHOTEL_ID // WHITE_BASE_ONE01_SEA // - SEA_ID // - SEA_NAME // - BROADWAY_ID // - DOCKSIDE_ID // WHITE_BASE_ONE01_SEA_BROADWAY // - BROADWAY_ID // - BROADWAY_NAME // WHITE_BASE_ONE01_SEA_DOCKSIDE // - DOCKSIDE_ID // - DOCKSIDE_NAME // WHITE_BASE_ONE01_SEA_HANGAR // - HANGAR_ID // - HANGAR_NAME // - SEA_ID // WHITE_BASE_ONE01_SEA_MAGICLAMP // - MAGICLAMP_ID // - MAGICLAMP_NAME // - SEA_ID // WHITE_BASE_ONE02_LAND // - LAND_ID // - LAND_NAME // WHITE_BASE_ONE03_PIARI // - PIARI_ID // - PIARI_NAME // WHITE_BASE_ONE04_BONVO // - BONVO_ID // - BONVO_NAME // - PARKSIDE_ID // - STATIONSIDE_ID // WHITE_BASE_ONE04_BONVO_PARKSIDE // - PARKSIDE_ID // - PARKSIDE_NAME // WHITE_BASE_ONE04_BONVO_STATIONSIDE // - STATIONSIDE_ID // - STATIONSIDE_NAME // WHITE_BASE_ONE05_DSTORE // - DSTORE_ID // - DSTORE_NAME // WHITE_BASE_ONE06_AMBA // - AMBA_ID // - AMBA_NAME // WHITE_BASE_ONE07_MIRACO // - MIRACO_ID // - MIRACO_NAME // WHITE_BASE_ONE08_DOHOTEL // - DOHOTEL_ID // - DOHOTEL_NAME // WHITE_BASE_ONE09_PALM // - PALM_ID // - PALM_NAME // - BASE_ID // WHITE_BASE_ONE10_CELEB // - CELEB_ID // - CELEB_NAME // - BASE_ID // WHITE_BASE_ONE11_CIRQUE // - CIRQUE_ID // - CIRQUE_NAME // WHITE_BASE_ONE12_AMPHI // - AMPHI_ID // - AMPHI_NAME // - BASE_ID // WHITE_CLASSIFICATION_DEPLOYMENT // - DEPLOYMENT_ID // - SEA_FLG // - DEPLOYMENT_TYPE_CODE // WHITE_COMPOUND_PK // - PK_FIRST_ID // - PK_SECOND_ID // 
- COMPOUND_PK_NAME // - REFERRED_ID // WHITE_COMPOUND_PK_REF_PHYSICAL // - REF_FIRST_ID // - REF_SECOND_ID // - REF_THIRD_ID // - COMPOUND_REF_NAME // WHITE_COMPOUND_PK_REF_VIRTURL // - REF_FIRST_ID // - REF_SECOND_ID // - REF_THIRD_ID // - COMPOUND_REF_NAME // WHITE_DATE_TERM // - DATE_TERM_ID // - DATE_TERM_VALUE // - BEGIN_DATE // - END_DATE // WHITE_DB_COMMENT // - DB_COMMENT_ID // - DB_COMMENT_NAME // WHITE_DEPRECATED_SELECT_BY_PKUQ // - SELECT_BY_PKUQ_ID // - SELECT_BY_PKUQ_NAME // - SELECT_BY_PKUQ_CODE // WHITE_DEPRECATED_SPECIFY_BATCH_COLUMN // - SPECIFY_BATCH_COLUMN_ID // - SPECIFY_BATCH_COLUMN_NAME // WHITE_FIRST_DATE // - FIRST_DATE_ID // - FIRST_DATE_NAME // - ADDED_AT_TABLE_SAME_DATE // WHITE_ON_PARADE // - ON_PARADE_ID // - ON_PARADE_NAME // WHITE_ON_PARADE_NULLABLE_TO_MANY // - MANY_ID // - MANY_NAME // WHITE_ON_PARADE_REF // - REF_ID // - REF_NAME // - NULLABLE_FK_ON_PARADE_ID // - NULLABLE_FK_TO_MANY_ID // WHITE_READ_ONLY // - READ_ONLY_ID // - READ_ONLY_NAME // WHITE_SCHEMA_DIFF // - SCHEMA_DIFF_ID // - SCHEMA_DIFF_NAME // - SCHEMA_DIFF_DATE // WHITE_SELF_REFERENCE // - SELF_REFERENCE_ID // - SELF_REFERENCE_NAME // - PARENT_ID // WHITE_SIMPLE_DTO_EXCEPT // - SIMPLE_DTO_EXCEPT_ID // - SIMPLE_DTO_EXCEPT_NAME // WHITE_SINGLE_PK // - ONLY_ONE_PK_ID // - SINGLE_PK_NAME // - REFERRED_ID // WHITE_TSV_LOADING // - TSV_LOADING_ID // - TSV_LOADING_NAME // - LOADING_COUNT // - LOADING_DATE // - BEGIN_DATETIME // - END_DATETIME // - LARGE_FROM_FILE // - DONE_FLG // WITHDRAWAL_REASON // - WITHDRAWAL_REASON_CODE // - WITHDRAWAL_REASON_TEXT // - DISPLAY_ORDER /** , name=ErrorsHeader, variable=0 */ public static final String ERRORS_HEADER = "errors.header"; /** * value=&lt;font color="red"&gt;&lt;ul&gt; * @return The formatted message. 
(NotNull) */ public static String getErrorsHeader() { // <font color="red"><ul> return MessageFormat.format(BUNDLE.getString(ERRORS_HEADER), (Object[])null); } /** , name=ErrorsFooter, variable=0 */ public static final String ERRORS_FOOTER = "errors.footer"; /** * value=&lt;/ul&gt;&lt;/font&gt; * @return The formatted message. (NotNull) */ public static String getErrorsFooter() { // </ul></font> return MessageFormat.format(BUNDLE.getString(ERRORS_FOOTER), (Object[])null); } /** , name=ErrorsPrefix, variable=0 */ public static final String ERRORS_PREFIX = "errors.prefix"; /** * value=&lt;li&gt; * @return The formatted message. (NotNull) */ public static String getErrorsPrefix() { // <li> return MessageFormat.format(BUNDLE.getString(ERRORS_PREFIX), (Object[])null); } /** , name=ErrorsSuffix, variable=0 */ public static final String ERRORS_SUFFIX = "errors.suffix"; /** * value=&lt;/li&gt; * @return The formatted message. (NotNull) */ public static String getErrorsSuffix() { // </li> return MessageFormat.format(BUNDLE.getString(ERRORS_SUFFIX), (Object[])null); } /** , name=ErrorsInvalid, variable=1 */ public static final String ERRORS_INVALID = "errors.invalid"; /** * value={0} is invalid. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsInvalid(String arg0) { // {0} is invalid. return MessageFormat.format(BUNDLE.getString(ERRORS_INVALID), arg0); } /** , name=ErrorsMaxlength, variable=2 */ public static final String ERRORS_MAXLENGTH = "errors.maxlength"; /** * value={0} can not be greater than {1} characters. * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsMaxlength(String arg0, String arg1) { // {0} can not be greater than {1} characters. 
return MessageFormat.format(BUNDLE.getString(ERRORS_MAXLENGTH), arg0, arg1); } /** , name=ErrorsMinlength, variable=2 */ public static final String ERRORS_MINLENGTH = "errors.minlength"; /** * value={0} can not be less than {1} characters. * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsMinlength(String arg0, String arg1) { // {0} can not be less than {1} characters. return MessageFormat.format(BUNDLE.getString(ERRORS_MINLENGTH), arg0, arg1); } /** , name=ErrorsMaxbytelength, variable=2 */ public static final String ERRORS_MAXBYTELENGTH = "errors.maxbytelength"; /** * value={0} can not be greater than {1} bytes. * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsMaxbytelength(String arg0, String arg1) { // {0} can not be greater than {1} bytes. return MessageFormat.format(BUNDLE.getString(ERRORS_MAXBYTELENGTH), arg0, arg1); } /** , name=ErrorsMinbytelength, variable=2 */ public static final String ERRORS_MINBYTELENGTH = "errors.minbytelength"; /** * value={0} can not be less than {1} bytes. * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsMinbytelength(String arg0, String arg1) { // {0} can not be less than {1} bytes. return MessageFormat.format(BUNDLE.getString(ERRORS_MINBYTELENGTH), arg0, arg1); } /** , name=ErrorsRange, variable=3 */ public static final String ERRORS_RANGE = "errors.range"; /** * value={0} is not in the range {1} through {2}. * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @param arg2 The parameter 2 for message. (NotNull) * @return The formatted message. 
(NotNull) */ public static String getErrorsRange(String arg0, String arg1, String arg2) { // {0} is not in the range {1} through {2}. return MessageFormat.format(BUNDLE.getString(ERRORS_RANGE), arg0, arg1, arg2); } /** , name=ErrorsRequired, variable=1 */ public static final String ERRORS_REQUIRED = "errors.required"; /** * value={0} is required. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsRequired(String arg0) { // {0} is required. return MessageFormat.format(BUNDLE.getString(ERRORS_REQUIRED), arg0); } /** , name=ErrorsByte, variable=1 */ public static final String ERRORS_BYTE = "errors.byte"; /** * value={0} must be an byte. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsByte(String arg0) { // {0} must be an byte. return MessageFormat.format(BUNDLE.getString(ERRORS_BYTE), arg0); } /** , name=ErrorsDate, variable=1 */ public static final String ERRORS_DATE = "errors.date"; /** * value={0} is not a date. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsDate(String arg0) { // {0} is not a date. return MessageFormat.format(BUNDLE.getString(ERRORS_DATE), arg0); } /** , name=ErrorsDouble, variable=1 */ public static final String ERRORS_DOUBLE = "errors.double"; /** * value={0} must be an double. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsDouble(String arg0) { // {0} must be an double. return MessageFormat.format(BUNDLE.getString(ERRORS_DOUBLE), arg0); } /** , name=ErrorsFloat, variable=1 */ public static final String ERRORS_FLOAT = "errors.float"; /** * value={0} must be an float. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. 
(NotNull) */ public static String getErrorsFloat(String arg0) { // {0} must be an float. return MessageFormat.format(BUNDLE.getString(ERRORS_FLOAT), arg0); } /** , name=ErrorsInteger, variable=1 */ public static final String ERRORS_INTEGER = "errors.integer"; /** * value={0} must be an integer. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsInteger(String arg0) { // {0} must be an integer. return MessageFormat.format(BUNDLE.getString(ERRORS_INTEGER), arg0); } /** , name=ErrorsLong, variable=1 */ public static final String ERRORS_LONG = "errors.long"; /** * value={0} must be an long. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsLong(String arg0) { // {0} must be an long. return MessageFormat.format(BUNDLE.getString(ERRORS_LONG), arg0); } /** , name=ErrorsShort, variable=1 */ public static final String ERRORS_SHORT = "errors.short"; /** * value={0} must be an short. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsShort(String arg0) { // {0} must be an short. return MessageFormat.format(BUNDLE.getString(ERRORS_SHORT), arg0); } /** , name=ErrorsCreditcard, variable=1 */ public static final String ERRORS_CREDITCARD = "errors.creditcard"; /** * value={0} is not a valid credit card number. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsCreditcard(String arg0) { // {0} is not a valid credit card number. return MessageFormat.format(BUNDLE.getString(ERRORS_CREDITCARD), arg0); } /** , name=ErrorsEmail, variable=1 */ public static final String ERRORS_EMAIL = "errors.email"; /** * value={0} is an invalid e-mail address. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. 
(NotNull) */ public static String getErrorsEmail(String arg0) { // {0} is an invalid e-mail address. return MessageFormat.format(BUNDLE.getString(ERRORS_EMAIL), arg0); } /** , name=ErrorsUrl, variable=1 */ public static final String ERRORS_URL = "errors.url"; /** * value={0} is an invalid url (web address). * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsUrl(String arg0) { // {0} is an invalid url (web address). return MessageFormat.format(BUNDLE.getString(ERRORS_URL), arg0); } /** -------------, name=ErrorsNumber, variable=1 */ public static final String ERRORS_NUMBER = "errors.number"; /** * value=input number for {0} * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsNumber(String arg0) { // input number for {0} return MessageFormat.format(BUNDLE.getString(ERRORS_NUMBER), arg0); } /** , name=ErrorsSameValue, variable=1 */ public static final String ERRORS_SAME_VALUE = "errors.same.value"; /** * value=same value is selected in {0} * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsSameValue(String arg0) { // same value is selected in {0} return MessageFormat.format(BUNDLE.getString(ERRORS_SAME_VALUE), arg0); } /** , name=ErrorsGreaterThan, variable=2 */ public static final String ERRORS_GREATER_THAN = "errors.greater.than"; /** * value=input {0} greater than {1} * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. 
(NotNull) */ public static String getErrorsGreaterThan(String arg0, String arg1) { // input {0} greater than {1} return MessageFormat.format(BUNDLE.getString(ERRORS_GREATER_THAN), arg0, arg1); } /** , name=ErrorsRequiredAtLeastOne, variable=1 */ public static final String ERRORS_REQUIRED_AT_LEAST_ONE = "errors.required.at.least.one"; /** * value=input {0} at least one * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsRequiredAtLeastOne(String arg0) { // input {0} at least one return MessageFormat.format(BUNDLE.getString(ERRORS_REQUIRED_AT_LEAST_ONE), arg0); } /** , name=ErrorsRequiredOr, variable=2 */ public static final String ERRORS_REQUIRED_OR = "errors.required.or"; /** * value=input either {0} or {1} * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsRequiredOr(String arg0, String arg1) { // input either {0} or {1} return MessageFormat.format(BUNDLE.getString(ERRORS_REQUIRED_OR), arg0, arg1); } /** , name=ErrorsUploadSize, variable=2 */ public static final String ERRORS_UPLOAD_SIZE = "errors.upload.size"; /** * value=Uploading failed, because actual size {0} bytes exceeded limit size {1} bytes. * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsUploadSize(String arg0, String arg1) { // Uploading failed, because actual size {0} bytes exceeded limit size {1} bytes. return MessageFormat.format(BUNDLE.getString(ERRORS_UPLOAD_SIZE), arg0, arg1); } /** ----------------, name=ErrorsEmptyLogin, variable=0 */ public static final String ERRORS_EMPTY_LOGIN = "errors.empty.login"; /** * value=input mail address or password * @return The formatted message. 
(NotNull) */ public static String getErrorsEmptyLogin() { // input mail address or password return MessageFormat.format(BUNDLE.getString(ERRORS_EMPTY_LOGIN), (Object[])null); } /** , name=ErrorsNotLogin, variable=0 */ public static final String ERRORS_NOT_LOGIN = "errors.not.login"; /** * value=invalid mail address or password * @return The formatted message. (NotNull) */ public static String getErrorsNotLogin() { // invalid mail address or password return MessageFormat.format(BUNDLE.getString(ERRORS_NOT_LOGIN), (Object[])null); } /** , name=ErrorsEmailExists, variable=0 */ public static final String ERRORS_EMAIL_EXISTS = "errors.email.exists"; /** * value=mail address already registered * @return The formatted message. (NotNull) */ public static String getErrorsEmailExists() { // mail address already registered return MessageFormat.format(BUNDLE.getString(ERRORS_EMAIL_EXISTS), (Object[])null); } /** , name=ErrorsAlreadyRegistered, variable=2 */ public static final String ERRORS_ALREADY_REGISTERED = "errors.already.registered"; /** * value={0} is already-regsitered {1} * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsAlreadyRegistered(String arg0, String arg1) { // {0} is already-regsitered {1} return MessageFormat.format(BUNDLE.getString(ERRORS_ALREADY_REGISTERED), arg0, arg1); } /** ---------------------, name=ErrorsAppIllegalTransition, variable=0 */ public static final String ERRORS_APP_ILLEGAL_TRANSITION = "errors.app.illegal.transition"; /** * value=retry because of illegal transition * @return The formatted message. 
(NotNull) */ public static String getErrorsAppIllegalTransition() { // retry because of illegal transition return MessageFormat.format(BUNDLE.getString(ERRORS_APP_ILLEGAL_TRANSITION), (Object[])null); } /** , name=ErrorsAppAlreadyDeleted, variable=0 */ public static final String ERRORS_APP_ALREADY_DELETED = "errors.app.already.deleted"; /** * value=others might be updated, so retry * @return The formatted message. (NotNull) */ public static String getErrorsAppAlreadyDeleted() { // others might be updated, so retry return MessageFormat.format(BUNDLE.getString(ERRORS_APP_ALREADY_DELETED), (Object[])null); } /** , name=ErrorsAppAlreadyUpdated, variable=0 */ public static final String ERRORS_APP_ALREADY_UPDATED = "errors.app.already.updated"; /** * value=others might be updated, so retry * @return The formatted message. (NotNull) */ public static String getErrorsAppAlreadyUpdated() { // others might be updated, so retry return MessageFormat.format(BUNDLE.getString(ERRORS_APP_ALREADY_UPDATED), (Object[])null); } /** , name=ErrorsAppAlreadyExists, variable=0 */ public static final String ERRORS_APP_ALREADY_EXISTS = "errors.app.already.exists"; /** * value=already existing data, so retry * @return The formatted message. (NotNull) */ public static String getErrorsAppAlreadyExists() { // already existing data, so retry return MessageFormat.format(BUNDLE.getString(ERRORS_APP_ALREADY_EXISTS), (Object[])null); } /** ------, name=LabelsMember, variable=0 */ public static final String LABELS_MEMBER = "labels.member"; /** * value=Member * @return The formatted message. (NotNull) */ public static String getLabelsMember() { // Member return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER), (Object[])null); } /** ---------, name=LabelsMemberId, variable=0 */ public static final String LABELS_MEMBER_ID = "labels.memberId"; /** * value=Member ID * @return The formatted message. 
(NotNull) */ public static String getLabelsMemberId() { // Member ID return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER_ID), (Object[])null); } /** , name=LabelsMemberName, variable=0 */ public static final String LABELS_MEMBER_NAME = "labels.memberName"; /** * value=Member Name * @return The formatted message. (NotNull) */ public static String getLabelsMemberName() { // Member Name return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER_NAME), (Object[])null); } /** , name=LabelsEmail, variable=0 */ public static final String LABELS_EMAIL = "labels.email"; /** * value=Mail Address * @return The formatted message. (NotNull) */ public static String getLabelsEmail() { // Mail Address return MessageFormat.format(BUNDLE.getString(LABELS_EMAIL), (Object[])null); } /** , name=LabelsEmailOrAccount, variable=0 */ public static final String LABELS_EMAIL_OR_ACCOUNT = "labels.emailOrAccount"; /** * value=Mail or Account * @return The formatted message. (NotNull) */ public static String getLabelsEmailOrAccount() { // Mail or Account return MessageFormat.format(BUNDLE.getString(LABELS_EMAIL_OR_ACCOUNT), (Object[])null); } /** , name=LabelsPassword, variable=0 */ public static final String LABELS_PASSWORD = "labels.password"; /** * value=Password * @return The formatted message. (NotNull) */ public static String getLabelsPassword() { // Password return MessageFormat.format(BUNDLE.getString(LABELS_PASSWORD), (Object[])null); } /** , name=LabelsVersionNo, variable=0 */ public static final String LABELS_VERSION_NO = "labels.versionNo"; /** * value=Version No * @return The formatted message. (NotNull) */ public static String getLabelsVersionNo() { // Version No return MessageFormat.format(BUNDLE.getString(LABELS_VERSION_NO), (Object[])null); } /** ------, name=LabelsList, variable=0 */ public static final String LABELS_LIST = "labels.list"; /** * value=List * @return The formatted message. 
(NotNull) */ public static String getLabelsList() { // List return MessageFormat.format(BUNDLE.getString(LABELS_LIST), (Object[])null); } /** , name=LabelsEdit, variable=0 */ public static final String LABELS_EDIT = "labels.edit"; /** * value=Edit * @return The formatted message. (NotNull) */ public static String getLabelsEdit() { // Edit return MessageFormat.format(BUNDLE.getString(LABELS_EDIT), (Object[])null); } /** , name=LabelsAdd, variable=0 */ public static final String LABELS_ADD = "labels.add"; /** * value=Add * @return The formatted message. (NotNull) */ public static String getLabelsAdd() { // Add return MessageFormat.format(BUNDLE.getString(LABELS_ADD), (Object[])null); } /** , name=LabelsSearch, variable=0 */ public static final String LABELS_SEARCH = "labels.search"; /** * value=Search * @return The formatted message. (NotNull) */ public static String getLabelsSearch() { // Search return MessageFormat.format(BUNDLE.getString(LABELS_SEARCH), (Object[])null); } /** , name=LabelsRegister, variable=0 */ public static final String LABELS_REGISTER = "labels.register"; /** * value=Register * @return The formatted message. (NotNull) */ public static String getLabelsRegister() { // Register return MessageFormat.format(BUNDLE.getString(LABELS_REGISTER), (Object[])null); } /** , name=LabelsUpdate, variable=0 */ public static final String LABELS_UPDATE = "labels.update"; /** * value=Update * @return The formatted message. (NotNull) */ public static String getLabelsUpdate() { // Update return MessageFormat.format(BUNDLE.getString(LABELS_UPDATE), (Object[])null); } /** , name=LabelsMemberList, variable=0 */ public static final String LABELS_MEMBER_LIST = "labels.member.list"; /** * value=@[labels.list] of @[labels.member] * @return The formatted message. 
(NotNull) */ public static String getLabelsMemberList() { // @[labels.list] of @[labels.member] return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER_LIST), (Object[])null); } /** , name=LabelsMemberAdd, variable=0 */ public static final String LABELS_MEMBER_ADD = "labels.member.add"; /** * value=@[labels.add] @[labels.member] * @return The formatted message. (NotNull) */ public static String getLabelsMemberAdd() { // @[labels.add] @[labels.member] return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER_ADD), (Object[])null); } /** , name=LabelsMemberEdit, variable=0 */ public static final String LABELS_MEMBER_EDIT = "labels.member.edit"; /** * value=@[labels.edit] @[labels.member] * @return The formatted message. (NotNull) */ public static String getLabelsMemberEdit() { // @[labels.edit] @[labels.member] return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER_EDIT), (Object[])null); } /** ------------, name=LabelsHeaderTitleErrorMessage, variable=0 */ public static final String LABELS_HEADER_TITLE_ERROR_MESSAGE = "labels.header.title.error.message"; /** * value=Notice * @return The formatted message. (NotNull) */ public static String getLabelsHeaderTitleErrorMessage() { // Notice return MessageFormat.format(BUNDLE.getString(LABELS_HEADER_TITLE_ERROR_MESSAGE), (Object[])null); } /** ----------, name=MessagesInputNoteKeyword, variable=0 */ public static final String MESSAGES_INPUT_NOTE_KEYWORD = "messages.input.note.keyword"; /** * value=Input keyword to search * @return The formatted message. (NotNull) */ public static String getMessagesInputNoteKeyword() { // Input keyword to search return MessageFormat.format(BUNDLE.getString(MESSAGES_INPUT_NOTE_KEYWORD), (Object[])null); } /** , name=MessagesInputNoteEmail, variable=0 */ public static final String MESSAGES_INPUT_NOTE_EMAIL = "messages.input.note.email"; /** * value=Input your E-mail address * @return The formatted message. 
(NotNull) */ public static String getMessagesInputNoteEmail() { // Input your E-mail address return MessageFormat.format(BUNDLE.getString(MESSAGES_INPUT_NOTE_EMAIL), (Object[])null); } /** , name=MessagesInputNoteEmailOrAccount, variable=0 */ public static final String MESSAGES_INPUT_NOTE_EMAIL_OR_ACCOUNT = "messages.input.note.emailOrAccount"; /** * value=Input your E-mail or account * @return The formatted message. (NotNull) */ public static String getMessagesInputNoteEmailOrAccount() { // Input your E-mail or account return MessageFormat.format(BUNDLE.getString(MESSAGES_INPUT_NOTE_EMAIL_OR_ACCOUNT), (Object[])null); } /** , name=MessagesInputNotePassword, variable=0 */ public static final String MESSAGES_INPUT_NOTE_PASSWORD = "messages.input.note.password"; /** * value=Input your password * @return The formatted message. (NotNull) */ public static String getMessagesInputNotePassword() { // Input your password return MessageFormat.format(BUNDLE.getString(MESSAGES_INPUT_NOTE_PASSWORD), (Object[])null); } /** , name=MessagesInputNoteZipCode, variable=0 */ public static final String MESSAGES_INPUT_NOTE_ZIP_CODE = "messages.input.note.zipCode"; /** * value=e.g. 153-0051 * @return The formatted message. (NotNull) */ public static String getMessagesInputNoteZipCode() { // e.g. 153-0051 return MessageFormat.format(BUNDLE.getString(MESSAGES_INPUT_NOTE_ZIP_CODE), (Object[])null); } }
src/main/java/org/docksidestage/hangar/freeflute/dbcollabo/MessagesAndDatabase.java
package org.docksidestage.hangar.freeflute.dbcollabo; import java.io.Serializable; import java.text.MessageFormat; import java.util.ResourceBundle; /** * @author FreeGen */ public class MessagesAndDatabase implements Serializable { private static final long serialVersionUID = 1L; private static final ResourceBundle BUNDLE = ResourceBundle.getBundle("application"); // MEMBER // - MEMBER_ID // - MEMBER_NAME // - MEMBER_ACCOUNT // - MEMBER_STATUS_CODE // - FORMALIZED_DATETIME // - BIRTHDATE // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO // MEMBER_ADDRESS // - MEMBER_ADDRESS_ID // - MEMBER_ID // - VALID_BEGIN_DATE // - VALID_END_DATE // - ADDRESS // - REGION_ID // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO // MEMBER_FOLLOWING // - MEMBER_FOLLOWING_ID // - MY_MEMBER_ID // - YOUR_MEMBER_ID // - FOLLOW_DATETIME // MEMBER_LOGIN // - MEMBER_LOGIN_ID // - MEMBER_ID // - LOGIN_DATETIME // - MOBILE_LOGIN_FLG // - LOGIN_MEMBER_STATUS_CODE // MEMBER_SECURITY // - MEMBER_ID // - LOGIN_PASSWORD // - REMINDER_QUESTION // - REMINDER_ANSWER // - REMINDER_USE_COUNT // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO // MEMBER_SERVICE // - MEMBER_SERVICE_ID // - MEMBER_ID // - SERVICE_POINT_COUNT // - SERVICE_RANK_CODE // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO // MEMBER_STATUS // - MEMBER_STATUS_CODE // - MEMBER_STATUS_NAME // - DESCRIPTION // - DISPLAY_ORDER // MEMBER_WITHDRAWAL // - MEMBER_ID // - WITHDRAWAL_REASON_CODE // - WITHDRAWAL_REASON_INPUT_TEXT // - WITHDRAWAL_DATETIME // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // PRODUCT // - PRODUCT_ID // - PRODUCT_NAME // - PRODUCT_HANDLE_CODE // - PRODUCT_CATEGORY_CODE // - PRODUCT_STATUS_CODE // - REGULAR_PRICE // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO 
// PRODUCT_CATEGORY // - PRODUCT_CATEGORY_CODE // - PRODUCT_CATEGORY_NAME // - PARENT_CATEGORY_CODE // PRODUCT_STATUS // - PRODUCT_STATUS_CODE // - PRODUCT_STATUS_NAME // - DISPLAY_ORDER // PURCHASE // - PURCHASE_ID // - MEMBER_ID // - PRODUCT_ID // - PURCHASE_DATETIME // - PURCHASE_COUNT // - PURCHASE_PRICE // - PAYMENT_COMPLETE_FLG // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // - VERSION_NO // PURCHASE_PAYMENT // - PURCHASE_PAYMENT_ID // - PURCHASE_ID // - PAYMENT_AMOUNT // - PAYMENT_DATETIME // - PAYMENT_METHOD_CODE // - REGISTER_DATETIME // - REGISTER_USER // - UPDATE_DATETIME // - UPDATE_USER // REGION // - REGION_ID // - REGION_NAME // SERVICE_RANK // - SERVICE_RANK_CODE // - SERVICE_RANK_NAME // - SERVICE_POINT_INCIDENCE // - NEW_ACCEPTABLE_FLG // - DESCRIPTION // - DISPLAY_ORDER // SUMMARY_PRODUCT // - PRODUCT_ID // - PRODUCT_NAME // - PRODUCT_HANDLE_CODE // - PRODUCT_STATUS_CODE // - LATEST_PURCHASE_DATETIME // SUMMARY_WITHDRAWAL // - MEMBER_ID // - MEMBER_NAME // - WITHDRAWAL_REASON_CODE // - WITHDRAWAL_REASON_TEXT // - WITHDRAWAL_REASON_INPUT_TEXT // - WITHDRAWAL_DATETIME // - MEMBER_STATUS_CODE // - MEMBER_STATUS_NAME // - MAX_PURCHASE_PRICE // VENDOR_$_DOLLAR // - VENDOR_$_DOLLAR_ID // - VENDOR_$_DOLLAR_NAME // VENDOR_CHECK // - VENDOR_CHECK_ID // - TYPE_OF_CHAR // - TYPE_OF_VARCHAR // - TYPE_OF_CLOB // - TYPE_OF_TEXT // - TYPE_OF_NUMERIC_INTEGER // - TYPE_OF_NUMERIC_BIGINT // - TYPE_OF_NUMERIC_DECIMAL // - TYPE_OF_NUMERIC_INTEGER_MIN // - TYPE_OF_NUMERIC_INTEGER_MAX // - TYPE_OF_NUMERIC_BIGINT_MIN // - TYPE_OF_NUMERIC_BIGINT_MAX // - TYPE_OF_NUMERIC_SUPERINT_MIN // - TYPE_OF_NUMERIC_SUPERINT_MAX // - TYPE_OF_NUMERIC_MAXDECIMAL // - TYPE_OF_INTEGER // - TYPE_OF_BIGINT // - TYPE_OF_DATE // - TYPE_OF_TIMESTAMP // - TYPE_OF_TIME // - TYPE_OF_BOOLEAN // - TYPE_OF_BINARY // - TYPE_OF_BLOB // - TYPE_OF_UUID // - TYPE_OF_ARRAY // - TYPE_OF_OTHER // - J_A_V_A_BEANS_PROPERTY // - J_POP_BEANS_PROPERTY // VENDOR_IDENTITY_ONLY 
// - IDENTITY_ONLY_ID // VENDOR_PRIMARY_KEY_ONLY // - PRIMARY_KEY_ONLY_ID // VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_NAME // - SHORT_NAME // - SHORT_SIZE // VENDOR_THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_ID // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE // - SHORT_DATE // WHITE_CLASSIFICATION_DEPLOYMENT // - DEPLOYMENT_ID // - SEA_FLG // - DEPLOYMENT_TYPE_CODE // WHITE_COMPOUND_PK // - PK_FIRST_ID // - PK_SECOND_ID // - COMPOUND_PK_NAME // - REFERRED_ID // WHITE_COMPOUND_PK_REF_PHYSICAL // - REF_FIRST_ID // - REF_SECOND_ID // - REF_THIRD_ID // - COMPOUND_REF_NAME // WHITE_COMPOUND_PK_REF_VIRTURL // - REF_FIRST_ID // - REF_SECOND_ID // - REF_THIRD_ID // - COMPOUND_REF_NAME // WHITE_DATE_TERM // - DATE_TERM_ID // - DATE_TERM_VALUE // - BEGIN_DATE // - END_DATE // WHITE_DB_COMMENT // - DB_COMMENT_ID // - DB_COMMENT_NAME // WHITE_DEPRECATED_SELECT_BY_PKUQ // - SELECT_BY_PKUQ_ID // - SELECT_BY_PKUQ_NAME // - SELECT_BY_PKUQ_CODE // WHITE_DEPRECATED_SPECIFY_BATCH_COLUMN // - SPECIFY_BATCH_COLUMN_ID // - SPECIFY_BATCH_COLUMN_NAME // WHITE_FIRST_DATE // - FIRST_DATE_ID // - FIRST_DATE_NAME // - ADDED_AT_TABLE_SAME_DATE // WHITE_ON_PARADE // - ON_PARADE_ID // - ON_PARADE_NAME // WHITE_ON_PARADE_NULLABLE_TO_MANY // - MANY_ID // - MANY_NAME // WHITE_ON_PARADE_REF // - REF_ID // - REF_NAME // - NULLABLE_FK_ON_PARADE_ID // - NULLABLE_FK_TO_MANY_ID // WHITE_READ_ONLY // - READ_ONLY_ID // - READ_ONLY_NAME // WHITE_SCHEMA_DIFF // - SCHEMA_DIFF_ID // - SCHEMA_DIFF_NAME // - SCHEMA_DIFF_DATE // WHITE_SELF_REFERENCE // - SELF_REFERENCE_ID // - SELF_REFERENCE_NAME // - PARENT_ID // WHITE_SIMPLE_DTO_EXCEPT // - SIMPLE_DTO_EXCEPT_ID // - SIMPLE_DTO_EXCEPT_NAME // WHITE_SINGLE_PK // - ONLY_ONE_PK_ID // - SINGLE_PK_NAME // - REFERRED_ID // WHITE_TSV_LOADING // - TSV_LOADING_ID // - TSV_LOADING_NAME 
// - LOADING_COUNT // - LOADING_DATE // - BEGIN_DATETIME // - END_DATETIME // - LARGE_FROM_FILE // - DONE_FLG // WITHDRAWAL_REASON // - WITHDRAWAL_REASON_CODE // - WITHDRAWAL_REASON_TEXT // - DISPLAY_ORDER /** , name=ErrorsHeader, variable=0 */ public static final String ERRORS_HEADER = "errors.header"; /** * value=&lt;font color="red"&gt;&lt;ul&gt; * @return The formatted message. (NotNull) */ public static String getErrorsHeader() { // <font color="red"><ul> return MessageFormat.format(BUNDLE.getString(ERRORS_HEADER), (Object[])null); } /** , name=ErrorsFooter, variable=0 */ public static final String ERRORS_FOOTER = "errors.footer"; /** * value=&lt;/ul&gt;&lt;/font&gt; * @return The formatted message. (NotNull) */ public static String getErrorsFooter() { // </ul></font> return MessageFormat.format(BUNDLE.getString(ERRORS_FOOTER), (Object[])null); } /** , name=ErrorsPrefix, variable=0 */ public static final String ERRORS_PREFIX = "errors.prefix"; /** * value=&lt;li&gt; * @return The formatted message. (NotNull) */ public static String getErrorsPrefix() { // <li> return MessageFormat.format(BUNDLE.getString(ERRORS_PREFIX), (Object[])null); } /** , name=ErrorsSuffix, variable=0 */ public static final String ERRORS_SUFFIX = "errors.suffix"; /** * value=&lt;/li&gt; * @return The formatted message. (NotNull) */ public static String getErrorsSuffix() { // </li> return MessageFormat.format(BUNDLE.getString(ERRORS_SUFFIX), (Object[])null); } /** , name=ErrorsInvalid, variable=1 */ public static final String ERRORS_INVALID = "errors.invalid"; /** * value={0} is invalid. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsInvalid(String arg0) { // {0} is invalid. return MessageFormat.format(BUNDLE.getString(ERRORS_INVALID), arg0); } /** , name=ErrorsMaxlength, variable=2 */ public static final String ERRORS_MAXLENGTH = "errors.maxlength"; /** * value={0} can not be greater than {1} characters. 
* @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsMaxlength(String arg0, String arg1) { // {0} can not be greater than {1} characters. return MessageFormat.format(BUNDLE.getString(ERRORS_MAXLENGTH), arg0, arg1); } /** , name=ErrorsMinlength, variable=2 */ public static final String ERRORS_MINLENGTH = "errors.minlength"; /** * value={0} can not be less than {1} characters. * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsMinlength(String arg0, String arg1) { // {0} can not be less than {1} characters. return MessageFormat.format(BUNDLE.getString(ERRORS_MINLENGTH), arg0, arg1); } /** , name=ErrorsMaxbytelength, variable=2 */ public static final String ERRORS_MAXBYTELENGTH = "errors.maxbytelength"; /** * value={0} can not be greater than {1} bytes. * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsMaxbytelength(String arg0, String arg1) { // {0} can not be greater than {1} bytes. return MessageFormat.format(BUNDLE.getString(ERRORS_MAXBYTELENGTH), arg0, arg1); } /** , name=ErrorsMinbytelength, variable=2 */ public static final String ERRORS_MINBYTELENGTH = "errors.minbytelength"; /** * value={0} can not be less than {1} bytes. * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsMinbytelength(String arg0, String arg1) { // {0} can not be less than {1} bytes. 
return MessageFormat.format(BUNDLE.getString(ERRORS_MINBYTELENGTH), arg0, arg1); } /** , name=ErrorsRange, variable=3 */ public static final String ERRORS_RANGE = "errors.range"; /** * value={0} is not in the range {1} through {2}. * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @param arg2 The parameter 2 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsRange(String arg0, String arg1, String arg2) { // {0} is not in the range {1} through {2}. return MessageFormat.format(BUNDLE.getString(ERRORS_RANGE), arg0, arg1, arg2); } /** , name=ErrorsRequired, variable=1 */ public static final String ERRORS_REQUIRED = "errors.required"; /** * value={0} is required. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsRequired(String arg0) { // {0} is required. return MessageFormat.format(BUNDLE.getString(ERRORS_REQUIRED), arg0); } /** , name=ErrorsByte, variable=1 */ public static final String ERRORS_BYTE = "errors.byte"; /** * value={0} must be an byte. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsByte(String arg0) { // {0} must be an byte. return MessageFormat.format(BUNDLE.getString(ERRORS_BYTE), arg0); } /** , name=ErrorsDate, variable=1 */ public static final String ERRORS_DATE = "errors.date"; /** * value={0} is not a date. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsDate(String arg0) { // {0} is not a date. return MessageFormat.format(BUNDLE.getString(ERRORS_DATE), arg0); } /** , name=ErrorsDouble, variable=1 */ public static final String ERRORS_DOUBLE = "errors.double"; /** * value={0} must be an double. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. 
(NotNull) */ public static String getErrorsDouble(String arg0) { // {0} must be an double. return MessageFormat.format(BUNDLE.getString(ERRORS_DOUBLE), arg0); } /** , name=ErrorsFloat, variable=1 */ public static final String ERRORS_FLOAT = "errors.float"; /** * value={0} must be an float. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsFloat(String arg0) { // {0} must be an float. return MessageFormat.format(BUNDLE.getString(ERRORS_FLOAT), arg0); } /** , name=ErrorsInteger, variable=1 */ public static final String ERRORS_INTEGER = "errors.integer"; /** * value={0} must be an integer. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsInteger(String arg0) { // {0} must be an integer. return MessageFormat.format(BUNDLE.getString(ERRORS_INTEGER), arg0); } /** , name=ErrorsLong, variable=1 */ public static final String ERRORS_LONG = "errors.long"; /** * value={0} must be an long. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsLong(String arg0) { // {0} must be an long. return MessageFormat.format(BUNDLE.getString(ERRORS_LONG), arg0); } /** , name=ErrorsShort, variable=1 */ public static final String ERRORS_SHORT = "errors.short"; /** * value={0} must be an short. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsShort(String arg0) { // {0} must be an short. return MessageFormat.format(BUNDLE.getString(ERRORS_SHORT), arg0); } /** , name=ErrorsCreditcard, variable=1 */ public static final String ERRORS_CREDITCARD = "errors.creditcard"; /** * value={0} is not a valid credit card number. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. 
(NotNull) */ public static String getErrorsCreditcard(String arg0) { // {0} is not a valid credit card number. return MessageFormat.format(BUNDLE.getString(ERRORS_CREDITCARD), arg0); } /** , name=ErrorsEmail, variable=1 */ public static final String ERRORS_EMAIL = "errors.email"; /** * value={0} is an invalid e-mail address. * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsEmail(String arg0) { // {0} is an invalid e-mail address. return MessageFormat.format(BUNDLE.getString(ERRORS_EMAIL), arg0); } /** , name=ErrorsUrl, variable=1 */ public static final String ERRORS_URL = "errors.url"; /** * value={0} is an invalid url (web address). * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsUrl(String arg0) { // {0} is an invalid url (web address). return MessageFormat.format(BUNDLE.getString(ERRORS_URL), arg0); } /** -------------, name=ErrorsNumber, variable=1 */ public static final String ERRORS_NUMBER = "errors.number"; /** * value=input number for {0} * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsNumber(String arg0) { // input number for {0} return MessageFormat.format(BUNDLE.getString(ERRORS_NUMBER), arg0); } /** , name=ErrorsSameValue, variable=1 */ public static final String ERRORS_SAME_VALUE = "errors.same.value"; /** * value=same value is selected in {0} * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsSameValue(String arg0) { // same value is selected in {0} return MessageFormat.format(BUNDLE.getString(ERRORS_SAME_VALUE), arg0); } /** , name=ErrorsGreaterThan, variable=2 */ public static final String ERRORS_GREATER_THAN = "errors.greater.than"; /** * value=input {0} greater than {1} * @param arg0 The parameter 0 for message. 
(NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsGreaterThan(String arg0, String arg1) { // input {0} greater than {1} return MessageFormat.format(BUNDLE.getString(ERRORS_GREATER_THAN), arg0, arg1); } /** , name=ErrorsRequiredAtLeastOne, variable=1 */ public static final String ERRORS_REQUIRED_AT_LEAST_ONE = "errors.required.at.least.one"; /** * value=input {0} at least one * @param arg0 The parameter 0 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsRequiredAtLeastOne(String arg0) { // input {0} at least one return MessageFormat.format(BUNDLE.getString(ERRORS_REQUIRED_AT_LEAST_ONE), arg0); } /** , name=ErrorsRequiredOr, variable=2 */ public static final String ERRORS_REQUIRED_OR = "errors.required.or"; /** * value=input either {0} or {1} * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsRequiredOr(String arg0, String arg1) { // input either {0} or {1} return MessageFormat.format(BUNDLE.getString(ERRORS_REQUIRED_OR), arg0, arg1); } /** , name=ErrorsUploadSize, variable=2 */ public static final String ERRORS_UPLOAD_SIZE = "errors.upload.size"; /** * value=Uploading failed, because actual size {0} bytes exceeded limit size {1} bytes. * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsUploadSize(String arg0, String arg1) { // Uploading failed, because actual size {0} bytes exceeded limit size {1} bytes. 
return MessageFormat.format(BUNDLE.getString(ERRORS_UPLOAD_SIZE), arg0, arg1); } /** ----------------, name=ErrorsEmptyLogin, variable=0 */ public static final String ERRORS_EMPTY_LOGIN = "errors.empty.login"; /** * value=input mail address or password * @return The formatted message. (NotNull) */ public static String getErrorsEmptyLogin() { // input mail address or password return MessageFormat.format(BUNDLE.getString(ERRORS_EMPTY_LOGIN), (Object[])null); } /** , name=ErrorsNotLogin, variable=0 */ public static final String ERRORS_NOT_LOGIN = "errors.not.login"; /** * value=invalid mail address or password * @return The formatted message. (NotNull) */ public static String getErrorsNotLogin() { // invalid mail address or password return MessageFormat.format(BUNDLE.getString(ERRORS_NOT_LOGIN), (Object[])null); } /** , name=ErrorsEmailExists, variable=0 */ public static final String ERRORS_EMAIL_EXISTS = "errors.email.exists"; /** * value=mail address already registered * @return The formatted message. (NotNull) */ public static String getErrorsEmailExists() { // mail address already registered return MessageFormat.format(BUNDLE.getString(ERRORS_EMAIL_EXISTS), (Object[])null); } /** , name=ErrorsAlreadyRegistered, variable=2 */ public static final String ERRORS_ALREADY_REGISTERED = "errors.already.registered"; /** * value={0} is already-regsitered {1} * @param arg0 The parameter 0 for message. (NotNull) * @param arg1 The parameter 1 for message. (NotNull) * @return The formatted message. (NotNull) */ public static String getErrorsAlreadyRegistered(String arg0, String arg1) { // {0} is already-regsitered {1} return MessageFormat.format(BUNDLE.getString(ERRORS_ALREADY_REGISTERED), arg0, arg1); } /** ---------------------, name=ErrorsAppIllegalTransition, variable=0 */ public static final String ERRORS_APP_ILLEGAL_TRANSITION = "errors.app.illegal.transition"; /** * value=retry because of illegal transition * @return The formatted message. 
(NotNull) */ public static String getErrorsAppIllegalTransition() { // retry because of illegal transition return MessageFormat.format(BUNDLE.getString(ERRORS_APP_ILLEGAL_TRANSITION), (Object[])null); } /** , name=ErrorsAppAlreadyDeleted, variable=0 */ public static final String ERRORS_APP_ALREADY_DELETED = "errors.app.already.deleted"; /** * value=others might be updated, so retry * @return The formatted message. (NotNull) */ public static String getErrorsAppAlreadyDeleted() { // others might be updated, so retry return MessageFormat.format(BUNDLE.getString(ERRORS_APP_ALREADY_DELETED), (Object[])null); } /** , name=ErrorsAppAlreadyUpdated, variable=0 */ public static final String ERRORS_APP_ALREADY_UPDATED = "errors.app.already.updated"; /** * value=others might be updated, so retry * @return The formatted message. (NotNull) */ public static String getErrorsAppAlreadyUpdated() { // others might be updated, so retry return MessageFormat.format(BUNDLE.getString(ERRORS_APP_ALREADY_UPDATED), (Object[])null); } /** , name=ErrorsAppAlreadyExists, variable=0 */ public static final String ERRORS_APP_ALREADY_EXISTS = "errors.app.already.exists"; /** * value=already existing data, so retry * @return The formatted message. (NotNull) */ public static String getErrorsAppAlreadyExists() { // already existing data, so retry return MessageFormat.format(BUNDLE.getString(ERRORS_APP_ALREADY_EXISTS), (Object[])null); } /** ------, name=LabelsMember, variable=0 */ public static final String LABELS_MEMBER = "labels.member"; /** * value=Member * @return The formatted message. (NotNull) */ public static String getLabelsMember() { // Member return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER), (Object[])null); } /** ---------, name=LabelsMemberId, variable=0 */ public static final String LABELS_MEMBER_ID = "labels.memberId"; /** * value=Member ID * @return The formatted message. 
(NotNull) */ public static String getLabelsMemberId() { // Member ID return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER_ID), (Object[])null); } /** , name=LabelsMemberName, variable=0 */ public static final String LABELS_MEMBER_NAME = "labels.memberName"; /** * value=Member Name * @return The formatted message. (NotNull) */ public static String getLabelsMemberName() { // Member Name return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER_NAME), (Object[])null); } /** , name=LabelsEmail, variable=0 */ public static final String LABELS_EMAIL = "labels.email"; /** * value=Mail Address * @return The formatted message. (NotNull) */ public static String getLabelsEmail() { // Mail Address return MessageFormat.format(BUNDLE.getString(LABELS_EMAIL), (Object[])null); } /** , name=LabelsEmailOrAccount, variable=0 */ public static final String LABELS_EMAIL_OR_ACCOUNT = "labels.emailOrAccount"; /** * value=Mail or Account * @return The formatted message. (NotNull) */ public static String getLabelsEmailOrAccount() { // Mail or Account return MessageFormat.format(BUNDLE.getString(LABELS_EMAIL_OR_ACCOUNT), (Object[])null); } /** , name=LabelsPassword, variable=0 */ public static final String LABELS_PASSWORD = "labels.password"; /** * value=Password * @return The formatted message. (NotNull) */ public static String getLabelsPassword() { // Password return MessageFormat.format(BUNDLE.getString(LABELS_PASSWORD), (Object[])null); } /** , name=LabelsVersionNo, variable=0 */ public static final String LABELS_VERSION_NO = "labels.versionNo"; /** * value=Version No * @return The formatted message. (NotNull) */ public static String getLabelsVersionNo() { // Version No return MessageFormat.format(BUNDLE.getString(LABELS_VERSION_NO), (Object[])null); } /** ------, name=LabelsList, variable=0 */ public static final String LABELS_LIST = "labels.list"; /** * value=List * @return The formatted message. 
(NotNull) */ public static String getLabelsList() { // List return MessageFormat.format(BUNDLE.getString(LABELS_LIST), (Object[])null); } /** , name=LabelsEdit, variable=0 */ public static final String LABELS_EDIT = "labels.edit"; /** * value=Edit * @return The formatted message. (NotNull) */ public static String getLabelsEdit() { // Edit return MessageFormat.format(BUNDLE.getString(LABELS_EDIT), (Object[])null); } /** , name=LabelsAdd, variable=0 */ public static final String LABELS_ADD = "labels.add"; /** * value=Add * @return The formatted message. (NotNull) */ public static String getLabelsAdd() { // Add return MessageFormat.format(BUNDLE.getString(LABELS_ADD), (Object[])null); } /** , name=LabelsSearch, variable=0 */ public static final String LABELS_SEARCH = "labels.search"; /** * value=Search * @return The formatted message. (NotNull) */ public static String getLabelsSearch() { // Search return MessageFormat.format(BUNDLE.getString(LABELS_SEARCH), (Object[])null); } /** , name=LabelsRegister, variable=0 */ public static final String LABELS_REGISTER = "labels.register"; /** * value=Register * @return The formatted message. (NotNull) */ public static String getLabelsRegister() { // Register return MessageFormat.format(BUNDLE.getString(LABELS_REGISTER), (Object[])null); } /** , name=LabelsUpdate, variable=0 */ public static final String LABELS_UPDATE = "labels.update"; /** * value=Update * @return The formatted message. (NotNull) */ public static String getLabelsUpdate() { // Update return MessageFormat.format(BUNDLE.getString(LABELS_UPDATE), (Object[])null); } /** , name=LabelsMemberList, variable=0 */ public static final String LABELS_MEMBER_LIST = "labels.member.list"; /** * value=@[labels.list] of @[labels.member] * @return The formatted message. 
(NotNull) */ public static String getLabelsMemberList() { // @[labels.list] of @[labels.member] return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER_LIST), (Object[])null); } /** , name=LabelsMemberAdd, variable=0 */ public static final String LABELS_MEMBER_ADD = "labels.member.add"; /** * value=@[labels.add] @[labels.member] * @return The formatted message. (NotNull) */ public static String getLabelsMemberAdd() { // @[labels.add] @[labels.member] return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER_ADD), (Object[])null); } /** , name=LabelsMemberEdit, variable=0 */ public static final String LABELS_MEMBER_EDIT = "labels.member.edit"; /** * value=@[labels.edit] @[labels.member] * @return The formatted message. (NotNull) */ public static String getLabelsMemberEdit() { // @[labels.edit] @[labels.member] return MessageFormat.format(BUNDLE.getString(LABELS_MEMBER_EDIT), (Object[])null); } /** ------------, name=LabelsHeaderTitleErrorMessage, variable=0 */ public static final String LABELS_HEADER_TITLE_ERROR_MESSAGE = "labels.header.title.error.message"; /** * value=Notice * @return The formatted message. (NotNull) */ public static String getLabelsHeaderTitleErrorMessage() { // Notice return MessageFormat.format(BUNDLE.getString(LABELS_HEADER_TITLE_ERROR_MESSAGE), (Object[])null); } /** ----------, name=MessagesInputNoteKeyword, variable=0 */ public static final String MESSAGES_INPUT_NOTE_KEYWORD = "messages.input.note.keyword"; /** * value=Input keyword to search * @return The formatted message. (NotNull) */ public static String getMessagesInputNoteKeyword() { // Input keyword to search return MessageFormat.format(BUNDLE.getString(MESSAGES_INPUT_NOTE_KEYWORD), (Object[])null); } /** , name=MessagesInputNoteEmail, variable=0 */ public static final String MESSAGES_INPUT_NOTE_EMAIL = "messages.input.note.email"; /** * value=Input your E-mail address * @return The formatted message. 
(NotNull) */ public static String getMessagesInputNoteEmail() { // Input your E-mail address return MessageFormat.format(BUNDLE.getString(MESSAGES_INPUT_NOTE_EMAIL), (Object[])null); } /** , name=MessagesInputNoteEmailOrAccount, variable=0 */ public static final String MESSAGES_INPUT_NOTE_EMAIL_OR_ACCOUNT = "messages.input.note.emailOrAccount"; /** * value=Input your E-mail or account * @return The formatted message. (NotNull) */ public static String getMessagesInputNoteEmailOrAccount() { // Input your E-mail or account return MessageFormat.format(BUNDLE.getString(MESSAGES_INPUT_NOTE_EMAIL_OR_ACCOUNT), (Object[])null); } /** , name=MessagesInputNotePassword, variable=0 */ public static final String MESSAGES_INPUT_NOTE_PASSWORD = "messages.input.note.password"; /** * value=Input your password * @return The formatted message. (NotNull) */ public static String getMessagesInputNotePassword() { // Input your password return MessageFormat.format(BUNDLE.getString(MESSAGES_INPUT_NOTE_PASSWORD), (Object[])null); } /** , name=MessagesInputNoteZipCode, variable=0 */ public static final String MESSAGES_INPUT_NOTE_ZIP_CODE = "messages.input.note.zipCode"; /** * value=e.g. 153-0051 * @return The formatted message. (NotNull) */ public static String getMessagesInputNoteZipCode() { // e.g. 153-0051 return MessageFormat.format(BUNDLE.getString(MESSAGES_INPUT_NOTE_ZIP_CODE), (Object[])null); } }
by adding WHITE_BASE tables
src/main/java/org/docksidestage/hangar/freeflute/dbcollabo/MessagesAndDatabase.java
by adding WHITE_BASE tables
<ide><path>rc/main/java/org/docksidestage/hangar/freeflute/dbcollabo/MessagesAndDatabase.java <ide> // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_ID <ide> // - THE_LONG_AND_WINDING_TABLE_AND_COLUMN_REF_DATE <ide> // - SHORT_DATE <add> // WHITE_BASE <add> // - BASE_ID <add> // - BASE_NAME <add> // - SEA_ID <add> // - LAND_ID <add> // - PIARI_ID <add> // - BONVO_ID <add> // - DSTORE_ID <add> // - AMBA_ID <add> // - MIRACO_ID <add> // - DOHOTEL_ID <add> // WHITE_BASE_ONE01_SEA <add> // - SEA_ID <add> // - SEA_NAME <add> // - BROADWAY_ID <add> // - DOCKSIDE_ID <add> // WHITE_BASE_ONE01_SEA_BROADWAY <add> // - BROADWAY_ID <add> // - BROADWAY_NAME <add> // WHITE_BASE_ONE01_SEA_DOCKSIDE <add> // - DOCKSIDE_ID <add> // - DOCKSIDE_NAME <add> // WHITE_BASE_ONE01_SEA_HANGAR <add> // - HANGAR_ID <add> // - HANGAR_NAME <add> // - SEA_ID <add> // WHITE_BASE_ONE01_SEA_MAGICLAMP <add> // - MAGICLAMP_ID <add> // - MAGICLAMP_NAME <add> // - SEA_ID <add> // WHITE_BASE_ONE02_LAND <add> // - LAND_ID <add> // - LAND_NAME <add> // WHITE_BASE_ONE03_PIARI <add> // - PIARI_ID <add> // - PIARI_NAME <add> // WHITE_BASE_ONE04_BONVO <add> // - BONVO_ID <add> // - BONVO_NAME <add> // - PARKSIDE_ID <add> // - STATIONSIDE_ID <add> // WHITE_BASE_ONE04_BONVO_PARKSIDE <add> // - PARKSIDE_ID <add> // - PARKSIDE_NAME <add> // WHITE_BASE_ONE04_BONVO_STATIONSIDE <add> // - STATIONSIDE_ID <add> // - STATIONSIDE_NAME <add> // WHITE_BASE_ONE05_DSTORE <add> // - DSTORE_ID <add> // - DSTORE_NAME <add> // WHITE_BASE_ONE06_AMBA <add> // - AMBA_ID <add> // - AMBA_NAME <add> // WHITE_BASE_ONE07_MIRACO <add> // - MIRACO_ID <add> // - MIRACO_NAME <add> // WHITE_BASE_ONE08_DOHOTEL <add> // - DOHOTEL_ID <add> // - DOHOTEL_NAME <add> // WHITE_BASE_ONE09_PALM <add> // - PALM_ID <add> // - PALM_NAME <add> // - BASE_ID <add> // WHITE_BASE_ONE10_CELEB <add> // - CELEB_ID <add> // - CELEB_NAME <add> // - BASE_ID <add> // WHITE_BASE_ONE11_CIRQUE <add> // - CIRQUE_ID <add> // - CIRQUE_NAME <add> // WHITE_BASE_ONE12_AMPHI <add> 
// - AMPHI_ID <add> // - AMPHI_NAME <add> // - BASE_ID <ide> // WHITE_CLASSIFICATION_DEPLOYMENT <ide> // - DEPLOYMENT_ID <ide> // - SEA_FLG
Java
mit
59c9b2eaeb225d5bc51295651dbe425583d5baa2
0
DrFuehrer/rdv,tectronics/rdv
/* * RDV * Real-time Data Viewer * http://it.nees.org/software/rdv/ * * Copyright (c) 2007 Palta Software * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
* * $URL$ * $Revision$ * $Date$ * $Author$ */ package org.rdv.viz.dial; import java.awt.BorderLayout; import java.awt.Color; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import javax.swing.BorderFactory; import javax.swing.JPanel; import javax.swing.JTextField; import org.jfree.chart.ChartPanel; import org.jfree.chart.JFreeChart; import org.jfree.chart.plot.dial.DialCap; import org.jfree.chart.plot.dial.DialPlot; import org.jfree.chart.plot.dial.DialPointer; import org.jfree.chart.plot.dial.DialTextAnnotation; import org.jfree.chart.plot.dial.DialValueIndicator; import org.jfree.chart.plot.dial.StandardDialFrame; import org.jfree.chart.plot.dial.StandardDialRange; import org.jfree.chart.plot.dial.StandardDialScale; import org.jfree.data.Range; import org.jfree.data.general.DatasetChangeEvent; import org.jfree.data.general.DatasetChangeListener; import org.jfree.data.general.DefaultValueDataset; import org.rdv.util.EngineeringFormat; /** * A JPanel for displaying a dial visualization. * * @author Jason P. 
Hanley * @see DialModel */ public class DialPanel extends JPanel { /** serialization version identifier */ private static final long serialVersionUID = 4279289359942253323L; /** the dial model */ private final DialModel model; /** the dataset for the dial */ private DefaultValueDataset dataset; /** the plot for the dial */ private DialPlot plot; /** the indicator for the dial value */ private DialValueIndicator dialValueIndicator; /** the text annotation for the channel name */ private DialTextAnnotation dialTextAnnotation; /** the scale for the dial */ private StandardDialScale dialScale; /** the formatter for the dial ticks */ private EngineeringFormat engineeringFormat; /** the formatter for the dial value indicator */ private EngineeringFormat engineeringFormatWithUnit; /** the text field for the lower bound of the dial */ private JTextField lowerBoundTextField; /** the text field for the upper bound of the dial */ private JTextField upperBoundTextField; /** the value for the critical minimum threshold */ private double criticalMinimumThreshold; /** the value for the warning minimum threshold */ private double warningMinimumThreshold; /** the value for the warning maximum threshold */ private double warningMaximumThreshold; /** the value for the critical maximum threshold */ private double criticalMaxThreshold; /** the dial ranges for the thresholds */ private StandardDialRange[] thresholdDialRanges = new StandardDialRange[4]; /** * Creates the DialPanel with the default model. * * @see DialModel */ public DialPanel() { this(new DialModel()); } /** * Creates the DialPanel with the given DialModel. * * @param model the model for the dial panel */ public DialPanel(DialModel model) { super(); this.model = model; initDataset(); initPanel(); updateRange(); initModelListener(); } /** * Gets the model for this dial. * * @return the model for this dial */ public DialModel getModel() { return model; } /** * Initializes the DialPlot dataset. 
*/ private void initDataset() { dataset = new DefaultValueDataset(model.getValue()); dataset.addChangeListener(new DatasetChangeListener() { public void datasetChanged(DatasetChangeEvent arg0) { checkThresholds(); } }); } /** * Initializes the dial panel. */ private void initPanel() { setLayout(new BorderLayout()); JPanel chartPanel = createDialPanel(); add(chartPanel, BorderLayout.CENTER); JPanel settingsPanel = createSettingsPanel(); add(settingsPanel, BorderLayout.PAGE_END); } /** * Creates the panel containing the dial. * * @return the dial panel */ private JPanel createDialPanel() { plot = new DialPlot(dataset); plot.setDialFrame(new StandardDialFrame()); engineeringFormat = new EngineeringFormat(); engineeringFormatWithUnit = new EngineeringFormat(); dialValueIndicator = new DialValueIndicator(); dialValueIndicator.setOutlinePaint(Color.black); dialValueIndicator.setRadius(0.7); dialValueIndicator.setVisible(false); dialValueIndicator.setNumberFormat(engineeringFormatWithUnit); plot.addLayer(dialValueIndicator); dialTextAnnotation = new DialTextAnnotation(""); dialTextAnnotation.setRadius(0.8); plot.addLayer(dialTextAnnotation); DialPointer dialPointer = new DialPointer.Pointer(); dialPointer.setRadius(0.9); plot.addPointer(dialPointer); plot.setCap(new DialCap()); dialScale = new BoundedDialScale(); dialScale.setStartAngle(-120); dialScale.setExtent(-300); dialScale.setMinorTickCount(5); dialScale.setTickLabelFormatter(engineeringFormat); dialScale.setTickRadius(0.9); JFreeChart chart = new JFreeChart(plot); chart.removeLegend(); return new ChartPanel(chart); } /** * Creates the panel containing settings. 
* * @return the settings panel */ private JPanel createSettingsPanel() { JPanel settingsPanel = new JPanel(); settingsPanel.setLayout(new BorderLayout()); settingsPanel.setBorder(BorderFactory.createEmptyBorder(0,5,5,5)); lowerBoundTextField = new JTextField(6); lowerBoundTextField.setToolTipText("The minimum value for the dial"); lowerBoundTextField.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent ae) { setRangeFromTextFields(); } }); settingsPanel.add(lowerBoundTextField, BorderLayout.LINE_START); upperBoundTextField = new JTextField(6); upperBoundTextField.setToolTipText("The maximum value for the dial"); upperBoundTextField.setHorizontalAlignment(JTextField.TRAILING); upperBoundTextField.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent ae) { setRangeFromTextFields(); } }); settingsPanel.add(upperBoundTextField, BorderLayout.LINE_END); return settingsPanel; } private void initModelListener() { model.addPropertyChangeListener(new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent pce) { if (pce.getPropertyName().equals("value")) { Number value = (Number)pce.getNewValue(); dataset.setValue(value); } else if (pce.getPropertyName().equals("name")) { String name = (String)pce.getNewValue(); dialValueIndicator.setVisible(name != null); if (name != null) { dialTextAnnotation.setLabel(name); } } else if (pce.getPropertyName().equals("unit")) { String unit = (String)pce.getNewValue(); engineeringFormatWithUnit.setUnit(unit); } else if (pce.getPropertyName().equals("range")) { updateRange(); } else if (pce.getPropertyName().equals("warningThreshold") || pce.getPropertyName().equals("criticalThreshold")) { updateThresholdRanges(); } } }); } /** * Sets the range of the dial according to the range text fields. 
*/ private void setRangeFromTextFields() { double lowerBound; double upperBound; try { lowerBound = Double.parseDouble(lowerBoundTextField.getText()); upperBound = Double.parseDouble(upperBoundTextField.getText()); if (lowerBound >= upperBound) { throw new NumberFormatException(); } } catch (NumberFormatException e) { lowerBoundTextField.setText(engineeringFormat.format(model.getRange().getLowerBound())); upperBoundTextField.setText(engineeringFormat.format(model.getRange().getUpperBound())); return; } model.setRange(new Range(lowerBound, upperBound)); } /** * Updates the dial range. * * @param range the dial range */ private void updateRange() { Range range = model.getRange(); lowerBoundTextField.setText(engineeringFormat.format(model.getRange().getLowerBound())); upperBoundTextField.setText(engineeringFormat.format(model.getRange().getUpperBound())); dialScale.setLowerBound(range.getLowerBound()); dialScale.setUpperBound(range.getUpperBound()); double tickIncrement = range.getLength()/10; dialScale.setMajorTickIncrement(tickIncrement); plot.addScale(0, dialScale); updateThresholdRanges(); } /** * Updates the threshold ranges. 
* * @param range the dial range */ private void updateThresholdRanges() { Range range = model.getRange(); double warningThresh = model.getWarningThreshold() * (range.getLength()); double criticalThresh = model.getCriticalThreshold() * (range.getLength()); criticalMinimumThreshold = range.getLowerBound() + criticalThresh; warningMinimumThreshold = range.getLowerBound() + warningThresh; warningMaximumThreshold = range.getUpperBound() - warningThresh; criticalMaxThreshold = range.getUpperBound() - criticalThresh; // remove previous dial ranges for (StandardDialRange dialRange : thresholdDialRanges) { if (dialRange != null) { plot.removeLayer(dialRange); } } thresholdDialRanges[0] = new StandardDialRange(range.getLowerBound(), criticalMinimumThreshold, Color.red); thresholdDialRanges[0].setInnerRadius(0); thresholdDialRanges[0].setOuterRadius(0.9); plot.addLayer(thresholdDialRanges[0]); thresholdDialRanges[1] = new StandardDialRange(criticalMinimumThreshold, warningMinimumThreshold, Color.yellow); thresholdDialRanges[1].setInnerRadius(0); thresholdDialRanges[1].setOuterRadius(0.9); plot.addLayer(thresholdDialRanges[1]); thresholdDialRanges[2] = new StandardDialRange(warningMaximumThreshold, criticalMaxThreshold, Color.yellow); thresholdDialRanges[2].setInnerRadius(0); thresholdDialRanges[2].setOuterRadius(0.9); plot.addLayer(thresholdDialRanges[2]); thresholdDialRanges[3] = new StandardDialRange(criticalMaxThreshold, range.getUpperBound(), Color.red); thresholdDialRanges[3].setInnerRadius(0); thresholdDialRanges[3].setOuterRadius(0.9); plot.addLayer(thresholdDialRanges[3]); // make sure to do this after changing the dial ranges because it gets reset for some reason dialValueIndicator.setTemplateValue(-222.222e222); checkThresholds(); } /** * Checks the dial value against the warning and critical threshold values. If * a threshold is reached, the dial value indicator will change color * according to the type of threshold reached. 
*/ private void checkThresholds() { Number numberValue = plot.getDataset().getValue(); if (numberValue == null) { dialValueIndicator.setBackgroundPaint(Color.white); } else { double value = numberValue.doubleValue(); if (value < criticalMinimumThreshold || value > criticalMaxThreshold) { dialValueIndicator.setBackgroundPaint(Color.red); } else if (value < warningMinimumThreshold || value > warningMaximumThreshold) { dialValueIndicator.setBackgroundPaint(Color.yellow); } else { dialValueIndicator.setBackgroundPaint(Color.white); } } } }
src/org/rdv/viz/dial/DialPanel.java
/* * RDV * Real-time Data Viewer * http://it.nees.org/software/rdv/ * * Copyright (c) 2007 Palta Software * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. 
* * $URL$ * $Revision$ * $Date$ * $Author$ */ package org.rdv.viz.dial; import java.awt.BorderLayout; import java.awt.Color; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import javax.swing.BorderFactory; import javax.swing.JPanel; import javax.swing.JTextField; import org.jfree.chart.ChartPanel; import org.jfree.chart.JFreeChart; import org.jfree.chart.plot.dial.DialCap; import org.jfree.chart.plot.dial.DialPlot; import org.jfree.chart.plot.dial.DialPointer; import org.jfree.chart.plot.dial.DialTextAnnotation; import org.jfree.chart.plot.dial.DialValueIndicator; import org.jfree.chart.plot.dial.StandardDialFrame; import org.jfree.chart.plot.dial.StandardDialRange; import org.jfree.chart.plot.dial.StandardDialScale; import org.jfree.data.Range; import org.jfree.data.general.DatasetChangeEvent; import org.jfree.data.general.DatasetChangeListener; import org.jfree.data.general.DefaultValueDataset; import org.rdv.util.EngineeringFormat; /** * A JPanel for displaying a dial visualization. * * @author Jason P. 
Hanley * @see DialModel */ public class DialPanel extends JPanel { /** serialization version identifier */ private static final long serialVersionUID = 4279289359942253323L; /** the dial model */ private final DialModel model; /** the dataset for the dial */ private DefaultValueDataset dataset; /** the plot for the dial */ private DialPlot plot; /** the indicator for the dial value */ private DialValueIndicator dialValueIndicator; /** the text annotation for the channel name */ private DialTextAnnotation dialTextAnnotation; /** the scale for the dial */ private StandardDialScale dialScale; /** the formatter for the dial value indicator */ private EngineeringFormat engineeringFormat; /** the text field for the lower bound of the dial */ private JTextField lowerBoundTextField; /** the text field for the upper bound of the dial */ private JTextField upperBoundTextField; /** the value for the critical minimum threshold */ private double criticalMinimumThreshold; /** the value for the warning minimum threshold */ private double warningMinimumThreshold; /** the value for the warning maximum threshold */ private double warningMaximumThreshold; /** the value for the critical maximum threshold */ private double criticalMaxThreshold; /** the dial ranges for the thresholds */ private StandardDialRange[] thresholdDialRanges = new StandardDialRange[4]; /** * Creates the DialPanel with the default model. * * @see DialModel */ public DialPanel() { this(new DialModel()); } /** * Creates the DialPanel with the given DialModel. * * @param model the model for the dial panel */ public DialPanel(DialModel model) { super(); this.model = model; initDataset(); initPanel(); updateRange(); initModelListener(); } /** * Gets the model for this dial. * * @return the model for this dial */ public DialModel getModel() { return model; } /** * Initializes the DialPlot dataset. 
*/ private void initDataset() { dataset = new DefaultValueDataset(model.getValue()); dataset.addChangeListener(new DatasetChangeListener() { public void datasetChanged(DatasetChangeEvent arg0) { checkThresholds(); } }); } /** * Initializes the dial panel. */ private void initPanel() { setLayout(new BorderLayout()); JPanel chartPanel = createDialPanel(); add(chartPanel, BorderLayout.CENTER); JPanel settingsPanel = createSettingsPanel(); add(settingsPanel, BorderLayout.PAGE_END); } /** * Creates the panel containing the dial. * * @return the dial panel */ private JPanel createDialPanel() { plot = new DialPlot(dataset); plot.setDialFrame(new StandardDialFrame()); engineeringFormat = new EngineeringFormat(); dialValueIndicator = new DialValueIndicator(); dialValueIndicator.setOutlinePaint(Color.black); dialValueIndicator.setRadius(0.7); dialValueIndicator.setVisible(false); dialValueIndicator.setNumberFormat(engineeringFormat); plot.addLayer(dialValueIndicator); dialTextAnnotation = new DialTextAnnotation(""); dialTextAnnotation.setRadius(0.8); plot.addLayer(dialTextAnnotation); DialPointer dialPointer = new DialPointer.Pointer(); dialPointer.setRadius(0.9); plot.addPointer(dialPointer); plot.setCap(new DialCap()); dialScale = new BoundedDialScale(); dialScale.setStartAngle(-120); dialScale.setExtent(-300); dialScale.setMinorTickCount(5); dialScale.setTickLabelFormatter(engineeringFormat); dialScale.setTickRadius(0.9); JFreeChart chart = new JFreeChart(plot); chart.removeLegend(); return new ChartPanel(chart); } /** * Creates the panel containing settings. 
* * @return the settings panel */ private JPanel createSettingsPanel() { JPanel settingsPanel = new JPanel(); settingsPanel.setLayout(new BorderLayout()); settingsPanel.setBorder(BorderFactory.createEmptyBorder(0,5,5,5)); lowerBoundTextField = new JTextField(6); lowerBoundTextField.setToolTipText("The minimum value for the dial"); lowerBoundTextField.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent ae) { setRangeFromTextFields(); } }); settingsPanel.add(lowerBoundTextField, BorderLayout.LINE_START); upperBoundTextField = new JTextField(6); upperBoundTextField.setToolTipText("The maximum value for the dial"); upperBoundTextField.setHorizontalAlignment(JTextField.TRAILING); upperBoundTextField.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent ae) { setRangeFromTextFields(); } }); settingsPanel.add(upperBoundTextField, BorderLayout.LINE_END); return settingsPanel; } private void initModelListener() { model.addPropertyChangeListener(new PropertyChangeListener() { public void propertyChange(PropertyChangeEvent pce) { if (pce.getPropertyName().equals("value")) { Number value = (Number)pce.getNewValue(); dataset.setValue(value); } else if (pce.getPropertyName().equals("name")) { String name = (String)pce.getNewValue(); dialValueIndicator.setVisible(name != null); if (name != null) { dialTextAnnotation.setLabel(name); } } else if (pce.getPropertyName().equals("unit")) { String unit = (String)pce.getNewValue(); engineeringFormat.setUnit(unit); } else if (pce.getPropertyName().equals("range")) { updateRange(); } else if (pce.getPropertyName().equals("warningThreshold") || pce.getPropertyName().equals("criticalThreshold")) { updateThresholdRanges(); } } }); } /** * Sets the range of the dial according to the range text fields. 
*/ private void setRangeFromTextFields() { double lowerBound; double upperBound; try { lowerBound = Double.parseDouble(lowerBoundTextField.getText()); upperBound = Double.parseDouble(upperBoundTextField.getText()); if (lowerBound >= upperBound) { throw new NumberFormatException(); } } catch (NumberFormatException e) { lowerBoundTextField.setText(engineeringFormat.format(model.getRange().getLowerBound())); upperBoundTextField.setText(engineeringFormat.format(model.getRange().getUpperBound())); return; } model.setRange(new Range(lowerBound, upperBound)); } /** * Updates the dial range. * * @param range the dial range */ private void updateRange() { Range range = model.getRange(); lowerBoundTextField.setText(engineeringFormat.format(model.getRange().getLowerBound())); upperBoundTextField.setText(engineeringFormat.format(model.getRange().getUpperBound())); dialScale.setLowerBound(range.getLowerBound()); dialScale.setUpperBound(range.getUpperBound()); double tickIncrement = range.getLength()/10; dialScale.setMajorTickIncrement(tickIncrement); plot.addScale(0, dialScale); updateThresholdRanges(); } /** * Updates the threshold ranges. 
* * @param range the dial range */ private void updateThresholdRanges() { Range range = model.getRange(); double warningThresh = model.getWarningThreshold() * (range.getLength()); double criticalThresh = model.getCriticalThreshold() * (range.getLength()); criticalMinimumThreshold = range.getLowerBound() + criticalThresh; warningMinimumThreshold = range.getLowerBound() + warningThresh; warningMaximumThreshold = range.getUpperBound() - warningThresh; criticalMaxThreshold = range.getUpperBound() - criticalThresh; // remove previous dial ranges for (StandardDialRange dialRange : thresholdDialRanges) { if (dialRange != null) { plot.removeLayer(dialRange); } } thresholdDialRanges[0] = new StandardDialRange(range.getLowerBound(), criticalMinimumThreshold, Color.red); thresholdDialRanges[0].setInnerRadius(0); thresholdDialRanges[0].setOuterRadius(0.9); plot.addLayer(thresholdDialRanges[0]); thresholdDialRanges[1] = new StandardDialRange(criticalMinimumThreshold, warningMinimumThreshold, Color.yellow); thresholdDialRanges[1].setInnerRadius(0); thresholdDialRanges[1].setOuterRadius(0.9); plot.addLayer(thresholdDialRanges[1]); thresholdDialRanges[2] = new StandardDialRange(warningMaximumThreshold, criticalMaxThreshold, Color.yellow); thresholdDialRanges[2].setInnerRadius(0); thresholdDialRanges[2].setOuterRadius(0.9); plot.addLayer(thresholdDialRanges[2]); thresholdDialRanges[3] = new StandardDialRange(criticalMaxThreshold, range.getUpperBound(), Color.red); thresholdDialRanges[3].setInnerRadius(0); thresholdDialRanges[3].setOuterRadius(0.9); plot.addLayer(thresholdDialRanges[3]); // make sure to do this after changing the dial ranges because it gets reset for some reason dialValueIndicator.setTemplateValue(-222.222e222); checkThresholds(); } /** * Checks the dial value against the warning and critical threshold values. If * a threshold is reached, the dial value indicator will change color * according to the type of threshold reached. 
*/ private void checkThresholds() { Number numberValue = plot.getDataset().getValue(); if (numberValue == null) { dialValueIndicator.setBackgroundPaint(Color.white); } else { double value = numberValue.doubleValue(); if (value < criticalMinimumThreshold || value > criticalMaxThreshold) { dialValueIndicator.setBackgroundPaint(Color.red); } else if (value < warningMinimumThreshold || value > warningMaximumThreshold) { dialValueIndicator.setBackgroundPaint(Color.yellow); } else { dialValueIndicator.setBackgroundPaint(Color.white); } } } }
Fix channel unit display in DialViz.
src/org/rdv/viz/dial/DialPanel.java
Fix channel unit display in DialViz.
<ide><path>rc/org/rdv/viz/dial/DialPanel.java <ide> /** the scale for the dial */ <ide> private StandardDialScale dialScale; <ide> <del> /** the formatter for the dial value indicator */ <add> /** the formatter for the dial ticks */ <ide> private EngineeringFormat engineeringFormat; <add> <add> /** the formatter for the dial value indicator */ <add> private EngineeringFormat engineeringFormatWithUnit; <ide> <ide> /** the text field for the lower bound of the dial */ <ide> private JTextField lowerBoundTextField; <ide> plot.setDialFrame(new StandardDialFrame()); <ide> <ide> engineeringFormat = new EngineeringFormat(); <add> engineeringFormatWithUnit = new EngineeringFormat(); <ide> <ide> dialValueIndicator = new DialValueIndicator(); <ide> dialValueIndicator.setOutlinePaint(Color.black); <ide> dialValueIndicator.setRadius(0.7); <ide> dialValueIndicator.setVisible(false); <del> dialValueIndicator.setNumberFormat(engineeringFormat); <add> dialValueIndicator.setNumberFormat(engineeringFormatWithUnit); <ide> plot.addLayer(dialValueIndicator); <ide> <ide> dialTextAnnotation = new DialTextAnnotation(""); <ide> } <ide> } else if (pce.getPropertyName().equals("unit")) { <ide> String unit = (String)pce.getNewValue(); <del> engineeringFormat.setUnit(unit); <add> engineeringFormatWithUnit.setUnit(unit); <ide> } else if (pce.getPropertyName().equals("range")) { <ide> updateRange(); <ide> } else if (pce.getPropertyName().equals("warningThreshold") ||
Java
apache-2.0
d9da4ac9033e5fb4dc614946b2c7ee3ac981699a
0
BackupTheBerlios/gavrog,BackupTheBerlios/gavrog,BackupTheBerlios/gavrog,BackupTheBerlios/gavrog,BackupTheBerlios/gavrog
/* Copyright 2006 Olaf Delgado-Friedrichs Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.gavrog.systre; import java.awt.Color; import java.awt.Insets; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.NoSuchElementException; import javax.swing.JFileChooser; import javax.swing.SwingUtilities; import org.gavrog.box.collections.FilteredIterator; import org.gavrog.box.collections.IteratorAdapter; import org.gavrog.box.collections.Pair; import org.gavrog.box.simple.DataFormatException; import org.gavrog.box.simple.Misc; import org.gavrog.joss.crossover.Skeleton; import org.gavrog.joss.dsyms.basic.DelaneySymbol; import org.gavrog.joss.dsyms.generators.InputIterator; import org.gavrog.joss.geometry.SpaceGroupCatalogue; import org.gavrog.joss.pgraphs.basic.PeriodicGraph; import org.gavrog.joss.pgraphs.io.NetParser; import org.gavrog.joss.pgraphs.io.Output; import buoy.event.CommandEvent; import buoy.event.WindowClosingEvent; import buoy.widget.BButton; import buoy.widget.BDialog; import buoy.widget.BFileChooser; import buoy.widget.BFrame; import buoy.widget.BLabel; import buoy.widget.BOutline; import buoy.widget.BScrollBar; import buoy.widget.BScrollPane; import buoy.widget.BStandardDialog; 
import buoy.widget.BTextArea; import buoy.widget.BorderContainer; import buoy.widget.ColumnContainer; import buoy.widget.GridContainer; import buoy.widget.LayoutInfo; /** * A simple GUI for Gavrog Systre. * * @author Olaf Delgado * @version $Id: SystreGUI.java,v 1.44 2006/05/05 06:41:52 odf Exp $ */ public class SystreGUI extends BFrame { // --- some constants used in the GUI final private static Color textColor = new Color(255, 250, 240); final private static Color buttonColor = new Color(224, 224, 240); final private static Insets defaultInsets = new Insets(5, 5, 5, 5); // --- file choosers final private BFileChooser inFileChooser = new BFileChooser(BFileChooser.OPEN_FILE, "Open data file"); final private BFileChooser outFileChooser = new BFileChooser(BFileChooser.SAVE_FILE, "Save output"); // --- GUI elements that need to be accessed by more than one method final private BTextArea output; final private BScrollBar vscroll; final private BButton openButton; final private BButton nextButton; final private BButton saveButton; final private BButton optionsButton; final private BLabel statusBar; // --- the object doing the actual processing private final SystreCmdline systre = new SystreCmdline(); // --- fields to store some temporary information private Iterator netsToProcess = null; private String strippedFileName; private String fullFileName; private StringBuffer currentTranscript = new StringBuffer(); private String lastFinishedTranscript = null; private List bufferedNets = new LinkedList(); private int count; // --- options private boolean singleWrite = false; private boolean readArchivesAsInput = false; /** * Constructs an instance. 
*/ public SystreGUI() { super("Systre 1.0 beta"); final BorderContainer main = new BorderContainer(); main.setDefaultLayout(new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.BOTH, null, null)); main.setBackground(textColor); final BorderContainer top = new BorderContainer(); top.setDefaultLayout(new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.NONE, defaultInsets, null)); top.setBackground(null); final BLabel label = new BLabel("<html><h1>Gavrog Systre</h1><br>" + "Version 1.0 beta 060504<br><br>" + "by Olaf Delgado-Friedrichs 2001-2006</html>"); top.add(label, BorderContainer.NORTH); final GridContainer buttonBar = new GridContainer(4, 1); buttonBar.setDefaultLayout(new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.HORIZONTAL, null, null)); buttonBar.add(openButton = makeButton("Open...", this, "doOpen"), 0, 0); buttonBar.add(nextButton = makeButton("Next", this, "doNext"), 1, 0); buttonBar.add(saveButton = makeButton("Save as...", this, "doSave"), 2, 0); buttonBar.add(optionsButton = makeButton("Options...", this, "doOptions"), 3, 0); top.add(buttonBar, BorderContainer.CENTER, new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.HORIZONTAL, null, null)); statusBar = new BLabel(); final BOutline outline = BOutline.createLineBorder(statusBar, Color.BLACK, 2); outline.setBackground(Color.WHITE); top.add(outline, BorderContainer.SOUTH, new LayoutInfo(LayoutInfo.WEST, LayoutInfo.HORIZONTAL, null, null)); main.add(top, BorderContainer.NORTH); output = new BTextArea(20, 40); output.setBackground(null); final BScrollPane scrollPane = new BScrollPane(output, BScrollPane.SCROLLBAR_ALWAYS, BScrollPane.SCROLLBAR_ALWAYS); scrollPane.setForceHeight(true); scrollPane.setForceWidth(true); this.vscroll = scrollPane.getVerticalScrollBar(); scrollPane.setBackground(null); main.add(scrollPane, BorderContainer.CENTER); final BButton cancelButton = makeButton("Cancel", this, "doCancel"); final BButton exitButton = makeButton("Exit", this, "doQuit"); final BorderContainer bottom = new BorderContainer(); 
bottom.setDefaultLayout(new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.NONE, defaultInsets, null)); bottom.setBackground(null); bottom.add(cancelButton, BorderContainer.WEST); bottom.add(exitButton, BorderContainer.EAST); main.add(bottom, BorderContainer.SOUTH, new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.HORIZONTAL, null, null)); setContent(main); captureOutput(); addEventLink(WindowClosingEvent.class, this, "doQuit"); nextButton.setEnabled(false); saveButton.setEnabled(false); final JFileChooser inchsr = (JFileChooser) inFileChooser.getComponent(); inchsr.addChoosableFileFilter(new ExtensionFilter("ds", "Delaney-Dress Symbol Files")); inchsr.addChoosableFileFilter(new ExtensionFilter("arc", "Systre Archives")); inchsr.addChoosableFileFilter(new ExtensionFilter(new String[] {"cgd", "pgr" }, "Systre Input Files")); final JFileChooser outchsr = (JFileChooser) outFileChooser.getComponent(); outchsr.addChoosableFileFilter(new ExtensionFilter("arc", "Systre Archive Files")); outchsr.addChoosableFileFilter(new ExtensionFilter("cgd", "Embedded Nets")); outchsr.addChoosableFileFilter(new ExtensionFilter("pgr", "Abstract Topologies")); outchsr.addChoosableFileFilter(new ExtensionFilter("out", "Systre Transcripts")); systre.addEventLink(String.class, this, "status"); status("Your orders, Sir?"); pack(); setVisible(true); } public void status(final String text) { SwingUtilities.invokeLater(new Runnable() { public void run() { statusBar.setText("<html><font color=\"green\">" + text + "</font></html>"); } }); } private BButton makeButton(final String label, final Object target, final String method) { final BButton button = new BButton(label); button.setBackground(buttonColor); button.addEventLink(CommandEvent.class, target, method); return button; } private void captureOutput() { final OutputStream stream = new OutputStream() { private StringBuffer buffer = new StringBuffer(128); public void write(int b) throws IOException { final char c = (char) b; buffer.append(c); if (c == 
'\n' || buffer.length() > 1023) { flush(); } } public void flush() { output.append(buffer.toString()); currentTranscript.append(buffer); buffer.delete(0, buffer.length()); SwingUtilities.invokeLater(new Runnable() { public void run() { vscroll.setValue(vscroll.getMaximum()); } }); } }; this.systre.setOutStream(new PrintStream(stream)); } public void doOpen() { final boolean success = this.inFileChooser.showDialog(this); if (success) { this.netsToProcess = null; final String filename = this.inFileChooser.getSelectedFile().getName(); final File dir = this.inFileChooser.getDirectory(); final String path = new File(dir, filename).getAbsolutePath(); this.output.setText(""); disableMainButtons(); if (!this.readArchivesAsInput && filename.endsWith(".arc")) { systre.processArchive(path); enableMainButtons(); } else { openFile(path); doNext(); } } } public void doSave() { final String name = this.strippedFileName; this.outFileChooser.setSelectedFile(new File(name + ".out")); final boolean success = this.outFileChooser.showDialog(this); if (success) { final String filename = this.outFileChooser.getSelectedFile().getName(); final File dir = this.outFileChooser.getDirectory(); final File file = new File(dir, filename); final boolean append; if (file.exists()) { final int choice = new BStandardDialog("Systre - File exists", "File \"" + file + "\" exists. 
Overwrite?", BStandardDialog.QUESTION) .showOptionDialog(this, new String[] { "Overwrite", "Append", "Cancel" }, "Cancel"); if (choice > 1) { return; } else { append = choice == 1; } } else { append = false; } disableMainButtons(); new Thread(new Runnable() { public void run() { try { final BufferedWriter writer = new BufferedWriter(new FileWriter( file, append)); final int n = filename.lastIndexOf('.'); final String extension = filename.substring(n+1); if (singleWrite) { writeStructure(extension, writer, systre.getLastStructure(), lastFinishedTranscript); } else { for (final Iterator iter = bufferedNets.iterator(); iter .hasNext();) { final Pair item = (Pair) iter.next(); final ProcessedNet net = (ProcessedNet) item.getFirst(); final String transcript = (String) item.getSecond(); writeStructure(extension, writer, net, transcript); } } writer.flush(); writer.close(); } catch (IOException ex) { reportException(null, "FILE", "I/O error writing to " + file, false); } catch (Exception ex) { reportException(ex, "INTERNAL", "Unexpected exception while writing to " + file, true); } finally { enableMainButtons(); } } }).start(); } } private void writeStructure(final String extension, final BufferedWriter writer, final ProcessedNet net, final String transcript) throws IOException { if (net != null) { if ("arc".equals(extension)) { // --- write archive entry final String txt = new Archive.Entry(net.getGraph(), net.getName()) .toString(); writer.write(txt); writer.write("\n"); } else if ("cgd".equals(extension)) { // --- write embedding structure with full symmetry net.writeEmbedding(writer, true, systre.getOutputFullCell()); } else if ("pgr".equals(extension)) { // --- write abstract, unembedded periodic graph Output.writePGR(writer, net.getGraph().canonical(), net.getName()); writer.write("\n"); } else { final String lineSeparator = System.getProperty("line.separator"); // --- write the full transcript writer.write(transcript.replaceAll(lineSeparator, "\n")); 
writer.write("\n"); } } } public void doOptions() { final BDialog dialog = new BDialog(this, "Systre - Options", true); final ColumnContainer column = new ColumnContainer(); column.setDefaultLayout(new LayoutInfo(LayoutInfo.WEST, LayoutInfo.NONE, defaultInsets, null)); column.setBackground(textColor); try { column.add(new OptionCheckBox("Use Builtin Archive", this.systre, "useBuiltinArchive")); column.add(new OptionCheckBox("Process '.arc' files like normal input", this, "readArchivesAsInput")); column.add(new OptionCheckBox("Prefer Second Origin On Input", SpaceGroupCatalogue.class, "preferSecondOrigin")); column.add(new OptionCheckBox("Prefer Hexagonal Setting On Input", SpaceGroupCatalogue.class, "preferHexagonal")); column.add(new OptionCheckBox("Relax Node Positions", this.systre, "relaxPositions")); column.add(new OptionCheckBox("Output Full Conventional Cell", this.systre, "outputFullCell")); column.add(new OptionCheckBox("Save only last net finished", this, "singleWrite")); } catch (final Exception ex) { reportException(ex, "FATAL", "serious internal problem", true); return; } final BButton okButton = makeButton("Ok", dialog, "dispose"); column.add(okButton, new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.NONE, defaultInsets, null)); dialog.setContent(column); dialog.addEventLink(WindowClosingEvent.class, dialog, "dispose"); dialog.pack(); dialog.setVisible(true); } public void doNext() { disableMainButtons(); new Thread(new Runnable() { public void run() { nextNet(); enableMainButtons(); } }).start(); } public boolean moreNets() { return this.netsToProcess != null && this.netsToProcess.hasNext(); } public void nextNet() { if (!moreNets()) { finishFile(); return; } final PrintStream out = this.systre.getOutStream(); InputStructure net = null; PeriodicGraph G = null; Exception problem = null; this.currentTranscript.delete(0, this.currentTranscript.length()); final class BailOut extends Throwable {} try { // --- read the next net try { net = (InputStructure) 
this.netsToProcess.next(); G = net.getGraph(); } catch (DataFormatException ex) { problem = ex; } catch (Exception ex) { reportException(ex, "INTERNAL", "Unexpected exception", true); throw new BailOut(); } if (G == null) { reportException(problem, "INPUT", null, false); throw new BailOut(); } ++this.count; // --- some blank lines as separators out.println(); if (this.count > 1) { out.println(); out.println(); } final String archiveName; final String displayName; if (net.getName() == null) { archiveName = this.strippedFileName + "-#" + this.count; displayName = ""; } else { archiveName = net.getName(); displayName = " - \"" + net.getName() + "\""; } out.println("Structure #" + this.count + displayName + "."); out.println(); boolean success = false; if (problem != null) { reportException(problem, "INPUT", null, false); } else { try { this.systre.processGraph(G, archiveName, net.getGroup()); success = true; } catch (SystreException ex) { reportException(ex, ex.getType().toString(), null, false); } catch (Exception ex) { reportException(ex, "INTERNAL", "Unexpected exception", true); } } out.println(); out.println("Finished structure #" + this.count + displayName + "."); this.lastFinishedTranscript = this.currentTranscript.toString(); if (success) { final ProcessedNet tmp = this.systre.getLastStructure(); this.bufferedNets.add(new Pair(tmp, this.lastFinishedTranscript)); } } catch (BailOut ex) { } if (!moreNets()) { finishFile(); } } private boolean openFile(final String filePath) { final PrintStream out = this.systre.getOutStream(); this.netsToProcess = null; this.count = 0; final BufferedReader reader; try { reader = new BufferedReader(new FileReader(filePath)); } catch (FileNotFoundException ex) { reportException(ex, "FILE", null, false); return false; } this.fullFileName = filePath; this.strippedFileName = new File(filePath).getName().replaceFirst("\\..*$", ""); final String extension = filePath.substring(filePath.lastIndexOf('.') + 1); out.println("Data file \"" + 
filePath + "\"."); this.bufferedNets.clear(); if ("cgd".equals(extension) || "pgr".equals(extension)) { final NetParser parser = new NetParser(reader); this.netsToProcess = new IteratorAdapter() { protected Object findNext() throws NoSuchElementException { if (parser.atEnd()) { throw new NoSuchElementException("at end"); } else { return new InputStructure(parser.parseNet(), parser.getName(), parser.getSpaceGroup()); } } }; return true; } else if ("ds".equals(extension)) { this.netsToProcess = new FilteredIterator(new InputIterator(reader)) { public Object filter(Object x) { final DelaneySymbol ds = (DelaneySymbol) x; final PeriodicGraph graph = new Skeleton(ds); final String group = (ds.dim() == 3) ? "P1" : "p1"; return new InputStructure(graph, null, group); } }; return true; } else if ("arc".equals(extension)) { this.netsToProcess = new IteratorAdapter() { protected Object findNext() throws NoSuchElementException { final Archive.Entry entry = Archive.Entry.read(reader); if (entry == null) { throw new NoSuchElementException("at end"); } final String key = entry.getKey(); final PeriodicGraph graph = PeriodicGraph.fromInvariantString(key); final String group = (graph.getDimension() == 3) ? 
"P1" : "p1"; return new InputStructure(graph, entry.getName(), group); } }; } else { reportException(null, "FILE", "Unrecognized extension " + extension, false); } return false; } private void finishFile() { final PrintStream out = this.systre.getOutStream(); out.println(); out.println("Finished data file \"" + this.fullFileName + "\"."); this.netsToProcess = null; } private void reportException(final Throwable ex, final String type, final String msg, final boolean details) { final PrintStream out = systre.getOutStream(); out.println(); if (details) { out.println("=================================================="); } final boolean cancelled = ex instanceof SystreException && ((SystreException) ex).getType().equals(SystreException.CANCELLED); final String text; if (cancelled) { text = "CANCELLING"; } else { text = "ERROR (" + type + ") - " + (msg == null ? "" : msg); } out.print("!!! " + text); if (ex != null) { if (details) { out.println(); out.print(Misc.stackTrace(ex)); out.println("=================================================="); } else { out.println(ex.getMessage() + "."); } } invokeAndWait(new Runnable() { public void run() { final String title = "Systre: " + type + " ERROR"; final String msg = text + (ex != null ? 
" - " + ex.getMessage() : "") + "."; final BStandardDialog dialog = new BStandardDialog(title, msg, BStandardDialog.ERROR); dialog.showMessageDialog(SystreGUI.this); } }); } private void disableMainButtons() { invokeAndWait(new Runnable() { public void run() { openButton.setEnabled(false); nextButton.setEnabled(false); saveButton.setEnabled(false); optionsButton.setEnabled(false); } }); } private void enableMainButtons() { invokeLater(new Runnable() { public void run() { openButton.setEnabled(true); if (moreNets()) { nextButton.setEnabled(true); } saveButton.setEnabled(true); optionsButton.setEnabled(true); } }); } public void doCancel() { this.systre.cancel(); } public void doQuit() { System.exit(0); } /** * Wrapper for {@link SwingUtilities.invokeAndWait}}. If we're in the event dispatch * thread, the argument is just invoked normally. * * @param runnable what to invoke. */ private void invokeAndWait(final Runnable runnable) { if (SwingUtilities.isEventDispatchThread()) { runnable.run(); } else { try { SwingUtilities.invokeAndWait(runnable); } catch (Exception ex) { } } } /** * Wrapper for {@link SwingUtilities.invokeLater}}. If we're in the event dispatch * thread, the argument is just invoked normally. * * @param runnable what to invoke. */ private void invokeLater(final Runnable runnable) { if (SwingUtilities.isEventDispatchThread()) { runnable.run(); } else { try { SwingUtilities.invokeLater(runnable); } catch (Exception ex) { } } } public boolean getSingleWrite() { return singleWrite; } public void setSingleWrite(boolean singleWrite) { this.singleWrite = singleWrite; } public boolean getReadArchivesAsInput() { return readArchivesAsInput; } public void setReadArchivesAsInput(boolean readArchivesAsInput) { this.readArchivesAsInput = readArchivesAsInput; } public static void main(final String args[]) { new SystreGUI(); } }
src/org/gavrog/systre/SystreGUI.java
/* Copyright 2006 Olaf Delgado-Friedrichs Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.gavrog.systre; import java.awt.Color; import java.awt.Insets; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.NoSuchElementException; import javax.swing.JFileChooser; import javax.swing.SwingUtilities; import org.gavrog.box.collections.FilteredIterator; import org.gavrog.box.collections.IteratorAdapter; import org.gavrog.box.collections.Pair; import org.gavrog.box.simple.DataFormatException; import org.gavrog.box.simple.Misc; import org.gavrog.joss.crossover.Skeleton; import org.gavrog.joss.dsyms.basic.DelaneySymbol; import org.gavrog.joss.dsyms.generators.InputIterator; import org.gavrog.joss.geometry.SpaceGroupCatalogue; import org.gavrog.joss.pgraphs.basic.PeriodicGraph; import org.gavrog.joss.pgraphs.io.NetParser; import org.gavrog.joss.pgraphs.io.Output; import buoy.event.CommandEvent; import buoy.event.WindowClosingEvent; import buoy.widget.BButton; import buoy.widget.BDialog; import buoy.widget.BFileChooser; import buoy.widget.BFrame; import buoy.widget.BLabel; import buoy.widget.BOutline; import buoy.widget.BScrollBar; import buoy.widget.BScrollPane; import buoy.widget.BStandardDialog; 
import buoy.widget.BTextArea; import buoy.widget.BorderContainer; import buoy.widget.ColumnContainer; import buoy.widget.GridContainer; import buoy.widget.LayoutInfo; import buoy.widget.Widget; /** * A simple GUI for Gavrog Systre. * * @author Olaf Delgado * @version $Id: SystreGUI.java,v 1.43 2006/05/05 01:23:04 odf Exp $ */ public class SystreGUI extends BFrame { // --- some constants used in the GUI final private static Color textColor = new Color(255, 250, 240); final private static Color buttonColor = new Color(224, 224, 240); final private static Insets defaultInsets = new Insets(5, 5, 5, 5); // --- file choosers final private BFileChooser inFileChooser = new BFileChooser(BFileChooser.OPEN_FILE, "Open data file"); final private BFileChooser outFileChooser = new BFileChooser(BFileChooser.SAVE_FILE, "Save output"); // --- GUI elements that need to be accessed by more than one method final private BTextArea output; final private BScrollBar vscroll; final private BButton openButton; final private BButton nextButton; final private BButton saveButton; final private BButton optionsButton; final private Widget statusBar; // --- the object doing the actual processing private final SystreCmdline systre = new SystreCmdline(); // --- fields to store some temporary information private Iterator netsToProcess = null; private String strippedFileName; private String fullFileName; private StringBuffer currentTranscript = new StringBuffer(); private String lastFinishedTranscript = null; private List bufferedNets = new LinkedList(); private int count; // --- options private boolean singleWrite = false; private boolean readArchivesAsInput = false; /** * Constructs an instance. 
*/ public SystreGUI() { super("Systre 1.0 beta"); final BorderContainer main = new BorderContainer(); main.setDefaultLayout(new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.BOTH, null, null)); main.setBackground(textColor); final BorderContainer top = new BorderContainer(); top.setDefaultLayout(new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.NONE, defaultInsets, null)); top.setBackground(null); final BLabel label = new BLabel("<html><h1>Gavrog Systre</h1><br>" + "Version 1.0 beta 060504<br><br>" + "by Olaf Delgado-Friedrichs 2001-2006</html>"); top.add(label, BorderContainer.NORTH); final GridContainer buttonBar = new GridContainer(4, 1); buttonBar.setDefaultLayout(new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.HORIZONTAL, null, null)); buttonBar.add(openButton = makeButton("Open...", this, "doOpen"), 0, 0); buttonBar.add(nextButton = makeButton("Next", this, "doNext"), 1, 0); buttonBar.add(saveButton = makeButton("Save as...", this, "doSave"), 2, 0); buttonBar.add(optionsButton = makeButton("Options...", this, "doOptions"), 3, 0); top.add(buttonBar, BorderContainer.CENTER, new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.HORIZONTAL, null, null)); statusBar = new BLabel("<html><font color=\"green\">Okay!<font></html>"); final BOutline outline = BOutline.createLineBorder(statusBar, Color.BLACK, 2); outline.setBackground(Color.WHITE); top.add(outline, BorderContainer.SOUTH, new LayoutInfo(LayoutInfo.WEST, LayoutInfo.HORIZONTAL, null, null)); main.add(top, BorderContainer.NORTH); output = new BTextArea(20, 40); output.setBackground(null); final BScrollPane scrollPane = new BScrollPane(output, BScrollPane.SCROLLBAR_ALWAYS, BScrollPane.SCROLLBAR_ALWAYS); scrollPane.setForceHeight(true); scrollPane.setForceWidth(true); this.vscroll = scrollPane.getVerticalScrollBar(); scrollPane.setBackground(null); main.add(scrollPane, BorderContainer.CENTER); final BButton cancelButton = makeButton("Cancel", this, "doCancel"); final BButton exitButton = makeButton("Exit", this, "doQuit"); final 
BorderContainer bottom = new BorderContainer(); bottom.setDefaultLayout(new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.NONE, defaultInsets, null)); bottom.setBackground(null); bottom.add(cancelButton, BorderContainer.WEST); bottom.add(exitButton, BorderContainer.EAST); main.add(bottom, BorderContainer.SOUTH, new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.HORIZONTAL, null, null)); setContent(main); captureOutput(); addEventLink(WindowClosingEvent.class, this, "doQuit"); nextButton.setEnabled(false); saveButton.setEnabled(false); final JFileChooser inchsr = (JFileChooser) inFileChooser.getComponent(); inchsr.addChoosableFileFilter(new ExtensionFilter("ds", "Delaney-Dress Symbol Files")); inchsr.addChoosableFileFilter(new ExtensionFilter("arc", "Systre Archives")); inchsr.addChoosableFileFilter(new ExtensionFilter(new String[] {"cgd", "pgr" }, "Systre Input Files")); final JFileChooser outchsr = (JFileChooser) outFileChooser.getComponent(); outchsr.addChoosableFileFilter(new ExtensionFilter("arc", "Systre Archive Files")); outchsr.addChoosableFileFilter(new ExtensionFilter("cgd", "Embedded Nets")); outchsr.addChoosableFileFilter(new ExtensionFilter("pgr", "Abstract Topologies")); outchsr.addChoosableFileFilter(new ExtensionFilter("out", "Systre Transcripts")); pack(); setVisible(true); } private BButton makeButton(final String label, final Object target, final String method) { final BButton button = new BButton(label); button.setBackground(buttonColor); button.addEventLink(CommandEvent.class, target, method); return button; } private void captureOutput() { final OutputStream stream = new OutputStream() { private StringBuffer buffer = new StringBuffer(128); public void write(int b) throws IOException { final char c = (char) b; buffer.append(c); if (c == '\n' || buffer.length() > 1023) { flush(); } } public void flush() { output.append(buffer.toString()); currentTranscript.append(buffer); buffer.delete(0, buffer.length()); SwingUtilities.invokeLater(new Runnable() { public 
void run() { vscroll.setValue(vscroll.getMaximum()); } }); } }; this.systre.setOutStream(new PrintStream(stream)); } public void doOpen() { final boolean success = this.inFileChooser.showDialog(this); if (success) { this.netsToProcess = null; final String filename = this.inFileChooser.getSelectedFile().getName(); final File dir = this.inFileChooser.getDirectory(); final String path = new File(dir, filename).getAbsolutePath(); this.output.setText(""); disableMainButtons(); if (!this.readArchivesAsInput && filename.endsWith(".arc")) { systre.processArchive(path); enableMainButtons(); } else { openFile(path); doNext(); } } } public void doSave() { final String name = this.strippedFileName; this.outFileChooser.setSelectedFile(new File(name + ".out")); final boolean success = this.outFileChooser.showDialog(this); if (success) { final String filename = this.outFileChooser.getSelectedFile().getName(); final File dir = this.outFileChooser.getDirectory(); final File file = new File(dir, filename); final boolean append; if (file.exists()) { final int choice = new BStandardDialog("Systre - File exists", "File \"" + file + "\" exists. 
Overwrite?", BStandardDialog.QUESTION) .showOptionDialog(this, new String[] { "Overwrite", "Append", "Cancel" }, "Cancel"); if (choice > 1) { return; } else { append = choice == 1; } } else { append = false; } disableMainButtons(); new Thread(new Runnable() { public void run() { try { final BufferedWriter writer = new BufferedWriter(new FileWriter( file, append)); final int n = filename.lastIndexOf('.'); final String extension = filename.substring(n+1); if (singleWrite) { writeStructure(extension, writer, systre.getLastStructure(), lastFinishedTranscript); } else { for (final Iterator iter = bufferedNets.iterator(); iter .hasNext();) { final Pair item = (Pair) iter.next(); final ProcessedNet net = (ProcessedNet) item.getFirst(); final String transcript = (String) item.getSecond(); writeStructure(extension, writer, net, transcript); } } writer.flush(); writer.close(); } catch (IOException ex) { reportException(null, "FILE", "I/O error writing to " + file, false); } catch (Exception ex) { reportException(ex, "INTERNAL", "Unexpected exception while writing to " + file, true); } finally { enableMainButtons(); } } }).start(); } } private void writeStructure(final String extension, final BufferedWriter writer, final ProcessedNet net, final String transcript) throws IOException { if (net != null) { if ("arc".equals(extension)) { // --- write archive entry final String txt = new Archive.Entry(net.getGraph(), net.getName()) .toString(); writer.write(txt); writer.write("\n"); } else if ("cgd".equals(extension)) { // --- write embedding structure with full symmetry net.writeEmbedding(writer, true, systre.getOutputFullCell()); } else if ("pgr".equals(extension)) { // --- write abstract, unembedded periodic graph Output.writePGR(writer, net.getGraph().canonical(), net.getName()); writer.write("\n"); } else { final String lineSeparator = System.getProperty("line.separator"); // --- write the full transcript writer.write(transcript.replaceAll(lineSeparator, "\n")); 
writer.write("\n"); } } } public void doOptions() { final BDialog dialog = new BDialog(this, "Systre - Options", true); final ColumnContainer column = new ColumnContainer(); column.setDefaultLayout(new LayoutInfo(LayoutInfo.WEST, LayoutInfo.NONE, defaultInsets, null)); column.setBackground(textColor); try { column.add(new OptionCheckBox("Use Builtin Archive", this.systre, "useBuiltinArchive")); column.add(new OptionCheckBox("Process '.arc' files like normal input", this, "readArchivesAsInput")); column.add(new OptionCheckBox("Prefer Second Origin On Input", SpaceGroupCatalogue.class, "preferSecondOrigin")); column.add(new OptionCheckBox("Prefer Hexagonal Setting On Input", SpaceGroupCatalogue.class, "preferHexagonal")); column.add(new OptionCheckBox("Relax Node Positions", this.systre, "relaxPositions")); column.add(new OptionCheckBox("Output Full Conventional Cell", this.systre, "outputFullCell")); column.add(new OptionCheckBox("Save only last net finished", this, "singleWrite")); } catch (final Exception ex) { reportException(ex, "FATAL", "serious internal problem", true); return; } final BButton okButton = makeButton("Ok", dialog, "dispose"); column.add(okButton, new LayoutInfo(LayoutInfo.CENTER, LayoutInfo.NONE, defaultInsets, null)); dialog.setContent(column); dialog.addEventLink(WindowClosingEvent.class, dialog, "dispose"); dialog.pack(); dialog.setVisible(true); } public void doNext() { disableMainButtons(); new Thread(new Runnable() { public void run() { nextNet(); enableMainButtons(); } }).start(); } public boolean moreNets() { return this.netsToProcess != null && this.netsToProcess.hasNext(); } public void nextNet() { if (!moreNets()) { finishFile(); return; } final PrintStream out = this.systre.getOutStream(); InputStructure net = null; PeriodicGraph G = null; Exception problem = null; this.currentTranscript.delete(0, this.currentTranscript.length()); final class BailOut extends Throwable {} try { // --- read the next net try { net = (InputStructure) 
this.netsToProcess.next(); G = net.getGraph(); } catch (DataFormatException ex) { problem = ex; } catch (Exception ex) { reportException(ex, "INTERNAL", "Unexpected exception", true); throw new BailOut(); } if (G == null) { reportException(problem, "INPUT", null, false); throw new BailOut(); } ++this.count; // --- some blank lines as separators out.println(); if (this.count > 1) { out.println(); out.println(); } final String archiveName; final String displayName; if (net.getName() == null) { archiveName = this.strippedFileName + "-#" + this.count; displayName = ""; } else { archiveName = net.getName(); displayName = " - \"" + net.getName() + "\""; } out.println("Structure #" + this.count + displayName + "."); out.println(); boolean success = false; if (problem != null) { reportException(problem, "INPUT", null, false); } else { try { this.systre.processGraph(G, archiveName, net.getGroup()); success = true; } catch (SystreException ex) { reportException(ex, ex.getType().toString(), null, false); } catch (Exception ex) { reportException(ex, "INTERNAL", "Unexpected exception", true); } } out.println(); out.println("Finished structure #" + this.count + displayName + "."); this.lastFinishedTranscript = this.currentTranscript.toString(); if (success) { final ProcessedNet tmp = this.systre.getLastStructure(); this.bufferedNets.add(new Pair(tmp, this.lastFinishedTranscript)); } } catch (BailOut ex) { } if (!moreNets()) { finishFile(); } } private boolean openFile(final String filePath) { final PrintStream out = this.systre.getOutStream(); this.netsToProcess = null; this.count = 0; final BufferedReader reader; try { reader = new BufferedReader(new FileReader(filePath)); } catch (FileNotFoundException ex) { reportException(ex, "FILE", null, false); return false; } this.fullFileName = filePath; this.strippedFileName = new File(filePath).getName().replaceFirst("\\..*$", ""); final String extension = filePath.substring(filePath.lastIndexOf('.') + 1); out.println("Data file \"" + 
filePath + "\"."); this.bufferedNets.clear(); if ("cgd".equals(extension) || "pgr".equals(extension)) { final NetParser parser = new NetParser(reader); this.netsToProcess = new IteratorAdapter() { protected Object findNext() throws NoSuchElementException { if (parser.atEnd()) { throw new NoSuchElementException("at end"); } else { return new InputStructure(parser.parseNet(), parser.getName(), parser.getSpaceGroup()); } } }; return true; } else if ("ds".equals(extension)) { this.netsToProcess = new FilteredIterator(new InputIterator(reader)) { public Object filter(Object x) { final DelaneySymbol ds = (DelaneySymbol) x; final PeriodicGraph graph = new Skeleton(ds); final String group = (ds.dim() == 3) ? "P1" : "p1"; return new InputStructure(graph, null, group); } }; return true; } else if ("arc".equals(extension)) { this.netsToProcess = new IteratorAdapter() { protected Object findNext() throws NoSuchElementException { final Archive.Entry entry = Archive.Entry.read(reader); if (entry == null) { throw new NoSuchElementException("at end"); } final String key = entry.getKey(); final PeriodicGraph graph = PeriodicGraph.fromInvariantString(key); final String group = (graph.getDimension() == 3) ? 
"P1" : "p1"; return new InputStructure(graph, entry.getName(), group); } }; } else { reportException(null, "FILE", "Unrecognized extension " + extension, false); } return false; } private void finishFile() { final PrintStream out = this.systre.getOutStream(); out.println(); out.println("Finished data file \"" + this.fullFileName + "\"."); this.netsToProcess = null; } private void reportException(final Throwable ex, final String type, final String msg, final boolean details) { final PrintStream out = systre.getOutStream(); out.println(); if (details) { out.println("=================================================="); } final boolean cancelled = ex instanceof SystreException && ((SystreException) ex).getType().equals(SystreException.CANCELLED); final String text; if (cancelled) { text = "CANCELLING"; } else { text = "ERROR (" + type + ") - " + (msg == null ? "" : msg); } out.print("!!! " + text); if (ex != null) { if (details) { out.println(); out.print(Misc.stackTrace(ex)); out.println("=================================================="); } else { out.println(ex.getMessage() + "."); } } invokeAndWait(new Runnable() { public void run() { final String title = "Systre: " + type + " ERROR"; final String msg = text + (ex != null ? 
" - " + ex.getMessage() : "") + "."; final BStandardDialog dialog = new BStandardDialog(title, msg, BStandardDialog.ERROR); dialog.showMessageDialog(SystreGUI.this); } }); } private void disableMainButtons() { invokeAndWait(new Runnable() { public void run() { openButton.setEnabled(false); nextButton.setEnabled(false); saveButton.setEnabled(false); optionsButton.setEnabled(false); } }); } private void enableMainButtons() { invokeLater(new Runnable() { public void run() { openButton.setEnabled(true); if (moreNets()) { nextButton.setEnabled(true); } saveButton.setEnabled(true); optionsButton.setEnabled(true); } }); } public void doCancel() { this.systre.cancel(); } public void doQuit() { System.exit(0); } /** * Wrapper for {@link SwingUtilities.invokeAndWait}}. If we're in the event dispatch * thread, the argument is just invoked normally. * * @param runnable what to invoke. */ private void invokeAndWait(final Runnable runnable) { if (SwingUtilities.isEventDispatchThread()) { runnable.run(); } else { try { SwingUtilities.invokeAndWait(runnable); } catch (Exception ex) { } } } /** * Wrapper for {@link SwingUtilities.invokeLater}}. If we're in the event dispatch * thread, the argument is just invoked normally. * * @param runnable what to invoke. */ private void invokeLater(final Runnable runnable) { if (SwingUtilities.isEventDispatchThread()) { runnable.run(); } else { try { SwingUtilities.invokeLater(runnable); } catch (Exception ex) { } } } public boolean getSingleWrite() { return singleWrite; } public void setSingleWrite(boolean singleWrite) { this.singleWrite = singleWrite; } public boolean getReadArchivesAsInput() { return readArchivesAsInput; } public void setReadArchivesAsInput(boolean readArchivesAsInput) { this.readArchivesAsInput = readArchivesAsInput; } public static void main(final String args[]) { new SystreGUI(); } }
Catches status events from the SystreCmdline instance and displays them in the status line.
src/org/gavrog/systre/SystreGUI.java
Catches status events from the SystreCmdline instance and displays them in the status line.
<ide><path>rc/org/gavrog/systre/SystreGUI.java <ide> import buoy.widget.ColumnContainer; <ide> import buoy.widget.GridContainer; <ide> import buoy.widget.LayoutInfo; <del>import buoy.widget.Widget; <ide> <ide> /** <ide> * A simple GUI for Gavrog Systre. <ide> * <ide> * @author Olaf Delgado <del> * @version $Id: SystreGUI.java,v 1.43 2006/05/05 01:23:04 odf Exp $ <add> * @version $Id: SystreGUI.java,v 1.44 2006/05/05 06:41:52 odf Exp $ <ide> */ <ide> public class SystreGUI extends BFrame { <ide> // --- some constants used in the GUI <ide> final private BButton nextButton; <ide> final private BButton saveButton; <ide> final private BButton optionsButton; <del> final private Widget statusBar; <add> final private BLabel statusBar; <ide> <ide> // --- the object doing the actual processing <ide> private final SystreCmdline systre = new SystreCmdline(); <ide> top.add(buttonBar, BorderContainer.CENTER, new LayoutInfo(LayoutInfo.CENTER, <ide> LayoutInfo.HORIZONTAL, null, null)); <ide> <del> statusBar = new BLabel("<html><font color=\"green\">Okay!<font></html>"); <add> statusBar = new BLabel(); <ide> final BOutline outline = BOutline.createLineBorder(statusBar, Color.BLACK, 2); <ide> outline.setBackground(Color.WHITE); <ide> top.add(outline, BorderContainer.SOUTH, new LayoutInfo(LayoutInfo.WEST, <ide> outchsr.addChoosableFileFilter(new ExtensionFilter("pgr", "Abstract Topologies")); <ide> outchsr.addChoosableFileFilter(new ExtensionFilter("out", "Systre Transcripts")); <ide> <add> systre.addEventLink(String.class, this, "status"); <add> status("Your orders, Sir?"); <add> <ide> pack(); <ide> setVisible(true); <add> } <add> <add> public void status(final String text) { <add> SwingUtilities.invokeLater(new Runnable() { <add> public void run() { <add> statusBar.setText("<html><font color=\"green\">" + text <add> + "</font></html>"); <add> } <add> }); <ide> } <ide> <ide> private BButton makeButton(final String label, final Object target,
JavaScript
mit
192addeda7eb2caa334b81fe66846ccedf45f292
0
openvenues/node-postal,openvenues/node-postal,openvenues/node-postal
var postal = require('../'); var sets = require('simplesets'); var assert = require('assert'); describe('expand', function() { var expansions; var expansions2; describe('expand_address', function() { it('should correctly expand a US address', function() { expansions = new sets.Set(postal.expand.expand_address('781 Franklin Ave Crown Hts Brooklyn NY')); assert(expansions.has('781 franklin avenue crown heights brooklyn new york')); }); it('should correctly dedupe equivalent addresses', function() { expansions = new sets.Set(postal.expand.expand_address('30 West Twenty-sixth Street Floor Number 7', {languages: ['en']})); expansions2 = new sets.Set(postal.expand.expand_address('Thirty W 26th St Fl #7', {languages: ['en']})); assert(expansions.intersection(expansions2).size() > 0); }); it('should handle non-ASCII addresses correctly from Node', function() { expansions = new sets.Set(postal.expand.expand_address('Friedrichstraße 128, Berlin, Germany')); console.log(expansions); assert(expansions.has('friedrich strasse 128 berlin germany')); }); }) }) describe('parser', function() { var parsed = []; function has_components(parsed, components) { var expected = Object.keys(components).length; var got = 0; for (var i = 0; i < parsed.length; i++) { var el = parsed[i]; if (components[el.component] === el.value) { got++; } } return expected === got; } describe('parse_address', function() { it('should parse a US address', function() { parsed = postal.parser.parse_address('781 Franklin Ave Crown Heights Brooklyn NYC NY 11216 USA'); assert(has_components(parsed, { house_number: '781', road: 'franklin ave', suburb: 'crown heights', city_district: 'brooklyn', city: 'nyc', state: 'ny', postcode: '11216', country: 'usa' })); }); it('should parse a venue and state', function() { parsed = postal.parser.parse_address('whole foods ny'); assert(has_components(parsed, { house: 'whole foods', state: 'ny' })); }); it('should parse house/apt style house_number', function() { parsed = 
postal.parser.parse_address('1917/2 Pike Drive'); assert(has_components(parsed, { house_number: '1917 / 2', road: 'pike drive' })); }); it('should parse different comma styles correctly', function() { parsed = postal.parser.parse_address('3437 warwickshire rd,pa'); assert(has_components(parsed, { house_number: '3437', road: 'warwickshire rd', state: 'pa' })); parsed = postal.parser.parse_address('3437 warwickshire rd, pa'); assert(has_components(parsed, { house_number: '3437', road: 'warwickshire rd', state: 'pa' })); parsed = postal.parser.parse_address('3437 warwickshire rd pa'); assert(has_components(parsed, { house_number: '3437', road: 'warwickshire rd', state: 'pa' })); }); }) })
test/index.test.js
var postal = require('../'); var sets = require('simplesets'); var assert = require('assert'); describe('expand', function() { var expansions; var expansions2; describe('expand_address', function() { it('should correctly expand a US address', function() { expansions = new sets.Set(postal.expand.expand_address('781 Franklin Ave Crown Hts Brooklyn NY')); assert(expansions.has('781 franklin avenue crown heights brooklyn new york')); }); it('should correctly dedupe equivalent addresses', function() { expansions = new sets.Set(postal.expand.expand_address('30 West Twenty-sixth Street Floor Number 7', {languages: ['en']})); expansions2 = new sets.Set(postal.expand.expand_address('Thirty W 26th St Fl #7', {languages: ['en']})); assert(expansions.intersection(expansions2).size() > 0); }); it('should handle non-ASCII addresses correctly from Node', function() { expansions = new sets.Set(postal.expand.expand_address('Friedrichstraße 128, Berlin, Germany')); console.log(expansions); assert(expansions.has('friedrich straße 128 berlin germany')); }); }) }) describe('parser', function() { var parsed = []; function has_components(parsed, components) { var expected = Object.keys(components).length; var got = 0; for (var i = 0; i < parsed.length; i++) { var el = parsed[i]; if (components[el.component] === el.value) { got++; } } return expected === got; } describe('parse_address', function() { it('should parse a US address', function() { parsed = postal.parser.parse_address('781 Franklin Ave Crown Heights Brooklyn NYC NY 11216 USA'); assert(has_components(parsed, { house_number: '781', road: 'franklin ave', suburb: 'crown heights', city_district: 'brooklyn', city: 'nyc', state: 'ny', postcode: '11216', country: 'usa' })); }); it('should parse a venue and state', function() { parsed = postal.parser.parse_address('whole foods ny'); assert(has_components(parsed, { house: 'whole foods', state: 'ny' })); }); it('should parse house/apt style house_number', function() { parsed = 
postal.parser.parse_address('1917/2 Pike Drive'); assert(has_components(parsed, { house_number: '1917 / 2', road: 'pike drive' })); }); it('should parse different comma styles correctly', function() { parsed = postal.parser.parse_address('3437 warwickshire rd,pa'); assert(has_components(parsed, { house_number: '3437', road: 'warwickshire rd', state: 'pa' })); parsed = postal.parser.parse_address('3437 warwickshire rd, pa'); assert(has_components(parsed, { house_number: '3437', road: 'warwickshire rd', state: 'pa' })); parsed = postal.parser.parse_address('3437 warwickshire rd pa'); assert(has_components(parsed, { house_number: '3437', road: 'warwickshire rd', state: 'pa' })); }); }) })
[fix] test using the new libpostal expansions
test/index.test.js
[fix] test using the new libpostal expansions
<ide><path>est/index.test.js <ide> it('should handle non-ASCII addresses correctly from Node', function() { <ide> expansions = new sets.Set(postal.expand.expand_address('Friedrichstraße 128, Berlin, Germany')); <ide> console.log(expansions); <del> assert(expansions.has('friedrich straße 128 berlin germany')); <add> assert(expansions.has('friedrich strasse 128 berlin germany')); <ide> }); <ide> <ide> })
Java
apache-2.0
a2fd70eeab176b46cbd7c7851e4b2040898242f1
0
thkluge/gatling,ryez/gatling,pwielgolaski/gatling,GabrielPlassard/gatling,thkluge/gatling,ryez/gatling,wiacekm/gatling,thkluge/gatling,ryez/gatling,GabrielPlassard/gatling,timve/gatling,wiacekm/gatling,MykolaB/gatling,timve/gatling,pwielgolaski/gatling,pwielgolaski/gatling,timve/gatling,wiacekm/gatling,ryez/gatling,gatling/gatling,thkluge/gatling,gatling/gatling,wiacekm/gatling,gatling/gatling,MykolaB/gatling,pwielgolaski/gatling,wiacekm/gatling,timve/gatling,MykolaB/gatling,GabrielPlassard/gatling,GabrielPlassard/gatling,MykolaB/gatling,gatling/gatling,gatling/gatling
/** * Copyright 2011 eBusiness Information, Groupe Excilys (www.excilys.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.excilys.ebi.gatling.recorder.http.handler; import static com.excilys.ebi.gatling.recorder.http.event.RecorderEventBus.getEventBus; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelHandler; import org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpResponse; import com.excilys.ebi.gatling.recorder.http.event.MessageReceivedEvent; import com.excilys.ebi.gatling.recorder.http.event.ResponseReceivedEvent; public class ServerHttpResponseHandler extends SimpleChannelHandler { private final HttpRequest request; private final ChannelHandlerContext requestContext; public ServerHttpResponseHandler(ChannelHandlerContext context, HttpRequest request) { this.request = request; requestContext = context; } @Override public void messageReceived(ChannelHandlerContext context, MessageEvent event) throws Exception { getEventBus().post(new MessageReceivedEvent(context.getChannel())); HttpResponse response = HttpResponse.class.cast(event.getMessage()); getEventBus().post(new ResponseReceivedEvent(request, response)); // Send back to client requestContext.getChannel().write(response); requestContext.sendUpstream(event); context.sendUpstream(event); } }
gatling-recorder/src/main/java/com/excilys/ebi/gatling/recorder/http/handler/ServerHttpResponseHandler.java
/** * Copyright 2011 eBusiness Information, Groupe Excilys (www.excilys.com) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.excilys.ebi.gatling.recorder.http.handler; import static com.excilys.ebi.gatling.recorder.http.event.RecorderEventBus.getEventBus; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelHandler; import org.jboss.netty.handler.codec.http.HttpChunk; import org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpResponse; import org.jboss.netty.util.CharsetUtil; import com.excilys.ebi.gatling.recorder.http.event.MessageReceivedEvent; import com.excilys.ebi.gatling.recorder.http.event.ResponseReceivedEvent; public class ServerHttpResponseHandler extends SimpleChannelHandler { private final HttpRequest request; private final ChannelHandlerContext requestContext; private boolean readingChunks; public ServerHttpResponseHandler(ChannelHandlerContext context, HttpRequest request) { this.request = request; requestContext = context; } @Override public void messageReceived(ChannelHandlerContext context, MessageEvent event) throws Exception { if (!readingChunks) { HttpResponse response = (HttpResponse) event.getMessage(); System.out.println("STATUS: " + response.getStatus()); System.out.println("VERSION: " + response.getProtocolVersion()); System.out.println(); if 
(!response.getHeaderNames().isEmpty()) { for (String name: response.getHeaderNames()) { for (String value: response.getHeaders(name)) { System.out.println("HEADER: " + name + " = " + value); } } System.out.println(); } if (response.isChunked()) { readingChunks = true; System.out.println("CHUNKED CONTENT {"); } else { ChannelBuffer content = response.getContent(); if (content.readable()) { System.out.println("CONTENT {"); System.out.println(content.toString(CharsetUtil.UTF_8)); System.out.println("} END OF CONTENT"); } } } else { HttpChunk chunk = (HttpChunk) event.getMessage(); if (chunk.isLast()) { readingChunks = false; System.out.println("} END OF CHUNKED CONTENT"); } else { System.out.print(chunk.getContent().toString(CharsetUtil.UTF_8)); System.out.flush(); } } getEventBus().post(new MessageReceivedEvent(context.getChannel())); HttpResponse response = HttpResponse.class.cast(event.getMessage()); getEventBus().post(new ResponseReceivedEvent(request, response)); // Send back to client requestContext.getChannel().write(response); requestContext.sendUpstream(event); context.sendUpstream(event); } }
remove response dump
gatling-recorder/src/main/java/com/excilys/ebi/gatling/recorder/http/handler/ServerHttpResponseHandler.java
remove response dump
<ide><path>atling-recorder/src/main/java/com/excilys/ebi/gatling/recorder/http/handler/ServerHttpResponseHandler.java <ide> <ide> import static com.excilys.ebi.gatling.recorder.http.event.RecorderEventBus.getEventBus; <ide> <del>import org.jboss.netty.buffer.ChannelBuffer; <ide> import org.jboss.netty.channel.ChannelHandlerContext; <ide> import org.jboss.netty.channel.MessageEvent; <ide> import org.jboss.netty.channel.SimpleChannelHandler; <del>import org.jboss.netty.handler.codec.http.HttpChunk; <ide> import org.jboss.netty.handler.codec.http.HttpRequest; <ide> import org.jboss.netty.handler.codec.http.HttpResponse; <del>import org.jboss.netty.util.CharsetUtil; <ide> <ide> import com.excilys.ebi.gatling.recorder.http.event.MessageReceivedEvent; <ide> import com.excilys.ebi.gatling.recorder.http.event.ResponseReceivedEvent; <ide> public class ServerHttpResponseHandler extends SimpleChannelHandler { <ide> private final HttpRequest request; <ide> private final ChannelHandlerContext requestContext; <del> private boolean readingChunks; <ide> <ide> public ServerHttpResponseHandler(ChannelHandlerContext context, HttpRequest request) { <ide> this.request = request; <ide> @Override <ide> public void messageReceived(ChannelHandlerContext context, MessageEvent event) throws Exception { <ide> <del> if (!readingChunks) { <del> HttpResponse response = (HttpResponse) event.getMessage(); <del> <del> System.out.println("STATUS: " + response.getStatus()); <del> System.out.println("VERSION: " + response.getProtocolVersion()); <del> System.out.println(); <del> <del> if (!response.getHeaderNames().isEmpty()) { <del> for (String name: response.getHeaderNames()) { <del> for (String value: response.getHeaders(name)) { <del> System.out.println("HEADER: " + name + " = " + value); <del> } <del> } <del> System.out.println(); <del> } <del> <del> if (response.isChunked()) { <del> readingChunks = true; <del> System.out.println("CHUNKED CONTENT {"); <del> } else { <del> ChannelBuffer content = 
response.getContent(); <del> if (content.readable()) { <del> System.out.println("CONTENT {"); <del> System.out.println(content.toString(CharsetUtil.UTF_8)); <del> System.out.println("} END OF CONTENT"); <del> } <del> } <del> } else { <del> HttpChunk chunk = (HttpChunk) event.getMessage(); <del> if (chunk.isLast()) { <del> readingChunks = false; <del> System.out.println("} END OF CHUNKED CONTENT"); <del> } else { <del> System.out.print(chunk.getContent().toString(CharsetUtil.UTF_8)); <del> System.out.flush(); <del> } <del> } <del> <ide> getEventBus().post(new MessageReceivedEvent(context.getChannel())); <ide> <ide> HttpResponse response = HttpResponse.class.cast(event.getMessage());
JavaScript
mit
2fc273d2c9f81704e68a22ee0d7245c911811563
0
node-js-libs/curlrequest,chriso/curlrequest
var child = require('child_process') , proxy = require('./proxy') , cwd = process.cwd(); /** * Make some curl opts friendlier. */ var curl_map = { timeout: 'max-time' , redirects: 'max-redirs' }; /** * Default user-agents. */ var user_agents = [ 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.905.0 Safari/535.7' , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)' , 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0)' , 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 6.0)' , 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13' , 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6' , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30)' , 'Opera/9.20 (Windows NT 6.0; U; en)' , 'Mozilla/5.0 (Windows; U; Windows NT 6.1; ru; rv:1.9.2) Gecko/20100115 Firefox/3.6' , 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; MS-RTC LM 8)' , 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/6.0' , 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_7; en-us) AppleWebKit/533.4 (KHTML, like Gecko) Version/4.1 Safari/533.4' , 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_6) AppleWebKit/534.22 (KHTML, like Gecko) Chrome/11.0.683.0 Safari/534.22' ], user_agent_len = user_agents.length; /** * Default request headers. */ var default_headers = { 'Accept': '*/*' , 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3' , 'Accept-Language': 'en-US,en;q=0.8' }; /** * Make a request with cURL. 
* * @param {Object|String} options (optional) - see `man curl` * @param {Function} callback (optional) * @api public */ module.exports = function (options, callback) { if (arguments.length === 1) { var defaults = options; var fn = function (options, callback) { if (typeof options === 'function') { callback = options; options = {}; } else if (typeof options === 'string') { options = { url: options }; } for (var key in defaults) { if (typeof options[key] === 'undefined') { options[key] = defaults[key]; } } module.exports.call(this, options, callback); }; fn.urls = module.exports.urls; return fn; } if (options.retries) { var remaining = options.retries; delete options.retries; return (function curl() { module.exports(options, function (err) { if (!err || !--remaining) { return callback.apply(this, arguments); } process.nextTick(curl); }); })(); } if (typeof options === 'string') { options = { url: options }; } for (var key in curl_map) { if (typeof options[key] !== 'undefined') { options[curl_map[key]] = options[key]; delete options[key]; } } if (options.proxies) { if (!proxy.transform) { proxy.transform = proxy.unpack(options.key).transform; } options = proxy.transform(options); delete options.key; } var curl , args = ['--silent', '--show-error', '--no-buffer'] , start = new Date , stderr = '' , stdoutlen , stdout = new Buffer(stdoutlen = 0) , encoding , complete , cleanup , postprocess , scope = {} , timeout; function finish() { callback.call(scope, stderr, stdout, { cmd: 'curl ' + args.join(' ') , time: (new Date().getTime() - start.getTime()) }); complete = true; } //Follow location by default if (!options['max-redirs']) { options.location = true; options['max-redirs'] = 3; } //Add an additional setTimeout for max-time if (options['max-time']) { timeout = setTimeout(function () { if (complete) return; stderr = 'timeout', stdout = null; finish(); if (curl && curl.kill) curl.kill('SIGKILL'); }, 1000 * options['max-time']); } //Default encoding is utf8. 
Set encoding = null to get a buffer if (!options.encoding && options.encoding !== null) { options.encoding = 'utf8'; } if (options.encoding) { encoding = options.encoding; if (encoding === 'ascii') { options['use-ascii'] = true; } delete options.encoding; } //Call the callback in a custom scope if (options.scope) { scope = options.scope; delete options.scope; } //Apply a post-processing function? if (options.process) { postprocess = options.process; delete options.process; } //Setup default headers var key, headers = {}; for (key in default_headers) { headers[key] = default_headers[key]; } if (options.headers) { for (key in options.headers) { //TODO: Fix header keys, encode values? headers[key] = options.headers[key]; } delete options.headers; } options.header = options.header || []; for (key in headers) { options.header.push(key + ': ' + headers[key]); } //Select a random user agent if one wasn't provided if (!headers['User-Agent'] && !options['user-agent']) { options['user-agent'] = user_agents[Math.random() * user_agent_len | 0]; } //Prepare curl args var key, values; for (key in options) { values = Array.isArray(options[key]) ? options[key] : [options[key]]; values.forEach(function (value) { args.push('--' + key); if (true !== value) { args.push(value); } }); } //Spawn the curl process curl = child.spawn('curl', args, { cwd: options.cwd || cwd }); //Collection stdout curl.stdout.on('data', function (data) { if (complete) return; var len = data.length, prev = stdout; stdout = new Buffer(len + stdoutlen); prev.copy(stdout, 0, 0, stdoutlen); data.copy(stdout, stdoutlen, 0, len); stdoutlen += len; }); //Collect stderr curl.stderr.setEncoding('utf8'); curl.stderr.on('data', function (data) { if (complete) return; stderr += data; }); //Handle curl exit curl.on('exit', function () { if (complete) return; stderr = stderr.length ? 
stderr.trim().split('\n',1)[0] : null; if (encoding) { stdout = stdout.toString(encoding); } if (postprocess) { stdout = postprocess(stdout); } finish(); if (timeout) clearTimeout(timeout); }); //For piping return curl.stdout; }; /** * Expose a helper for scraping urls from a page. */ var urls = /(?:href|src|HREF|SRC)=["']?([^"' >]+)/g; module.exports.urls = function (data, regex) { var match, matches = []; while (match = urls.exec(data)) { if (regex && !regex.test(match[1])) { continue; } matches.push(match[1]); } return matches; };
index.js
var child = require('child_process') , proxy = require('./proxy') , cwd = process.cwd(); /** * Make some curl opts friendlier. */ var curl_map = { timeout: 'max-time' , redirects: 'max-redirs' }; /** * Default user-agents. */ var user_agents = [ 'User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_2) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.905.0 Safari/535.7' , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0)' , 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0)' , 'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 6.0)' , 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.2.13) Gecko/20101203 Firefox/3.6.13' , 'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-GB; rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6' , 'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30)' , 'Opera/9.20 (Windows NT 6.0; U; en)' , 'Mozilla/5.0 (Windows; U; Windows NT 6.1; ru; rv:1.9.2) Gecko/20100115 Firefox/3.6' , 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; MS-RTC LM 8)' , 'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/533.2 (KHTML, like Gecko) Chrome/6.0' , 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_7; en-us) AppleWebKit/533.4 (KHTML, like Gecko) Version/4.1 Safari/533.4' , 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_6) AppleWebKit/534.22 (KHTML, like Gecko) Chrome/11.0.683.0 Safari/534.22' ], user_agent_len = user_agents.length; /** * Default request headers. */ var default_headers = { 'Accept': '*/*' , 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3' , 'Accept-Language': 'en-US,en;q=0.8' }; /** * Make a request with cURL. 
* * @param {Object|String} options (optional) - see `man curl` * @param {Function} callback (optional) * @api public */ module.exports = function (options, callback) { if (arguments.length === 1) { var defaults = options; var fn = function (options, callback) { if (typeof options === 'function') { callback = options; options = {}; } else if (typeof options === 'string') { options = { url: options }; } for (var key in defaults) { if (typeof options[key] === 'undefined') { options[key] = defaults[key]; } } module.exports.call(this, options, callback); }; fn.urls = module.exports.urls; return fn; } if (options.retries) { var remaining = options.retries; delete options.retries; return (function curl() { module.exports(options, function (err) { console.log('[%s] %s', remaining, err); if (!err || !--remaining) { return callback.apply(this, arguments); } process.nextTick(curl); }); })(); } if (typeof options === 'string') { options = { url: options }; } for (var key in curl_map) { if (typeof options[key] !== 'undefined') { options[curl_map[key]] = options[key]; delete options[key]; } } if (options.proxies) { if (!proxy.transform) { proxy.transform = proxy.unpack(options.key).transform; } options = proxy.transform(options); delete options.key; } var curl , args = ['--silent', '--show-error', '--no-buffer'] , start = new Date , stderr = '' , stdoutlen , stdout = new Buffer(stdoutlen = 0) , encoding , complete , cleanup , postprocess , scope = {} , timeout; function finish() { callback.call(scope, stderr, stdout, { cmd: 'curl ' + args.join(' ') , time: (new Date().getTime() - start.getTime()) }); complete = true; } //Follow location by default if (!options['max-redirs']) { options.location = true; options['max-redirs'] = 3; } //Add an additional setTimeout for max-time if (options['max-time']) { timeout = setTimeout(function () { if (complete) return; stderr = 'timeout', stdout = null; finish(); if (curl && curl.kill) curl.kill('SIGKILL'); }, 1000 * options['max-time']); } 
//Default encoding is utf8. Set encoding = null to get a buffer if (!options.encoding && options.encoding !== null) { options.encoding = 'utf8'; } if (options.encoding) { encoding = options.encoding; if (encoding === 'ascii') { options['use-ascii'] = true; } delete options.encoding; } //Call the callback in a custom scope if (options.scope) { scope = options.scope; delete options.scope; } //Apply a post-processing function? if (options.process) { postprocess = options.process; delete options.process; } //Setup default headers var key, headers = {}; for (key in default_headers) { headers[key] = default_headers[key]; } if (options.headers) { for (key in options.headers) { //TODO: Fix header keys, encode values? headers[key] = options.headers[key]; } delete options.headers; } options.header = options.header || []; for (key in headers) { options.header.push(key + ': ' + headers[key]); } //Select a random user agent if one wasn't provided if (!headers['User-Agent'] && !options['user-agent']) { options['user-agent'] = user_agents[Math.random() * user_agent_len | 0]; } //Prepare curl args var key, values; for (key in options) { values = Array.isArray(options[key]) ? options[key] : [options[key]]; values.forEach(function (value) { args.push('--' + key); if (true !== value) { args.push(value); } }); } //Spawn the curl process curl = child.spawn('curl', args, { cwd: options.cwd || cwd }); //Collection stdout curl.stdout.on('data', function (data) { if (complete) return; var len = data.length, prev = stdout; stdout = new Buffer(len + stdoutlen); prev.copy(stdout, 0, 0, stdoutlen); data.copy(stdout, stdoutlen, 0, len); stdoutlen += len; }); //Collect stderr curl.stderr.setEncoding('utf8'); curl.stderr.on('data', function (data) { if (complete) return; stderr += data; }); //Handle curl exit curl.on('exit', function () { if (complete) return; stderr = stderr.length ? 
stderr.trim().split('\n',1)[0] : null; if (encoding) { stdout = stdout.toString(encoding); } if (postprocess) { stdout = postprocess(stdout); } finish(); if (timeout) clearTimeout(timeout); }); //For piping return curl.stdout; }; /** * Expose a helper for scraping urls from a page. */ var urls = /(?:href|src|HREF|SRC)=["']?([^"' >]+)/g; module.exports.urls = function (data, regex) { var match, matches = []; while (match = urls.exec(data)) { if (regex && !regex.test(match[1])) { continue; } matches.push(match[1]); } return matches; };
Removed debug line
index.js
Removed debug line
<ide><path>ndex.js <ide> delete options.retries; <ide> return (function curl() { <ide> module.exports(options, function (err) { <del> console.log('[%s] %s', remaining, err); <ide> if (!err || !--remaining) { <ide> return callback.apply(this, arguments); <ide> }
Java
apache-2.0
4c6918b875594c72dfee5b2b36550855b8b6dc09
0
ilya-moskovtsev/imoskovtsev,ilya-moskovtsev/imoskovtsev
package ru.job4j.condition; /** * Точка в системе координат. */ public class Point { /** * Координата x. */ private int x; /** * Координата y. */ private int y; /** * @param x - координата x. * @param y - координата y. * Конструктор. Устанавливает значения x, y. */ public Point(int x, int y) { this.x = x; this.y = y; } /** * @return x - координата x. * Возвращает x. */ public int getX() { return this.x; } /** * @return y - координата y. * Возвращает y. */ public int getY() { return this.y; } /** * @param a - параметр фукнции y(x) = a * x + b. * @param b - параметр фукнции y(x) = a * x + b. * @return boolean - находится ли точка на фукнции. * Определяет находится ли точка на фукнции y(x) = a * x + b. */ public boolean is(int a, int b) { return this.y == a * this.x + b ? true : false; } }
chapter_001/src/main/java/ru/job4j/condition/Point.java
package ru.job4j.condition; /** * Точка в системе координат. */ public class Point { /** * Координата x. */ private int x; /** * Координата y. */ private int y; /** * @param x - координата x. * @param y - координата y. * Конструктор. Устанавливает значения x, y. */ public Point(int x, int y) { this.x = x; this.y = y; } /** * @return x - координата x. * Возвращает x. */ public int getX() { return this.x; } /** * @return y - координата y. * Возвращает y. */ public int getY() { return this.y; } /** * @param a - параметр фукнции y(x) = a * x + b. * @param b - параметр фукнции y(x) = a * x + b. * @return boolean - находится ли точка на фукнции. * Определяет находится ли точка на фукнции y(x) = a * x + b. */ public boolean is(int a, int b) { return getY() == a * getX() + b ? true : false; } }
3.2. Положение точки [#10129]
chapter_001/src/main/java/ru/job4j/condition/Point.java
3.2. Положение точки [#10129]
<ide><path>hapter_001/src/main/java/ru/job4j/condition/Point.java <ide> * Определяет находится ли точка на фукнции y(x) = a * x + b. <ide> */ <ide> public boolean is(int a, int b) { <del> return getY() == a * getX() + b ? true : false; <add> return this.y == a * this.x + b ? true : false; <ide> } <ide> }
Java
apache-2.0
0e2bebdf0554f3aca9546ae797308fe83b3f5ab5
0
springrichclient/springrcp,springrichclient/springrcp,springrichclient/springrcp
/* * Copyright 2002-2006 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.springframework.richclient.samples.simple.ui; import java.awt.BorderLayout; import javax.swing.BorderFactory; import javax.swing.JComponent; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTextField; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.ObjectFactory; import org.springframework.binding.value.ValueModel; import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationListener; import org.springframework.richclient.application.PageComponentContext; import org.springframework.richclient.application.event.LifecycleApplicationEvent; import org.springframework.richclient.application.support.AbstractView; import org.springframework.richclient.command.CommandGroup; import org.springframework.richclient.command.support.AbstractActionCommandExecutor; import org.springframework.richclient.command.support.GlobalCommandIds; import org.springframework.richclient.dialog.ConfirmationDialog; import org.springframework.richclient.list.ListSingleSelectionGuard; import org.springframework.richclient.samples.simple.domain.Contact; import org.springframework.richclient.samples.simple.domain.ContactDataStore; import org.springframework.util.Assert; 
import ca.odell.glazedlists.EventList; import ca.odell.glazedlists.FilterList; import ca.odell.glazedlists.GlazedLists; import ca.odell.glazedlists.TextFilterator; import ca.odell.glazedlists.swing.TextComponentMatcherEditor; /** * This class provides the main view of the contacts. It provides a table showing the * contact objects and a quick filter field to narrow down the list of visible contacts. * Several commands are tied to the selection of the contacts table * <p> * By implementing special tag interfaces, this component will be automatically wired in * to certain events of interest. * <ul> * <li><b>InitializingBean</b> - when this bean has been constructed and all the * properties have been set, the bean factory will call the {@link #afterPropertiesSet()} * method. We use that call to verify that all the required configuration has been done.</li> * <li><b>ApplicationListener</b> - This component will be automatically registered as a * listener for application events.</li> * </ul> * * @author Larry Streepy * */ public class ContactView extends AbstractView implements InitializingBean, ApplicationListener { private final Log _logger = LogFactory.getLog(getClass()); /** * The ObjectFactory for creating instances of our contact table. Initialized in the * context. */ private ObjectFactory contactTableFactory; /** The object table holding our contacts. */ private ContactTable contactTable; /** The data store holding all our contacts. */ private ContactDataStore contactDataStore; /** This is the entry field for the name/address filter. */ private JTextField txtFilter = new JTextField(); /** The executor to handle the "properties" command. */ private PropertiesExecutor propertiesExecutor = new PropertiesExecutor(); /** The executor to handle the "delete" command. */ private DeleteExecutor deleteExecutor = new DeleteExecutor(); /** The group Id for the popup menu. */ private static final String POPUP_COMMAND_ID = "contactViewPopupMenu"; /** * Default constructor. 
*/ public ContactView() { } /** * This method is called automatically after this bean has had all its properties set * by the bean factory. This happens because this class implements the * {@link InitializingBean} interface. * * @throws Exception */ public void afterPropertiesSet() throws Exception { // Verify that we have been properly configured Assert.state(getContactTableFactory() != null, "contactTableFactory must be set"); } /** * Create the control for this view. This method is called by the platform in order to * obtain the control to add to the surrounding window and page. * * @return component holding this view */ protected JComponent createControl() { prepareTable(); // Start by preparing the main table JPanel view = new JPanel(new BorderLayout()); JScrollPane sp = getComponentFactory().createScrollPane(contactTable.getTable()); // Now the filter controls JPanel filterPanel = new JPanel(new BorderLayout()); JLabel lblFilter = getComponentFactory().createLabel("nameAddressFilter.label"); filterPanel.add(lblFilter, BorderLayout.WEST); String tip = getMessage("nameAddressFilter.caption"); txtFilter.setToolTipText(tip); filterPanel.add(txtFilter, BorderLayout.CENTER); filterPanel.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10)); view.add(sp, BorderLayout.CENTER); view.add(filterPanel, BorderLayout.NORTH); return view; } /** * Register the local command executors to be associated with named commands. This is * called by the platform prior to making the view visible. */ protected void registerLocalCommandExecutors( PageComponentContext context ) { context.register(GlobalCommandIds.PROPERTIES, propertiesExecutor); context.register(GlobalCommandIds.DELETE, deleteExecutor); } /** * @return the contactTableFactory */ public ObjectFactory getContactTableFactory() { return contactTableFactory; } /** * Set the contact table factory to use for our view. Since the table contains state, * we need a new instance for every instance of this view. 
However, the view instances * are not created directly in the application context, so they can not be directly * configured with injected objects. Each instance is configured by the platform from * the "viewProperties" value. Since that list is simply retained by the view * descriptor and then used to configure each new view instance, the application * context is not involved, so non-singleton bean references wouldn't generate a new * instance. So, using this factory mechanism allows us to generate new table * instances on the fly without having to directly access the context and call * getBean, which would break the Inversion of Control model. * * @param contactTableFactory the contactTableFactory to set */ public void setContactTableFactory( ObjectFactory contactTableFactory ) { this.contactTableFactory = contactTableFactory; } /** * @return the contactDataStore */ public ContactDataStore getContactDataStore() { return contactDataStore; } /** * @param contactDataStore the contactDataStore to set */ public void setContactDataStore( ContactDataStore contactDataStore ) { this.contactDataStore = contactDataStore; } /** * Prepare the table holding all the Contact objects. This table provides pretty much * all the functional operations within this view. Prior to calling this method the * {@link #setContactTable(ContactTable)} will have already been called as part of the * context bean creation. */ private void prepareTable() { // Get the table instance from our factory try { contactTable = (ContactTable) getContactTableFactory().getObject(); } catch( Exception e ) { _logger.error("Failed to generate new contactTable", e); throw new RuntimeException("Failed to generate new contactTable", e); } // Make a double click invoke the properties dialog and plugin the // context menu contactTable.setDoubleClickHandler(propertiesExecutor); // Get the popup menu definition from the command manager (as defined in // the commands-context.xml file). 
CommandGroup popup = getWindowCommandManager().getCommandGroup(POPUP_COMMAND_ID); contactTable.setPopupCommandGroup(popup); // Construct and install our filtering list. This filter will allow the user // to simply type data into the txtFilter (JTextField). With the configuration // setup below, the text entered by the user will be matched against the values // in the lastName and address.address1 properties of the contacts in the table. // The GlazedLists filtered lists is used to accomplish this. EventList baseList = contactTable.getBaseEventList(); TextFilterator filterator = GlazedLists.textFilterator(new String[] { "lastName", "address.address1" }); FilterList filterList = new FilterList(baseList, new TextComponentMatcherEditor(txtFilter, filterator)); // Install the fully constructed (layered) list into the table contactTable.setFinalEventList(filterList); // Register to get notified when the filtered list changes contactTable.reportToStatusBar(getStatusBar()); // Ensure our commands are only active when something is selected. // These guard objects operate by inspecting a list selection model // (held within a ValueModel) and then either enabling or disabling the // guarded object (our executors) based on the configured criteria. // This configuration greatly simplifies the interaction between commands // that require a selection on which to operate. ValueModel selectionHolder = contactTable.getTableSelectionHolder(); new ListSingleSelectionGuard(selectionHolder, deleteExecutor); new ListSingleSelectionGuard(selectionHolder, propertiesExecutor); } /** * Handle an application event. This will notify us of object adds, deletes, and * modifications. Our object table takes care of updating itself, so we don't have * anything to do. * * @param e event to process */ public void onApplicationEvent( ApplicationEvent e ) { if( _logger.isInfoEnabled() ) { _logger.info("Got event: " + e); } } /** * Private inner class to handle the properties form display. 
*/ private class PropertiesExecutor extends AbstractActionCommandExecutor { /** * Execute this command. */ public void execute() { Contact contact = contactTable.getSelectedContacts()[0]; // Get the dialog from the application context since it is a managed bean ContactPropertiesDialog dlg = (ContactPropertiesDialog) getApplicationContext().getBean( "contactPropertiesDialog"); // Tell it what object to edit and execute the command dlg.setContact(contact); dlg.execute(); } } /** * Private class to handle the delete command. Note that due to the configuration * above, this executor is only enabled when exactly one contact is selected in the * table. Thus, we don't have to protect against being executed with an incorrect * state. */ private class DeleteExecutor extends AbstractActionCommandExecutor { public DeleteExecutor() { getApplicationServices().configure(this, "delete"); } /** * Execute this command. */ public void execute() { // We know exactly one contact will be selected at this time because // of the guards put in place in prepareTable. final Contact contact = contactTable.getSelectedContacts()[0]; // Query the user to be sure they want to do this String title = getMessage("contact.confirmDelete.title"); String message = getMessage("contact.confirmDelete.message"); ConfirmationDialog dlg = new ConfirmationDialog(title, message) { protected void onConfirm() { // Delete the object from the persistent store. getContactDataStore().delete(contact); // And notify the rest of the application of the change getApplicationContext().publishEvent( new LifecycleApplicationEvent(LifecycleApplicationEvent.DELETED, contact)); } }; dlg.showDialog(); } } }
samples/simple/src/main/java/org/springframework/richclient/samples/simple/ui/ContactView.java
/* * Copyright 2002-2006 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package org.springframework.richclient.samples.simple.ui; import java.awt.BorderLayout; import javax.swing.BorderFactory; import javax.swing.JComponent; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JScrollPane; import javax.swing.JTextField; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.ObjectFactory; import org.springframework.binding.value.ValueModel; import org.springframework.context.ApplicationEvent; import org.springframework.context.ApplicationListener; import org.springframework.richclient.application.PageComponentContext; import org.springframework.richclient.application.event.LifecycleApplicationEvent; import org.springframework.richclient.application.support.AbstractView; import org.springframework.richclient.command.CommandGroup; import org.springframework.richclient.command.support.AbstractActionCommandExecutor; import org.springframework.richclient.command.support.GlobalCommandIds; import org.springframework.richclient.dialog.ConfirmationDialog; import org.springframework.richclient.list.ListSingleSelectionGuard; import org.springframework.richclient.samples.simple.domain.Contact; import org.springframework.richclient.samples.simple.domain.ContactDataStore; import org.springframework.util.Assert; 
import ca.odell.glazedlists.EventList; import ca.odell.glazedlists.FilterList; import ca.odell.glazedlists.GlazedLists; import ca.odell.glazedlists.TextFilterator; import ca.odell.glazedlists.swing.TextComponentMatcherEditor; /** * This class provides the main view of the contacts. It provides a table showing the * contact objects and a quick filter field to narrow down the list of visible contacts. * Several commands are tied to the selection of the contacts table * <p> * By implementing special tag interfaces, this component will be automatically wired in * to certain events of interest. * <ul> * <li><b>InitializingBean</b> - when this bean has been constructed and all the * properties have been set, the bean factory will call the {@link #afterPropertiesSet()} * method. We use that call to verify that all the required configuration has been done.</li> * <li><b>ApplicationListener</b> - This component will be automatically registered as a * listener for application events.</li> * </ul> * * @author Larry Streepy * */ public class ContactView extends AbstractView implements InitializingBean, ApplicationListener { private final Log _logger = LogFactory.getLog(getClass()); /** * The ObjectFactory for creating instances of our contact table. Initialized in the * context. */ private ObjectFactory contactTableFactory; /** The object table holding our contacts. */ private ContactTable contactTable; /** The data store holding all our contacts. */ private ContactDataStore contactDataStore; /** This is the entry field for the name/address filter. */ private JTextField txtFilter = new JTextField(); /** The executor to handle the "properties" command. */ private PropertiesExecutor propertiesExecutor = new PropertiesExecutor(); /** The executor to handle the "delete" command. */ private DeleteExecutor deleteExecutor = new DeleteExecutor(); /** The command Id of the Delete command. 
*/ private static final String DELETE_COMMAND_ID = "deleteCommand"; /** The group Id for the popup menu. */ private static final String POPUP_COMMAND_ID = "contactViewPopupMenu"; /** * Default constructor. */ public ContactView() { } /** * This method is called automatically after this bean has had all its properties set * by the bean factory. This happens because this class implements the * {@link InitializingBean} interface. * * @throws Exception */ public void afterPropertiesSet() throws Exception { // Verify that we have been properly configured Assert.state(getContactTableFactory() != null, "contactTableFactory must be set"); } /** * Create the control for this view. This method is called by the platform in order to * obtain the control to add to the surrounding window and page. * * @return component holding this view */ protected JComponent createControl() { prepareTable(); // Start by preparing the main table JPanel view = new JPanel(new BorderLayout()); JScrollPane sp = getComponentFactory().createScrollPane(contactTable.getTable()); // Now the filter controls JPanel filterPanel = new JPanel(new BorderLayout()); JLabel lblFilter = getComponentFactory().createLabel("nameAddressFilter.label"); filterPanel.add(lblFilter, BorderLayout.WEST); String tip = getMessage("nameAddressFilter.caption"); txtFilter.setToolTipText(tip); filterPanel.add(txtFilter, BorderLayout.CENTER); filterPanel.setBorder(BorderFactory.createEmptyBorder(10, 10, 10, 10)); view.add(sp, BorderLayout.CENTER); view.add(filterPanel, BorderLayout.NORTH); return view; } /** * Register the local command executors to be associated with named commands. This is * called by the platform prior to making the view visible. 
*/ protected void registerLocalCommandExecutors( PageComponentContext context ) { context.register(GlobalCommandIds.PROPERTIES, propertiesExecutor); context.register(DELETE_COMMAND_ID, deleteExecutor); } /** * @return the contactTableFactory */ public ObjectFactory getContactTableFactory() { return contactTableFactory; } /** * Set the contact table factory to use for our view. Since the table contains state, * we need a new instance for every instance of this view. However, the view instances * are not created directly in the application context, so they can not be directly * configured with injected objects. Each instance is configured by the platform from * the "viewProperties" value. Since that list is simply retained by the view * descriptor and then used to configure each new view instance, the application * context is not involved, so non-singleton bean references wouldn't generate a new * instance. So, using this factory mechanism allows us to generate new table * instances on the fly without having to directly access the context and call * getBean, which would break the Inversion of Control model. * * @param contactTableFactory the contactTableFactory to set */ public void setContactTableFactory( ObjectFactory contactTableFactory ) { this.contactTableFactory = contactTableFactory; } /** * @return the contactDataStore */ public ContactDataStore getContactDataStore() { return contactDataStore; } /** * @param contactDataStore the contactDataStore to set */ public void setContactDataStore( ContactDataStore contactDataStore ) { this.contactDataStore = contactDataStore; } /** * Prepare the table holding all the Contact objects. This table provides pretty much * all the functional operations within this view. Prior to calling this method the * {@link #setContactTable(ContactTable)} will have already been called as part of the * context bean creation. 
*/ private void prepareTable() { // Get the table instance from our factory try { contactTable = (ContactTable) getContactTableFactory().getObject(); } catch( Exception e ) { _logger.error("Failed to generate new contactTable", e); throw new RuntimeException("Failed to generate new contactTable", e); } // Make a double click invoke the properties dialog and plugin the // context menu contactTable.setDoubleClickHandler(propertiesExecutor); // Get the popup menu definition from the command manager (as defined in // the commands-context.xml file). CommandGroup popup = getWindowCommandManager().getCommandGroup(POPUP_COMMAND_ID); contactTable.setPopupCommandGroup(popup); // Construct and install our filtering list. This filter will allow the user // to simply type data into the txtFilter (JTextField). With the configuration // setup below, the text entered by the user will be matched against the values // in the lastName and address.address1 properties of the contacts in the table. // The GlazedLists filtered lists is used to accomplish this. EventList baseList = contactTable.getBaseEventList(); TextFilterator filterator = GlazedLists.textFilterator(new String[] { "lastName", "address.address1" }); FilterList filterList = new FilterList(baseList, new TextComponentMatcherEditor(txtFilter, filterator)); // Install the fully constructed (layered) list into the table contactTable.setFinalEventList(filterList); // Register to get notified when the filtered list changes contactTable.reportToStatusBar(getStatusBar()); // Ensure our commands are only active when something is selected. // These guard objects operate by inspecting a list selection model // (held within a ValueModel) and then either enabling or disabling the // guarded object (our executors) based on the configured criteria. // This configuration greatly simplifies the interaction between commands // that require a selection on which to operate. 
ValueModel selectionHolder = contactTable.getTableSelectionHolder(); new ListSingleSelectionGuard(selectionHolder, deleteExecutor); new ListSingleSelectionGuard(selectionHolder, propertiesExecutor); } /** * Handle an application event. This will notify us of object adds, deletes, and * modifications. Our object table takes care of updating itself, so we don't have * anything to do. * * @param e event to process */ public void onApplicationEvent( ApplicationEvent e ) { if( _logger.isInfoEnabled() ) { _logger.info("Got event: " + e); } } /** * Private inner class to handle the properties form display. */ private class PropertiesExecutor extends AbstractActionCommandExecutor { /** * Execute this command. */ public void execute() { Contact contact = contactTable.getSelectedContacts()[0]; // Get the dialog from the application context since it is a managed bean ContactPropertiesDialog dlg = (ContactPropertiesDialog) getApplicationContext().getBean( "contactPropertiesDialog"); // Tell it what object to edit and execute the command dlg.setContact(contact); dlg.execute(); } } /** * Private class to handle the delete command. Note that due to the configuration * above, this executor is only enabled when exactly one contact is selected in the * table. Thus, we don't have to protect against being executed with an incorrect * state. */ private class DeleteExecutor extends AbstractActionCommandExecutor { public DeleteExecutor() { getApplicationServices().configure(this, "delete"); } /** * Execute this command. */ public void execute() { // We know exactly one contact will be selected at this time because // of the guards put in place in prepareTable. 
final Contact contact = contactTable.getSelectedContacts()[0]; // Query the user to be sure they want to do this String title = getMessage("contact.confirmDelete.title"); String message = getMessage("contact.confirmDelete.message"); ConfirmationDialog dlg = new ConfirmationDialog(title, message) { protected void onConfirm() { // Delete the object from the persistent store. getContactDataStore().delete(contact); // And notify the rest of the application of the change getApplicationContext().publishEvent( new LifecycleApplicationEvent(LifecycleApplicationEvent.DELETED, contact)); } }; dlg.showDialog(); } } }
RCP-332 use global delete command
samples/simple/src/main/java/org/springframework/richclient/samples/simple/ui/ContactView.java
RCP-332 use global delete command
<ide><path>amples/simple/src/main/java/org/springframework/richclient/samples/simple/ui/ContactView.java <ide> /** The executor to handle the "delete" command. */ <ide> private DeleteExecutor deleteExecutor = new DeleteExecutor(); <ide> <del> /** The command Id of the Delete command. */ <del> private static final String DELETE_COMMAND_ID = "deleteCommand"; <del> <ide> /** The group Id for the popup menu. */ <ide> private static final String POPUP_COMMAND_ID = "contactViewPopupMenu"; <ide> <ide> */ <ide> protected void registerLocalCommandExecutors( PageComponentContext context ) { <ide> context.register(GlobalCommandIds.PROPERTIES, propertiesExecutor); <del> context.register(DELETE_COMMAND_ID, deleteExecutor); <del> <add> context.register(GlobalCommandIds.DELETE, deleteExecutor); <ide> } <ide> <ide> /**
Java
apache-2.0
7b34f013c409058bd6c5107acf17fb9b61c12fb4
0
rjoberon/webarchive-commons,iipc/webarchive-commons,internetarchive/webarchive-commons,iipc/webarchive-commons,internetarchive/webarchive-commons,gerhardgossen/webarchive-commons,ukwa/webarchive-commons,nlevitt/webarchive-commons,gerhardgossen/webarchive-commons,ukwa/webarchive-commons,rjoberon/webarchive-commons
package org.archive.util.binsearch; import java.io.IOException; import java.util.Comparator; import java.util.logging.Level; import java.util.logging.Logger; import org.archive.util.GeneralURIStreamFactory; import org.archive.util.iterator.CloseableIterator; public class SortedTextFile { public static class NumericComparator implements Comparator<String> { @Override public int compare(String arg0, String arg1) { long val0 = Long.parseLong(arg0); long val1 = Long.parseLong(arg1); if (val0 < val1) { return -1; } else if (val0 == val1) { return 0; } else { return 1; } } }; public static class DefaultComparator implements Comparator<String> { @Override public int compare(String arg0, String arg1) { return arg0.compareTo(arg1); } }; public final static Comparator<String> numericComparator = new NumericComparator(); public final static Comparator<String> defaultComparator = new DefaultComparator(); private final static Logger LOGGER = Logger.getLogger(SortedTextFile.class.getName()); protected SeekableLineReaderFactory factory; protected int binsearchBlockSize = SeekableLineReaderFactory.BINSEARCH_BLOCK_SIZE; public SortedTextFile(SeekableLineReaderFactory factory) { setFactory(factory); } public SortedTextFile(String filename) throws IOException { this(filename, true); } public SortedTextFile(String filename, boolean useNio) throws IOException { this.factory = GeneralURIStreamFactory.createSeekableStreamFactory(filename, useNio); } protected SortedTextFile() { this.factory = null; } protected void setFactory(SeekableLineReaderFactory factory) { this.factory = factory; } public void reloadFactory() { try { this.factory.reload(); } catch (IOException e) { LOGGER.warning(e.toString()); } } public int getBinsearchBlockSize() { return binsearchBlockSize; } public void setBinsearchBlockSize(int binsearchBlockSize) { this.binsearchBlockSize = binsearchBlockSize; } public CloseableIterator<String> getRecordIteratorLT(final String prefix) throws IOException { return 
getRecordIterator(prefix, true); } public CloseableIterator<String> getRecordIterator(final String prefix) throws IOException { return getRecordIterator(prefix, false); } public SeekableLineReader getSLR() throws IOException { return factory.get(); } public CloseableIterator<String> getRecordIterator(final long offset) throws IOException { SeekableLineReader slr = factory.get(); slr.seek(offset); return new SeekableLineReaderIterator(slr); } public CloseableIterator<String> getRecordIterator(final String prefix, boolean lessThan) throws IOException { SeekableLineReader slr = factory.get(); try { return search(slr, prefix, lessThan, defaultComparator); } catch (IOException io) { if (slr != null) { slr.close(); } throw io; } } public long binaryFindOffset(SeekableLineReader slr, final String key, Comparator<String> comparator) throws IOException { int blockSize = binsearchBlockSize; long fileSize = slr.getSize(); long min = 0; long max = (long) fileSize / blockSize; long mid; String line; // TODO: implement a cache of midpoints - will make a HUGE difference // on both HTTP and HDFS while (max - min > 1) { mid = min + (long)((max - min) / 2); slr.seek(mid * blockSize); if(mid > 0) slr.skipLine(); // probably a partial line line = slr.readLine(); if (comparator.compare(key, line) > 0) { if(LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(String.format("Search(%d) (%s)/(%s) : After", mid * blockSize, key,line)); } min = mid; } else { if(LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(String.format("Search(%d) (%s)/(%s) : Before", mid * blockSize, key,line)); } max = mid; } } // find the right line min = min * blockSize; return min; } public long[] getStartEndOffsets(SeekableLineReader slr, String start, String end) throws IOException { long endOffset = 0; if ((end != null) && !end.isEmpty()) { //endOffset = this.findOffset(slr, end); endOffset = this.searchOffset(slr, end, false, defaultComparator); } else { endOffset = slr.getSize(); } long startOffset = 0; if ((start != 
null) && !start.isEmpty()) { startOffset = this.searchOffset(slr, start, true, defaultComparator); } return new long[]{startOffset, endOffset}; } // public CloseableIterator<String> getSplitIterator(long startOffset, long endOffset, int numSplits) throws IOException // { // SeekableLineReader slr = factory.get(); // return new StepSeekingIterator(slr, startOffset, endOffset, numSplits); // } public CloseableIterator<String> getSplitIterator(String start, String end, int numSplits) throws IOException { SeekableLineReader slr = factory.get(); long[] offsets = getStartEndOffsets(slr, start, end); return new StepSeekingIterator(slr, offsets[0], offsets[1], numSplits); } public String[] getRange(String start, String end) throws IOException { SeekableLineReader slr = null; String startLine = null; String endLine = null; try { slr = factory.get(); if (start.isEmpty()) { slr.seek(0); startLine = slr.readLine(); } else { startLine = search(slr, start, true, defaultComparator).next(); } if (end.isEmpty()) { endLine = getLastLine(slr); } else { endLine = search(slr, end, true, defaultComparator).next(); } } finally { if (slr != null) { slr.close(); } } return new String[]{startLine, endLine}; } // end exclusive public String[] getNthSplit(String start, String end, int split, int numSplits) throws IOException { SeekableLineReader slr = null; String startLine = null; String endLine = null; try { slr = factory.get(); long[] offsets = getStartEndOffsets(slr, start, end); long startOffset = offsets[0]; long diff = offsets[1] - offsets[0]; long seekDiff = (diff * split) / numSplits; slr.seek(startOffset + seekDiff); if ((startOffset + seekDiff) > 0) { slr.skipLine(); } startLine = slr.readLine(); endLine = null; if (split <= (numSplits - 1)) { seekDiff = (diff * (split + 1)) / numSplits; slr.seek(startOffset + seekDiff); slr.skipLine(); endLine = slr.readLine(); } else { endLine = end; } // Last line if (endLine == null) { endLine = getLastLine(slr); } } finally { if (slr != null) 
{ slr.close(); } } return new String[]{startLine, endLine}; } public String getLastLine(SeekableLineReader slr) throws IOException { int lastLineLenTest = 0; int lastLineLenInc = 400; String endLine = null; do { lastLineLenTest += lastLineLenInc; slr.seek(slr.getSize() - lastLineLenTest); // TODO: assume larger buffer slr.readLine(); // skip partial line String nextLine = null; endLine = null; while ((nextLine = slr.readLine()) != null) { endLine = nextLine; } } while (endLine == null); return endLine; } class StepSeekingIterator implements CloseableIterator<String> { long startOffset; int numSplits; long endOffset; int currSplit; SeekableLineReader slr; public StepSeekingIterator(SeekableLineReader slr, long startOffset, long endOffset, int numSplits) throws IOException { this.slr = slr; this.currSplit = 0; this.startOffset = startOffset; this.numSplits = numSplits; this.endOffset = endOffset; slr.seek(startOffset); } public boolean hasNext() { return (currSplit < numSplits); } public String next() { String line = null; try { if (startOffset + currSplit != 0) { slr.skipLine(); } line = slr.readLine(); currSplit++; long seekDiff = ((endOffset - startOffset) * currSplit) / numSplits; slr.seek(startOffset + seekDiff); } catch (IOException io) { io.printStackTrace(); } return line; } public void remove() { throw new UnsupportedOperationException(); } public void close() throws IOException { slr.close(); } } private long searchOffset(SeekableLineReader slr, final String key, boolean lessThan, Comparator<String> comparator) throws IOException { long offset = binaryFindOffset(slr, key, comparator); slr.seek(offset); String line = null; if (offset > 0) { slr.skipLine(); } String prev = null; while(true) { if (line != null) { offset += line.getBytes().length + 1; } line = slr.readLine(); if(line == null) break; if(comparator.compare(line, key) >= 0) break; prev = line; } if (lessThan && prev != null) { offset -= prev.getBytes().length + 1; } return offset; } private 
CloseableIterator<String> search(SeekableLineReader slr, final String key, boolean lessThan, Comparator<String> comparator) throws IOException { long min = binaryFindOffset(slr, key, comparator); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(String.format("Aligning(%d)",min)); } slr.seek(min); String line; if (min > 0) { slr.skipLine(); } String prev = null; while(true) { line = slr.readLine(); if (line == null) break; if (comparator.compare(line, key) >= 0) break; prev = line; } if (!lessThan) { prev = null; } return new CachedStringIterator(slr, prev, line); } public static class CachedStringIterator implements CloseableIterator<String> { private String first; private String second; private SeekableLineReader slr; private SeekableLineReaderIterator it; public CachedStringIterator(String first, String second) { this.slr = null; this.first = first; this.second = second; } public CachedStringIterator(SeekableLineReader slr, String first, String second) { this.slr = slr; this.first = first; this.second = second; if (slr != null) { it = new SeekableLineReaderIterator(slr); } } public boolean hasNext() { if(first != null) { return true; } if(second != null) { return true; } if (it == null) { return false; } return it.hasNext(); } public String next() { if(first != null) { String tmp = first; first = null; return tmp; } if(second != null) { String tmp = second; second = null; return tmp; } if (it == null) { return null; } return it.next(); } public void remove() { throw new UnsupportedOperationException(); } public void close() throws IOException { if (slr != null) { slr.close(); } } } }
src/main/java/org/archive/util/binsearch/SortedTextFile.java
package org.archive.util.binsearch; import java.io.IOException; import java.util.Comparator; import java.util.logging.Level; import java.util.logging.Logger; import org.archive.util.GeneralURIStreamFactory; import org.archive.util.iterator.CloseableIterator; public class SortedTextFile { public static class NumericComparator implements Comparator<String> { @Override public int compare(String arg0, String arg1) { long val0 = Long.parseLong(arg0); long val1 = Long.parseLong(arg1); if (val0 < val1) { return -1; } else if (val0 == val1) { return 0; } else { return 1; } } }; public static class DefaultComparator implements Comparator<String> { @Override public int compare(String arg0, String arg1) { return arg0.compareTo(arg1); } }; public final static Comparator<String> numericComparator = new NumericComparator(); public final static Comparator<String> defaultComparator = new DefaultComparator(); private final static Logger LOGGER = Logger.getLogger(SortedTextFile.class.getName()); protected SeekableLineReaderFactory factory; protected int binsearchBlockSize = SeekableLineReaderFactory.BINSEARCH_BLOCK_SIZE; public SortedTextFile(SeekableLineReaderFactory factory) { setFactory(factory); } public SortedTextFile(String filename) throws IOException { this(filename, true); } public SortedTextFile(String filename, boolean useNio) throws IOException { this.factory = GeneralURIStreamFactory.createSeekableStreamFactory(filename, useNio); } protected SortedTextFile() { this.factory = null; } protected void setFactory(SeekableLineReaderFactory factory) { this.factory = factory; } public void reloadFactory() { try { this.factory.reload(); } catch (IOException e) { LOGGER.warning(e.toString()); } } public int getBinsearchBlockSize() { return binsearchBlockSize; } public void setBinsearchBlockSize(int binsearchBlockSize) { this.binsearchBlockSize = binsearchBlockSize; } public CloseableIterator<String> getRecordIteratorLT(final String prefix) throws IOException { return 
getRecordIterator(prefix, true); } public CloseableIterator<String> getRecordIterator(final String prefix) throws IOException { return getRecordIterator(prefix, false); } public SeekableLineReader getSLR() throws IOException { return factory.get(); } public CloseableIterator<String> getRecordIterator(final long offset) throws IOException { SeekableLineReader slr = factory.get(); slr.seek(offset); return new SeekableLineReaderIterator(slr); } public CloseableIterator<String> getRecordIterator(final String prefix, boolean lessThan) throws IOException { SeekableLineReader slr = factory.get(); try { return search(slr, prefix, lessThan, defaultComparator); } catch (IOException io) { if (slr != null) { slr.close(); } throw io; } } public long binaryFindOffset(SeekableLineReader slr, final String key, Comparator<String> comparator) throws IOException { int blockSize = binsearchBlockSize; long fileSize = slr.getSize(); long min = 0; long max = (long) fileSize / blockSize; long mid; String line; // TODO: implement a cache of midpoints - will make a HUGE difference // on both HTTP and HDFS while (max - min > 1) { mid = min + (long)((max - min) / 2); slr.seek(mid * blockSize); if(mid > 0) slr.skipLine(); // probably a partial line line = slr.readLine(); if (comparator.compare(key, line) > 0) { if(LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(String.format("Search(%d) (%s)/(%s) : After", mid * blockSize, key,line)); } min = mid; } else { if(LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(String.format("Search(%d) (%s)/(%s) : Before", mid * blockSize, key,line)); } max = mid; } } // find the right line min = min * blockSize; return min; } public long[] getStartEndOffsets(SeekableLineReader slr, String start, String end) throws IOException { long endOffset = 0; if ((end != null) && !end.isEmpty()) { //endOffset = this.findOffset(slr, end); endOffset = this.searchOffset(slr, end, false, defaultComparator); } else { endOffset = slr.getSize(); } long startOffset = 0; if ((start != 
null) && !start.isEmpty()) { startOffset = this.searchOffset(slr, start, true, defaultComparator); } return new long[]{startOffset, endOffset}; } // public CloseableIterator<String> getSplitIterator(long startOffset, long endOffset, int numSplits) throws IOException // { // SeekableLineReader slr = factory.get(); // return new StepSeekingIterator(slr, startOffset, endOffset, numSplits); // } public CloseableIterator<String> getSplitIterator(String start, String end, int numSplits) throws IOException { SeekableLineReader slr = factory.get(); long[] offsets = getStartEndOffsets(slr, start, end); return new StepSeekingIterator(slr, offsets[0], offsets[1], numSplits); } public String[] getRange(String start, String end) throws IOException { SeekableLineReader slr = null; String startLine = null; String endLine = null; try { slr = factory.get(); if (start.isEmpty()) { slr.seek(0); startLine = slr.readLine(); } else { startLine = search(slr, start, true, defaultComparator).next(); } if (end.isEmpty()) { endLine = getLastLine(slr); } else { endLine = search(slr, end, true, defaultComparator).next(); } } finally { if (slr != null) { slr.close(); } } return new String[]{startLine, endLine}; } // end exclusive public String[] getNthSplit(String start, String end, int split, int numSplits) throws IOException { SeekableLineReader slr = null; String startLine = null; String endLine = null; try { slr = factory.get(); long[] offsets = getStartEndOffsets(slr, start, end); long startOffset = offsets[0]; long diff = offsets[1] - offsets[0]; long seekDiff = (diff * split) / numSplits; slr.seek(startOffset + seekDiff); if ((startOffset + seekDiff) > 0) { slr.skipLine(); } startLine = slr.readLine(); endLine = null; if (split <= (numSplits - 1)) { seekDiff = (diff * (split + 1)) / numSplits; slr.seek(startOffset + seekDiff); slr.skipLine(); endLine = slr.readLine(); } else { endLine = end; } // Last line if (endLine == null) { endLine = getLastLine(slr); } } finally { if (slr != null) 
{ slr.close(); } } return new String[]{startLine, endLine}; } public String getLastLine(SeekableLineReader slr) throws IOException { int lastLineLenTest = 0; int lastLineLenInc = 400; String endLine = null; do { lastLineLenTest += lastLineLenInc; slr.seek(slr.getSize() - lastLineLenTest); // TODO: assume larger buffer slr.readLine(); // skip partial line String nextLine = null; endLine = null; while ((nextLine = slr.readLine()) != null) { endLine = nextLine; } } while (endLine == null); return endLine; } class StepSeekingIterator implements CloseableIterator<String> { long startOffset; int numSplits; long endOffset; int currSplit; SeekableLineReader slr; public StepSeekingIterator(SeekableLineReader slr, long startOffset, long endOffset, int numSplits) throws IOException { this.slr = slr; this.currSplit = 0; this.startOffset = startOffset; this.numSplits = numSplits; this.endOffset = endOffset; slr.seek(startOffset); } public boolean hasNext() { return (currSplit < numSplits); } public String next() { String line = null; try { if (startOffset + currSplit != 0) { slr.skipLine(); } line = slr.readLine(); currSplit++; long seekDiff = ((endOffset - startOffset) * currSplit) / numSplits; slr.seek(startOffset + seekDiff); } catch (IOException io) { io.printStackTrace(); } return line; } public void remove() { throw new UnsupportedOperationException(); } public void close() throws IOException { slr.close(); } } private long searchOffset(SeekableLineReader slr, final String key, boolean lessThan, Comparator<String> comparator) throws IOException { long offset = binaryFindOffset(slr, key, comparator); slr.seek(offset); String line = null; if (offset > 0) { slr.skipLine(); } String prev = null; while(true) { if (line != null) { offset += line.getBytes().length + 1; } line = slr.readLine(); if(line == null) break; if(comparator.compare(line, key) >= 0) break; prev = line; } if (!lessThan) { prev = null; } else { offset -= prev.getBytes().length + 1; } // To allow for skipping 
the line, in case we're not on the boundary return (offset - 2); } private CloseableIterator<String> search(SeekableLineReader slr, final String key, boolean lessThan, Comparator<String> comparator) throws IOException { long min = binaryFindOffset(slr, key, comparator); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine(String.format("Aligning(%d)",min)); } slr.seek(min); String line; if (min > 0) { slr.skipLine(); } String prev = null; while(true) { line = slr.readLine(); if (line == null) break; if (comparator.compare(line, key) >= 0) break; prev = line; } if (!lessThan) { prev = null; } return new CachedStringIterator(slr, prev, line); } public static class CachedStringIterator implements CloseableIterator<String> { private String first; private String second; private SeekableLineReader slr; private SeekableLineReaderIterator it; public CachedStringIterator(String first, String second) { this.slr = null; this.first = first; this.second = second; } public CachedStringIterator(SeekableLineReader slr, String first, String second) { this.slr = slr; this.first = first; this.second = second; if (slr != null) { it = new SeekableLineReaderIterator(slr); } } public boolean hasNext() { if(first != null) { return true; } if(second != null) { return true; } if (it == null) { return false; } return it.hasNext(); } public String next() { if(first != null) { String tmp = first; first = null; return tmp; } if(second != null) { String tmp = second; second = null; return tmp; } if (it == null) { return null; } return it.next(); } public void remove() { throw new UnsupportedOperationException(); } public void close() throws IOException { if (slr != null) { slr.close(); } } } }
SortedTextFile#searchOffset() fix NPE causing ARI-3498; also return "offset" instead of "offset-2", don't get reason for -2 and it can return nonsensical result -2 when offset is 0
src/main/java/org/archive/util/binsearch/SortedTextFile.java
SortedTextFile#searchOffset() fix NPE causing ARI-3498; also return "offset" instead of "offset-2", don't get reason for -2 and it can return nonsensical result -2 when offset is 0
<ide><path>rc/main/java/org/archive/util/binsearch/SortedTextFile.java <ide> prev = line; <ide> } <ide> <del> if (!lessThan) { <del> prev = null; <del> } else { <add> if (lessThan && prev != null) { <ide> offset -= prev.getBytes().length + 1; <del> } <del> <del> // To allow for skipping the line, in case we're not on the boundary <del> return (offset - 2); <add> } <add> <add> return offset; <ide> } <ide> <ide> private CloseableIterator<String> search(SeekableLineReader slr,
Java
bsd-3-clause
63b04b086b8eb41cc4d3c992bd623678dda3cdaf
0
William-Hill-Community/flywheel,William-Hill-Community/flywheel,William-Hill-Community/flywheel,William-Hill-Community/flywheel
package au.com.williamhill.flywheel.socketx; import static java.util.concurrent.TimeUnit.*; import static org.awaitility.Awaitility.*; import java.util.*; import org.junit.*; import org.junit.runner.*; import org.junit.runners.*; import org.mockito.*; import com.obsidiandynamics.indigo.util.*; import au.com.williamhill.flywheel.socketx.jetty.*; import au.com.williamhill.flywheel.socketx.netty.*; import au.com.williamhill.flywheel.socketx.undertow.*; @RunWith(Parameterized.class) public final class IdleTimeoutTest extends BaseClientServerTest { private static final int REPEAT = 1; @Parameterized.Parameters public static List<Object[]> data() { return Arrays.asList(new Object[REPEAT][0]); } @Test public void testJtJtServerTimeout() throws Exception { // Note: Jetty requires more idle time allowance than others, otherwise the connection // times out before it is upgraded to a WebSocket. testServerTimeout(JettyServer.factory(), JettyClient.factory(), 500); } @Test public void testUtUtServerTimeout() throws Exception { testServerTimeout(UndertowServer.factory(), UndertowClient.factory(), 200); } @Test public void testNtUtServerTimeout() throws Exception { testServerTimeout(NettyServer.factory(), UndertowClient.factory(), 200); } @Test public void testJtJtClientTimeout() throws Exception { // Note: Jetty requires more idle time allowance than others, otherwise the connection // times out before it is upgraded to a WebSocket. testClientTimeout(JettyServer.factory(), JettyClient.factory(), 500); } @Test public void testUtUtClientTimeout() throws Exception { testClientTimeout(UndertowServer.factory(), UndertowClient.factory(), 200); } private void testClientTimeout(XServerFactory<? extends XEndpoint> serverFactory, XClientFactory<? 
extends XEndpoint> clientFactory, int idleTimeoutMillis) throws Exception { final XServerConfig serverConfig = getDefaultServerConfig(false) .withScanInterval(1); final XEndpointListener<XEndpoint> serverListener = createMockListener(); createServer(serverFactory, serverConfig, serverListener); final XClientConfig clientConfig = getDefaultClientConfig() .withScanInterval(1) .withIdleTimeout(idleTimeoutMillis); createClient(clientFactory, clientConfig); final XEndpointListener<XEndpoint> clientListener = createMockListener(); openClientEndpoint(false, serverConfig.port, clientListener); await().dontCatchUncaughtExceptions().atMost(60, SECONDS).untilAsserted(() -> { Mockito.verify(serverListener).onConnect(Mocks.anyNotNull()); Mockito.verify(clientListener).onConnect(Mocks.anyNotNull()); }); await().dontCatchUncaughtExceptions().atMost(60, SECONDS).untilAsserted(() -> { Mockito.verify(serverListener).onClose(Mocks.anyNotNull()); Mockito.verify(clientListener).onClose(Mocks.anyNotNull()); }); } private void testServerTimeout(XServerFactory<? extends XEndpoint> serverFactory, XClientFactory<? extends XEndpoint> clientFactory, int idleTimeoutMillis) throws Exception { final XServerConfig serverConfig = getDefaultServerConfig(false) .withScanInterval(1) .withIdleTimeout(idleTimeoutMillis); final XEndpointListener<XEndpoint> serverListener = createMockListener(); createServer(serverFactory, serverConfig, serverListener); final XClientConfig clientConfig = getDefaultClientConfig() .withScanInterval(1); createClient(clientFactory, clientConfig); final XEndpointListener<XEndpoint> clientListener = createMockListener(); openClientEndpoint(false, serverConfig.port, clientListener); await().dontCatchUncaughtExceptions().atMost(60, SECONDS).untilAsserted(() -> { Mockito.verify(serverListener).onClose(Mocks.anyNotNull()); Mockito.verify(clientListener).onClose(Mocks.anyNotNull()); }); } }
src/test/java/au/com/williamhill/flywheel/socketx/IdleTimeoutTest.java
package au.com.williamhill.flywheel.socketx; import static java.util.concurrent.TimeUnit.*; import static org.awaitility.Awaitility.*; import org.junit.*; import org.mockito.*; import com.obsidiandynamics.indigo.util.*; import au.com.williamhill.flywheel.socketx.jetty.*; import au.com.williamhill.flywheel.socketx.netty.*; import au.com.williamhill.flywheel.socketx.undertow.*; public final class IdleTimeoutTest extends BaseClientServerTest { @Test public void testJtJtServerTimeout() throws Exception { // Note: Jetty requires more idle time allowance than others, otherwise the connection // times out before it is upgraded to a WebSocket. testServerTimeout(JettyServer.factory(), JettyClient.factory(), 500); } @Test public void testUtUtServerTimeout() throws Exception { testServerTimeout(UndertowServer.factory(), UndertowClient.factory(), 200); } @Test public void testNtUtServerTimeout() throws Exception { testServerTimeout(NettyServer.factory(), UndertowClient.factory(), 200); } @Test public void testJtJtClientTimeout() throws Exception { // Note: Jetty requires more idle time allowance than others, otherwise the connection // times out before it is upgraded to a WebSocket. testClientTimeout(JettyServer.factory(), JettyClient.factory(), 500); } @Test public void testUtUtClientTimeout() throws Exception { testClientTimeout(UndertowServer.factory(), UndertowClient.factory(), 200); } private void testClientTimeout(XServerFactory<? extends XEndpoint> serverFactory, XClientFactory<? 
extends XEndpoint> clientFactory, int idleTimeoutMillis) throws Exception { final XServerConfig serverConfig = getDefaultServerConfig(false) .withScanInterval(1); final XEndpointListener<XEndpoint> serverListener = createMockListener(); createServer(serverFactory, serverConfig, serverListener); final XClientConfig clientConfig = getDefaultClientConfig() .withScanInterval(1) .withIdleTimeout(idleTimeoutMillis); createClient(clientFactory, clientConfig); final XEndpointListener<XEndpoint> clientListener = createMockListener(); openClientEndpoint(false, serverConfig.port, clientListener); await().dontCatchUncaughtExceptions().atMost(60, SECONDS).untilAsserted(() -> { Mockito.verify(serverListener).onConnect(Mocks.anyNotNull()); Mockito.verify(clientListener).onConnect(Mocks.anyNotNull()); }); await().dontCatchUncaughtExceptions().atMost(60, SECONDS).untilAsserted(() -> { Mockito.verify(serverListener).onClose(Mocks.anyNotNull()); Mockito.verify(clientListener).onClose(Mocks.anyNotNull()); }); } private void testServerTimeout(XServerFactory<? extends XEndpoint> serverFactory, XClientFactory<? extends XEndpoint> clientFactory, int idleTimeoutMillis) throws Exception { final XServerConfig serverConfig = getDefaultServerConfig(false) .withScanInterval(1) .withIdleTimeout(idleTimeoutMillis); final XEndpointListener<XEndpoint> serverListener = createMockListener(); createServer(serverFactory, serverConfig, serverListener); final XClientConfig clientConfig = getDefaultClientConfig() .withScanInterval(1); createClient(clientFactory, clientConfig); final XEndpointListener<XEndpoint> clientListener = createMockListener(); openClientEndpoint(false, serverConfig.port, clientListener); await().dontCatchUncaughtExceptions().atMost(60, SECONDS).untilAsserted(() -> { Mockito.verify(serverListener).onClose(Mocks.anyNotNull()); Mockito.verify(clientListener).onClose(Mocks.anyNotNull()); }); } }
Repeats on IdleTimeoutTest
src/test/java/au/com/williamhill/flywheel/socketx/IdleTimeoutTest.java
Repeats on IdleTimeoutTest
<ide><path>rc/test/java/au/com/williamhill/flywheel/socketx/IdleTimeoutTest.java <ide> import static java.util.concurrent.TimeUnit.*; <ide> import static org.awaitility.Awaitility.*; <ide> <add>import java.util.*; <add> <ide> import org.junit.*; <add>import org.junit.runner.*; <add>import org.junit.runners.*; <ide> import org.mockito.*; <ide> <ide> import com.obsidiandynamics.indigo.util.*; <ide> import au.com.williamhill.flywheel.socketx.netty.*; <ide> import au.com.williamhill.flywheel.socketx.undertow.*; <ide> <add>@RunWith(Parameterized.class) <ide> public final class IdleTimeoutTest extends BaseClientServerTest { <add> private static final int REPEAT = 1; <add> <add> @Parameterized.Parameters <add> public static List<Object[]> data() { <add> return Arrays.asList(new Object[REPEAT][0]); <add> } <add> <ide> @Test <ide> public void testJtJtServerTimeout() throws Exception { <ide> // Note: Jetty requires more idle time allowance than others, otherwise the connection
Java
apache-2.0
9025359889e060bab0aa98b3cc1b79cc291fd5ad
0
pravinkosurkar/metadata-extractor,pravinkosurkar/metadata-extractor,rrdeleon04/metadata-extractor,Odrahcyr/amanda-xmp,byronb92/ImageEXIFExtraction,zfavourite99/metadata-extractor,jauharshaikh/metadata-extractor,tectronics/metadata-extractor,rrdeleon04/metadata-extractor,Darpholgshon/metadata-extractor,xiaozhi003/metadata-extractor,jokiazhang/metadata-extractor,google-code-export/metadata-extractor,zfavourite99/metadata-extractor,ImranAtArbisoft/metadata-extractor,zpc930/metadata-extractor,jauharshaikh/metadata-extractor,jokiazhang/metadata-extractor,tectronics/metadata-extractor,google-code-export/metadata-extractor,zpc930/metadata-extractor,hoa-le/metadata-extractor,xiaozhi003/metadata-extractor,hoa-le/metadata-extractor,ImranAtArbisoft/metadata-extractor
/* * Copyright 2002-2012 Drew Noakes * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * More information about this project is available at: * * http://drewnoakes.com/code/exif/ * http://code.google.com/p/metadata-extractor/ */ package com.drew.imaging.tiff; import com.drew.lang.ByteArrayReader; import com.drew.lang.RandomAccessFileReader; import com.drew.lang.annotations.NotNull; import com.drew.metadata.Metadata; import com.drew.metadata.exif.ExifReader; import java.io.*; /** * Obtains all available metadata from TIFF formatted files. Note that TIFF files include many digital camera RAW * formats, including Canon (CRW, CR2), Nikon (NEF), Olympus (ORF) and Panasonic (RW2). * * @author Darren Salomons, Drew Noakes http://drewnoakes.com */ public class TiffMetadataReader { @NotNull public static Metadata readMetadata(@NotNull File file) throws IOException { Metadata metadata = new Metadata(); RandomAccessFile randomAccessFile = new RandomAccessFile(file, "r"); try { new ExifReader().extractTiff(new RandomAccessFileReader(randomAccessFile), metadata); } finally { randomAccessFile.close(); } return metadata; } @NotNull public static Metadata readMetadata(@NotNull InputStream inputStream) throws IOException { // NOTE this method is very inefficient, as it attempts to read the entire TIFF file into a byte[] // TIFF processing requires random access, as directories can be scattered throughout the byte sequence. 
// InputStream does not support seeking backwards, and so is not a viable option for TIFF processing final int chunkSize = 1024; final byte[] buffer = new byte[chunkSize]; ByteArrayOutputStream out = new ByteArrayOutputStream(); int bytesRead; while((bytesRead = inputStream.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } final byte[] tiffBytes = out.toByteArray(); Metadata metadata = new Metadata(); new ExifReader().extractTiff(new ByteArrayReader(tiffBytes), metadata); return metadata; } }
Source/com/drew/imaging/tiff/TiffMetadataReader.java
/* * Copyright 2002-2012 Drew Noakes * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * More information about this project is available at: * * http://drewnoakes.com/code/exif/ * http://code.google.com/p/metadata-extractor/ */ package com.drew.imaging.tiff; import com.drew.lang.ByteArrayReader; import com.drew.lang.RandomAccessFileReader; import com.drew.lang.annotations.NotNull; import com.drew.metadata.Metadata; import com.drew.metadata.exif.ExifReader; import java.io.*; /** * Obtains all available metadata from TIFF formatted files. Note that TIFF files include many digital camera RAW * formats, including Canon (CRW, CR2), Nikon (NEF), Olympus (ORF) and Panasonic (RW2). * * @author Darren Salomons, Drew Noakes http://drewnoakes.com */ public class TiffMetadataReader { @NotNull public static Metadata readMetadata(@NotNull File file) throws IOException { Metadata metadata = new Metadata(); RandomAccessFile randomAccessFile = new RandomAccessFile(file, "r"); try { new ExifReader().extractTiff(new RandomAccessFileReader(randomAccessFile), metadata); } finally { randomAccessFile.close(); } return metadata; } @Deprecated @NotNull public static Metadata readMetadata(@NotNull InputStream inputStream) throws IOException { // NOTE this method is very inefficient, as it attempts to read the entire TIFF file into a byte[] // TIFF processing requires random access, as directories can be scattered throughout the byte sequence. 
// InputStream does not support seeking backwards, and so is not a viable option for TIFF processing final int chunkSize = 1024; final byte[] buffer = new byte[chunkSize]; ByteArrayOutputStream out = new ByteArrayOutputStream(); int bytesRead; while((bytesRead = inputStream.read(buffer)) != -1) { out.write(buffer, 0, bytesRead); } final byte[] tiffBytes = out.toByteArray(); Metadata metadata = new Metadata(); new ExifReader().extractTiff(new ByteArrayReader(tiffBytes), metadata); return metadata; } }
TiffMetadataReader.readMetadata(InputStream) is no longer deprecated.
Source/com/drew/imaging/tiff/TiffMetadataReader.java
TiffMetadataReader.readMetadata(InputStream) is no longer deprecated.
<ide><path>ource/com/drew/imaging/tiff/TiffMetadataReader.java <ide> return metadata; <ide> } <ide> <del> @Deprecated <ide> @NotNull <ide> public static Metadata readMetadata(@NotNull InputStream inputStream) throws IOException <ide> {
Java
apache-2.0
fb3927a68942c5350e603cbca49927d716701969
0
semonte/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,samthor/intellij-community,adedayo/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,ryano144/intellij-community,robovm/robovm-studio,adedayo/intellij-community,petteyg/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,dslomov/intellij-community,wreckJ/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,gnuhub/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,MER-GROUP/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,robovm/robovm-studio,kdwink/intellij-community,youdonghai/intellij-community,retomerz/intellij-community,da1z/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,apixandru/intellij-community,adedayo/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,fitermay/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,diorcety/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,SerCeMan/intellij-community,MichaelNe
dzelsky/intellij-community,youdonghai/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,allotria/intellij-community,kdwink/intellij-community,SerCeMan/intellij-community,ernestp/consulo,da1z/intellij-community,akosyakov/intellij-community,ahb0327/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,ibinti/intellij-community,blademainer/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,orekyuu/intellij-community,fengbaicanhe/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,muntasirsyed/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,slisson/intellij-community,Lekanich/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,supersven/intellij-community,youdonghai/intellij-community,fnouama/intellij-community,xfournet/intellij-community,adedayo/intellij-community,holmes/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,fnouama/intellij-community,izonder/intellij-community,slisson/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,TangHao1987/intellij-community,xfournet/intellij-community,apixandru/intellij-community,fitermay/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,dslomov/intellij-community,asedunov/intellij-community,apixandru/intellij-community,retomerz/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,blademainer/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,jagguli/intellij-community,samthor/intellij-community,michaelgalla
cher/intellij-community,slisson/intellij-community,pwoodworth/intellij-community,signed/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,dslomov/intellij-community,da1z/intellij-community,signed/intellij-community,samthor/intellij-community,amith01994/intellij-community,caot/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,consulo/consulo,asedunov/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,robovm/robovm-studio,suncycheng/intellij-community,wreckJ/intellij-community,allotria/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,consulo/consulo,ivan-fedorov/intellij-community,fnouama/intellij-community,ryano144/intellij-community,izonder/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,ryano144/intellij-community,apixandru/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,FHannes/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,izonder/intellij-community,amith01994/intellij-community,allotria/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,FHannes/intellij-communit
y,semonte/intellij-community,ftomassetti/intellij-community,ol-loginov/intellij-community,ThiagoGarciaAlves/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,dslomov/intellij-community,fnouama/intellij-community,diorcety/intellij-community,kdwink/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,allotria/intellij-community,consulo/consulo,robovm/robovm-studio,salguarnieri/intellij-community,fitermay/intellij-community,xfournet/intellij-community,jagguli/intellij-community,slisson/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,retomerz/intellij-community,asedunov/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,izonder/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,MichaelNedzelsky/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,holmes/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,kdwink/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,asedunov/i
ntellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,TangHao1987/intellij-community,izonder/intellij-community,asedunov/intellij-community,diorcety/intellij-community,vladmm/intellij-community,michaelgallacher/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,supersven/intellij-community,tmpgit/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,ol-loginov/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,ryano144/intellij-community,caot/intellij-community,amith01994/intellij-community,supersven/intellij-community,nicolargo/intellij-community,ibinti/intellij-community,FHannes/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,xfournet/intellij-community,ftomassetti/intellij-community,tmpgit/intellij-community,ivan-fedorov/intellij-community,Distrotech/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,petteyg/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,signed/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,Lekanich/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,izonder/intellij-community,diorcety/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,adedayo/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,idea4bsd/idea4bsd,TangHao1987/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-com
munity,izonder/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,caot/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,pwoodworth/intellij-community,diorcety/intellij-community,holmes/intellij-community,salguarnieri/intellij-community,Lekanich/intellij-community,clumsy/intellij-community,blademainer/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,Lekanich/intellij-community,blademainer/intellij-community,signed/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,allotria/intellij-community,signed/intellij-community,petteyg/intellij-community,ernestp/consulo,nicolargo/intellij-community,youdonghai/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,samthor/intellij-community,tmpgit/intellij-community,holmes/intellij-community,ahb0327/intellij-community,amith01994/intellij-community,jagguli/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,hurricup/intellij-community,robovm/robovm-studio,ernestp/consulo,salguarnieri/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,ahb0327/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,MichaelNedzelsky/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,kool79/intellij-community,gnuhub/intellij-community,supersven/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,holmes/intellij-community,signed/intellij-community,adedayo/intellij-community,fitermay/intellij-communit
y,retomerz/intellij-community,holmes/intellij-community,fnouama/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,orekyuu/intellij-community,orekyuu/intellij-community,alphafoobar/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,allotria/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,robovm/robovm-studio,semonte/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,vladmm/intellij-community,clumsy/intellij-community,caot/intellij-community,ivan-fedorov/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,caot/intellij-community,allotria/intellij-community,vvv1559/intellij-community,amith01994/intellij-community,amith01994/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,supersven/intellij-community,pwoodworth/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,MichaelNedzelsky/intellij-community,allotria/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,supersven/intellij-community,signed/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,ernestp/consulo,ahb0327/intellij-community,muntasirsyed/intellij-community,ernestp/consulo,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,jagguli/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,vladmm/intellij-co
mmunity,ThiagoGarciaAlves/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,xfournet/intellij-community,da1z/intellij-community,akosyakov/intellij-community,asedunov/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,clumsy/intellij-community,slisson/intellij-community,samthor/intellij-community,clumsy/intellij-community,FHannes/intellij-community,clumsy/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,petteyg/intellij-community,ibinti/intellij-community,ibinti/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,vladmm/intellij-community,fnouama/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,ibinti/intellij-community,robovm/robovm-studio,da1z/intellij-community,ernestp/consulo,vvv1559/intellij-community,ryano144/intellij-community,hurricup/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,kool79/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,SerCeMan/intellij-community,robovm/robovm-studio,kdwink/intellij-community,fitermay/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,hurricup/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,Distrotech/intellij-community,FHannes/intellij
-community,ibinti/intellij-community,amith01994/intellij-community,Lekanich/intellij-community,supersven/intellij-community,wreckJ/intellij-community,izonder/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,wreckJ/intellij-community,dslomov/intellij-community,gnuhub/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,consulo/consulo,holmes/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,slisson/intellij-community,signed/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,allotria/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,fitermay/intellij-community,robovm/robovm-studio,dslomov/intellij-community,kool79/intellij-community,kool79/intellij-community,tmpgit/intellij-community,slisson/intellij-community,vladmm/intellij-community,signed/intellij-community,suncycheng/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,mglukhikh/intellij-community,blademainer/intellij-community,da1z/intellij-community,samthor/intellij-community,fengbaicanhe/intellij-community,MER-GROUP/intellij-community,FHannes/intellij-community,caot/intellij-community,TangHao1987/intellij-community,supersven/intellij-community,hurricup/intellij-community,dslomov/intellij-community,vladmm/intellij-community,caot/intellij-community,fengbaicanhe/intellij-community,alphafoobar/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,samthor
/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,da1z/intellij-community,caot/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,wreckJ/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,kool79/intellij-community,consulo/consulo,semonte/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,robovm/robovm-studio,pwoodworth/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,fengbaicanhe/intellij-community,consulo/consulo,supersven/intellij-community,ryano144/intellij-community,vladmm/intellij-community,signed/intellij-community,Lekanich/intellij-community,da1z/intellij-community,holmes/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,slisson/intellij-community,da1z/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,robovm/robovm-studio,kool79/intellij-community,suncycheng/intellij-community,caot/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,kool79/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,caot/intellij-community,FHannes/intellij-community,fitermay/intellij-community,fitermay/intellij-community,kdwink/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,supersven/intellij-community,kool79/intellij-community,fnouama/intellij-community,Distrotech/intellij-community,semonte/intellij-communi
ty,ftomassetti/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,fnouama/intellij-community,amith01994/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,retomerz/intellij-community,ibinti/intellij-community,ryano144/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,signed/intellij-community,kool79/intellij-community,FHannes/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,salguarnieri/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,muntasirsyed/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,da1z/intellij-community,clumsy/intellij-community,diorcety/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,signed/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,FHannes/intellij-community
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.Document; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.project.ProjectUtil; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileManager; import com.intellij.openapi.vfs.newvfs.events.VFileEvent; import com.intellij.openapi.vfs.newvfs.events.VFilePropertyChangeEvent; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import gnu.trove.THashSet; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Set; /** * @author peter */ public class FileContentUtil { @NonNls public static final String FORCE_RELOAD_REQUESTOR = "FileContentUtil.saveOrReload"; private FileContentUtil() { } public static void setFileText(@Nullable Project project, final VirtualFile virtualFile, final String text) throws IOException { if (project == null) { project = 
ProjectUtil.guessProjectForFile(virtualFile); } if (project != null) { final PsiFile psiFile = PsiManager.getInstance(project).findFile(virtualFile); final PsiDocumentManager psiDocumentManager = PsiDocumentManager.getInstance(project); final Document document = psiFile == null? null : psiDocumentManager.getDocument(psiFile); if (document != null) { document.setText(text != null ? text : ""); psiDocumentManager.commitDocument(document); FileDocumentManager.getInstance().saveDocument(document); return; } } VfsUtil.saveText(virtualFile, text != null ? text : ""); virtualFile.refresh(false, false); } public static void reparseFiles(@NotNull final Project project, @NotNull final Collection<VirtualFile> files, final boolean includeOpenFiles) { ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { // files must be processed under one write action to prevent firing event for invalid files. final Set<VFilePropertyChangeEvent> events = new THashSet<VFilePropertyChangeEvent>(); for (VirtualFile file : files) { saveOrReload(file, events); } if (includeOpenFiles) { for (VirtualFile open : FileEditorManager.getInstance(project).getOpenFiles()) { if (!files.contains(open)) { saveOrReload(open, events); } } } ApplicationManager.getApplication().getMessageBus().syncPublisher(VirtualFileManager.VFS_CHANGES) .before(new ArrayList<VFileEvent>(events)); ApplicationManager.getApplication().getMessageBus().syncPublisher(VirtualFileManager.VFS_CHANGES) .after(new ArrayList<VFileEvent>(events)); } }); } private static void saveOrReload(final VirtualFile virtualFile, Collection<VFilePropertyChangeEvent> events) { if (virtualFile == null || virtualFile.isDirectory() || !virtualFile.isValid()) { return; } final FileDocumentManager documentManager = FileDocumentManager.getInstance(); if (documentManager.isFileModified(virtualFile)) { Document document = documentManager.getDocument(virtualFile); if (document != null) { documentManager.saveDocument(document); } 
} events.add(new VFilePropertyChangeEvent(FORCE_RELOAD_REQUESTOR, virtualFile, VirtualFile.PROP_NAME, virtualFile.getName(), virtualFile.getName(), false)); } public static void reparseOpenedFiles() { for (Project project : ProjectManager.getInstance().getOpenProjects()) { reparseFiles(project, Collections.<VirtualFile>emptyList(), true); } } }
platform/lang-api/src/com/intellij/util/FileContentUtil.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.util; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.editor.Document; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileEditor.FileEditorManager; import com.intellij.openapi.project.Project; import com.intellij.openapi.project.ProjectManager; import com.intellij.openapi.project.ProjectUtil; import com.intellij.openapi.vfs.VfsUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.openapi.vfs.VirtualFileManager; import com.intellij.openapi.vfs.newvfs.events.VFileEvent; import com.intellij.openapi.vfs.newvfs.events.VFilePropertyChangeEvent; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiFile; import com.intellij.psi.PsiManager; import gnu.trove.THashSet; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Set; /** * @author peter */ public class FileContentUtil { @NonNls public static final String FORCE_RELOAD_REQUESTOR = "FileContentUtil.saveOrReload"; private FileContentUtil() { } public static void setFileText(@Nullable Project project, final VirtualFile virtualFile, final String text) throws IOException { if (project == null) { project = 
ProjectUtil.guessProjectForFile(virtualFile); } if (project != null) { final PsiFile psiFile = PsiManager.getInstance(project).findFile(virtualFile); final PsiDocumentManager psiDocumentManager = PsiDocumentManager.getInstance(project); final Document document = psiFile == null? null : psiDocumentManager.getDocument(psiFile); if (document != null) { document.setText(text != null ? text : ""); psiDocumentManager.commitDocument(document); FileDocumentManager.getInstance().saveDocument(document); return; } } VfsUtil.saveText(virtualFile, text != null ? text : ""); virtualFile.refresh(false, false); } public static void reparseFiles(@NotNull final Project project, @NotNull final Collection<VirtualFile> files, boolean includeOpenFiles) { final Set<VFilePropertyChangeEvent> events = new THashSet<VFilePropertyChangeEvent>(); for (VirtualFile file : files) { saveOrReload(file, events); } if (includeOpenFiles) { for (VirtualFile open : FileEditorManager.getInstance(project).getOpenFiles()) { if (!files.contains(open)) { saveOrReload(open, events); } } } ApplicationManager.getApplication().runWriteAction(new Runnable() { public void run() { ApplicationManager.getApplication().getMessageBus().syncPublisher(VirtualFileManager.VFS_CHANGES) .before(new ArrayList<VFileEvent>(events)); ApplicationManager.getApplication().getMessageBus().syncPublisher(VirtualFileManager.VFS_CHANGES) .after(new ArrayList<VFileEvent>(events)); } }); } private static void saveOrReload(final VirtualFile virtualFile, Collection<VFilePropertyChangeEvent> events) { if (virtualFile == null || virtualFile.isDirectory()) { return; } final FileDocumentManager documentManager = FileDocumentManager.getInstance(); if (documentManager.isFileModified(virtualFile)) { Document document = documentManager.getDocument(virtualFile); if (document != null) { documentManager.saveDocument(document); } } events.add(new VFilePropertyChangeEvent(FORCE_RELOAD_REQUESTOR, virtualFile, VirtualFile.PROP_NAME, virtualFile.getName(), 
virtualFile.getName(), false)); } public static void reparseOpenedFiles() { for (Project project : ProjectManager.getInstance().getOpenProjects()) { reparseFiles(project, Collections.<VirtualFile>emptyList(), true); } } }
Platform: do not file VFS events for invalid files on reparseFiles (OC-2806)
platform/lang-api/src/com/intellij/util/FileContentUtil.java
Platform: do not file VFS events for invalid files on reparseFiles (OC-2806)
<ide><path>latform/lang-api/src/com/intellij/util/FileContentUtil.java <ide> virtualFile.refresh(false, false); <ide> } <ide> <del> public static void reparseFiles(@NotNull final Project project, @NotNull final Collection<VirtualFile> files, boolean includeOpenFiles) { <del> final Set<VFilePropertyChangeEvent> events = new THashSet<VFilePropertyChangeEvent>(); <del> for (VirtualFile file : files) { <del> saveOrReload(file, events); <del> } <del> if (includeOpenFiles) { <del> for (VirtualFile open : FileEditorManager.getInstance(project).getOpenFiles()) { <del> if (!files.contains(open)) { <del> saveOrReload(open, events); <del> } <del> } <del> } <add> public static void reparseFiles(@NotNull final Project project, @NotNull final Collection<VirtualFile> files, final boolean includeOpenFiles) { <ide> ApplicationManager.getApplication().runWriteAction(new Runnable() { <ide> public void run() { <add> // files must be processed under one write action to prevent firing event for invalid files. 
<add> <add> final Set<VFilePropertyChangeEvent> events = new THashSet<VFilePropertyChangeEvent>(); <add> for (VirtualFile file : files) { <add> saveOrReload(file, events); <add> } <add> if (includeOpenFiles) { <add> for (VirtualFile open : FileEditorManager.getInstance(project).getOpenFiles()) { <add> if (!files.contains(open)) { <add> saveOrReload(open, events); <add> } <add> } <add> } <add> <ide> ApplicationManager.getApplication().getMessageBus().syncPublisher(VirtualFileManager.VFS_CHANGES) <ide> .before(new ArrayList<VFileEvent>(events)); <ide> ApplicationManager.getApplication().getMessageBus().syncPublisher(VirtualFileManager.VFS_CHANGES) <ide> } <ide> <ide> private static void saveOrReload(final VirtualFile virtualFile, Collection<VFilePropertyChangeEvent> events) { <del> if (virtualFile == null || virtualFile.isDirectory()) { <add> if (virtualFile == null || virtualFile.isDirectory() || !virtualFile.isValid()) { <ide> return; <ide> } <ide> final FileDocumentManager documentManager = FileDocumentManager.getInstance();
JavaScript
mit
4c6c9207bc46db5041dc826a974c5919344b99ec
0
Kylart/MalScraper
const _ = require('lodash') const axios = require('axios') const {parseString} = require('xml2js') const malToNormal = { // Anime values series_animedb_id: 'id', series_title: 'title', series_synonyms: 'synonyms', series_type: 'type', series_episodes: 'nbEpisodes', series_status: 'seriesStatus', series_start: 'seriesStart', series_end: 'seriesEnd', series_image: 'picture', my_id: 'myID', my_watched_episodes: 'nbWatchedEpisode', my_start_date: 'myStartDate', my_finish_date: 'myEndDate', my_score: 'score', my_status: 'status', my_rewatching: 'rewatching', my_rewatching_ep: 'rewatchingEp', my_last_updated: 'lastUpdate', my_tags: 'tags', // MyAnimeList values user_id: 'userID', user_watching: 'nbWatching', user_completed: 'nbCompleted', user_onhold: 'nbOnHold', user_dropped: 'nbDropped', user_plantowatch: 'nbPlanToWatch', user_days_spent_watching: 'nbDaysSpentWatching' } const flatten = (obj) => { const res = {} _.each(obj, (value, key) => { res[malToNormal[key]] = value[0] }) return res } /** * Allows to retrieve a user's watch lists and stuff. * @param {string} user The name of the user. * @param {string} type Can be either 'anime' or 'manga' * * @returns {promise} */ const getWatchListFromUser = (user) => { return new Promise((resolve, reject) => { if (!user) { reject(new Error('[Mal-Scraper]: No user received.')) return } axios.get(`https://myanimelist.net/malappinfo.php`, { params: { u: user, status: 'all', type: 'anime' // This can be changed to 'manga' too to retrieve manga lists. } }) .then(({data}) => { parseString(data, (err, res) => { /* istanbul ignore next */ if (err) reject(err) const mal = res.myanimelist if (!mal) { reject(new Error('[Mal-Scraper]: It seems this user does not exist.')) } resolve({ stats: flatten(mal.myinfo[0]), lists: _.map(mal.anime, obj => flatten(obj)) }) }) }) .catch(/* istanbul ignore next */(err) => reject(err)) }) } module.exports = { getWatchListFromUser }
src/watchList.js
const _ = require('lodash') const axios = require('axios') const {parseString} = require('xml2js') const malToNormal = { // Anime values series_animedb_id: 'id', series_title: 'title', series_synonyms: 'synonyms', series_type: 'type', series_episodes: 'nbEpisodes', series_status: 'seriesStatus', series_start: 'seriesStart', series_end: 'seriesEnd', series_image: 'picture', my_id: 'myID', my_watched_episodes: 'nbWatchedEpisode', my_start_date: 'myStartDate', my_finish_date: 'myEndDate', my_score: 'score', my_status: 'status', my_rewatching: 'rewatching', my_rewatching_ep: 'rewatchingEp', my_last_updated: 'lastUpdate', my_tags: 'tags', // MyAnimeList values user_id: 'userID', user_watching: 'nbWatching', user_completed: 'nbCompleted', user_onhold: 'nbOnHold', user_dropped: 'nbDropped', user_plantowatch: 'nbPlanToWatch', user_days_spent_watching: 'nbDaysSpentWatching' } const flatten = (obj) => { const res = {} _.each(obj, (value, key) => { res[malToNormal[key]] = value[0] }) return res } /** * Allows to retrieve a user's watch lists and stuff. * @param {string} user The name of the user. * @param {string} type Can be either 'anime' or 'manga' * * @returns {promise} */ const getWatchListFromUser = (user) => { return new Promise((resolve, reject) => { if (!user) { reject(new Error('[Mal-Scraper]: No user received.')) return } axios.get(`https://myanimelist.net/malappinfo.php`, { params: { u: user, status: 'all', type: 'anime' // This can be changed to 'manga' too to retrieve manga lists. } }) .then(({data}) => { parseString(data, (err, res) => { /* istanbul ignore next */ if (err) reject(err) const mal = res.myanimelist resolve({ stats: flatten(mal.myinfo[0]), lists: _.map(mal.anime, obj => flatten(obj)) }) }) }) .catch(/* istanbul ignore next */(err) => reject(err)) }) } module.exports = { getWatchListFromUser }
Fix #6
src/watchList.js
Fix #6
<ide><path>rc/watchList.js <ide> <ide> const mal = res.myanimelist <ide> <add> if (!mal) { <add> reject(new Error('[Mal-Scraper]: It seems this user does not exist.')) <add> } <add> <ide> resolve({ <ide> stats: flatten(mal.myinfo[0]), <ide> lists: _.map(mal.anime, obj => flatten(obj))
Java
apache-2.0
acc14974fd078278a697782bbb369ea0ae664ae2
0
raydac/java-binary-block-parser
/* * Copyright 2017 Igor Maznitsa. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.igormaznitsa.jbbp.testaux; import org.apache.commons.io.FileUtils; import org.junit.Rule; import org.junit.rules.TemporaryFolder; import javax.tools.*; import java.io.File; import java.io.IOException; import java.net.URL; import java.net.URLClassLoader; import java.util.ArrayList; import java.util.List; public abstract class AbstractJavaClassCompilerTest { @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); public ClassLoader saveAndCompile(final JavaClassContent... klasses) throws IOException { final File folder = this.tempFolder.newFolder(); final List<File> classFiles = new ArrayList<File>(); for (final JavaClassContent c : klasses) { final File classFile = c.makeFile(folder); final File pack = classFile.getParentFile(); if (!pack.isDirectory() && !pack.mkdirs()) { throw new IOException("Can't create folder : " + pack); } FileUtils.writeStringToFile(classFile, c.getText(), "UTF-8"); classFiles.add(classFile); } final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); final DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<JavaFileObject>(); final StandardJavaFileManager fileManager = compiler.getStandardFileManager(diagnostics, null, null); final Iterable<? 
extends JavaFileObject> compilationUnits = fileManager.getJavaFileObjectsFromFiles(classFiles); if (!compiler.getTask(null, fileManager, null, null, null, compilationUnits).call()) { for (final Diagnostic<?> diagnostic : diagnostics.getDiagnostics()) { System.err.format("Error on line %d in %s%n", diagnostic.getLineNumber(), diagnostic.getSource()); } for(final File f : classFiles){ System.err.println("File '"+f.getName()+'\''); System.err.println("-------------------------------------------"); System.err.println(FileUtils.readFileToString(f)); } throw new IOException("Error during compilation"); } final ClassLoader result = new URLClassLoader(new URL[]{folder.toURI().toURL()}); return result; } public final static class JavaClassContent { private final String className; private final String classText; public JavaClassContent(final String className, final String classText) { this.className = className; this.classText = classText; } public File makeFile(final File folder) { return new File(folder, this.className.replace('.', '/') + ".java"); } public String getText() { return this.classText; } } }
src/test/java/com/igormaznitsa/jbbp/testaux/AbstractJavaClassCompilerTest.java
/* * Copyright 2017 Igor Maznitsa. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.igormaznitsa.jbbp.testaux; import org.apache.commons.io.FileUtils; import org.junit.Rule; import org.junit.rules.TemporaryFolder; import javax.tools.*; import java.io.File; import java.io.IOException; import java.net.URL; import java.net.URLClassLoader; import java.util.ArrayList; import java.util.List; public abstract class AbstractJavaClassCompilerTest { @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); public ClassLoader saveAndCompile(final JavaClassContent... klasses) throws IOException { final File folder = this.tempFolder.newFolder(); final List<File> classFiles = new ArrayList<File>(); for (final JavaClassContent c : klasses) { final File classFile = c.makeFile(folder); final File pack = classFile.getParentFile(); if (!pack.isDirectory() && !pack.mkdirs()) { throw new IOException("Can't create folder : " + pack); } FileUtils.writeStringToFile(classFile, c.getText(), "UTF-8"); classFiles.add(classFile); } final JavaCompiler compiler = ToolProvider.getSystemJavaCompiler(); final DiagnosticCollector<JavaFileObject> diagnostics = new DiagnosticCollector<JavaFileObject>(); final StandardJavaFileManager fileManager = compiler.getStandardFileManager(diagnostics, null, null); final Iterable<? 
extends JavaFileObject> compilationUnits = fileManager.getJavaFileObjectsFromFiles(classFiles); if (!compiler.getTask(null, fileManager, null, null, null, compilationUnits).call()) { for (final Diagnostic<?> diagnostic : diagnostics.getDiagnostics()) { System.err.format("Error on line %d in %s%n", diagnostic.getLineNumber(), diagnostic.getSource()); } throw new IOException("Error during compilation"); } final ClassLoader result = new URLClassLoader(new URL[]{folder.toURI().toURL()}); return result; } public final static class JavaClassContent { private final String className; private final String classText; public JavaClassContent(final String className, final String classText) { this.className = className; this.classText = classText; } public File makeFile(final File folder) { return new File(folder, this.className.replace('.', '/') + ".java"); } public String getText() { return this.classText; } } }
improved log for error
src/test/java/com/igormaznitsa/jbbp/testaux/AbstractJavaClassCompilerTest.java
improved log for error
<ide><path>rc/test/java/com/igormaznitsa/jbbp/testaux/AbstractJavaClassCompilerTest.java <ide> for (final Diagnostic<?> diagnostic : diagnostics.getDiagnostics()) { <ide> System.err.format("Error on line %d in %s%n", diagnostic.getLineNumber(), diagnostic.getSource()); <ide> } <add> <add> for(final File f : classFiles){ <add> System.err.println("File '"+f.getName()+'\''); <add> System.err.println("-------------------------------------------"); <add> System.err.println(FileUtils.readFileToString(f)); <add> } <add> <ide> throw new IOException("Error during compilation"); <ide> } <ide> <ide> final ClassLoader result = new URLClassLoader(new URL[]{folder.toURI().toURL()}); <del> <ide> return result; <add> <ide> } <ide> <ide> public final static class JavaClassContent {
Java
mit
77eee36790d52960268e5f36a3b36caf6522273f
0
InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service,InnovateUKGitHub/innovation-funding-service
package org.innovateuk.ifs.organisation.controller; import org.innovateuk.ifs.commons.ZeroDowntime; import org.innovateuk.ifs.commons.rest.RestResult; import org.innovateuk.ifs.organisation.domain.Organisation; import org.innovateuk.ifs.organisation.resource.OrganisationResource; import org.innovateuk.ifs.organisation.transactional.OrganisationInitialCreationService; import org.innovateuk.ifs.organisation.transactional.OrganisationService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.*; import java.util.List; import java.util.Set; /** * This RestController exposes CRUD operations to both the * {@link org.innovateuk.ifs.user.service.OrganisationRestServiceImpl} and other REST-API users * to manage {@link Organisation} related data. */ @RestController @RequestMapping("/organisation") public class OrganisationController { @Autowired private OrganisationService organisationService; @Autowired private OrganisationInitialCreationService organisationCreationService; @GetMapping("/find-by-application-id/{applicationId}") public RestResult<Set<OrganisationResource>> findByApplicationId(@PathVariable("applicationId") final Long applicationId) { return organisationService.findByApplicationId(applicationId).toGetResponse(); } @GetMapping("/find-by-id/{organisationId}") public RestResult<OrganisationResource> findById(@PathVariable("organisationId") final Long organisationId) { return organisationService.findById(organisationId).toGetResponse(); } @GetMapping("/primary-for-user/{userId}") public RestResult<OrganisationResource> getPrimaryForUser(@PathVariable("userId") final long userId) { return organisationService.getPrimaryForUser(userId).toGetResponse(); } @GetMapping("/by-user-and-application-id/{userId}/{applicationId}") public RestResult<OrganisationResource> getByUserAndApplicationId(@PathVariable("userId") final long userId, @PathVariable("applicationId") final long applicationId) { return 
organisationService.getByUserAndApplicationId(userId, applicationId).toGetResponse(); } @GetMapping("/by-user-and-project-id/{userId}/{projectId}") public RestResult<OrganisationResource> getByUserAndProjectId(@PathVariable("userId") final long userId, @PathVariable("projectId") final long projectId) { return organisationService.getByUserAndProjectId(userId, projectId).toGetResponse(); } @GetMapping("/all-by-user-id/{userId}") public RestResult<List<OrganisationResource>> getAllByUserId(@PathVariable("userId") final long userId) { return organisationService.getAllByUserId(userId).toGetResponse(); } @PostMapping("/create-or-match") public RestResult<OrganisationResource> createOrMatch(@RequestBody OrganisationResource organisation) { return organisationCreationService.createOrMatch(organisation).toPostCreateResponse(); } @PostMapping("/create-and-link-by-invite") public RestResult<OrganisationResource> createAndLinkByInvite(@RequestBody OrganisationResource organisation, @RequestParam("inviteHash") String inviteHash) { return organisationCreationService.createAndLinkByInvite(organisation, inviteHash).toPostCreateResponse(); } @PostMapping("/create") public RestResult<OrganisationResource> create(@RequestBody OrganisationResource organisation) { return organisationService.create(organisation).toPostCreateResponse(); } @PutMapping("/update") public RestResult<OrganisationResource> saveResource(@RequestBody OrganisationResource organisationResource) { return organisationService.update(organisationResource).toPutWithBodyResponse(); } @PostMapping("/update-name-and-registration/{organisationId}") public RestResult<OrganisationResource> updateNameAndRegistration(@PathVariable("organisationId") Long organisationId, @RequestParam(value = "name") String name, @RequestParam(value = "registration") String registration) { return organisationService.updateOrganisationNameAndRegistration(organisationId, name, registration).toPostCreateResponse(); } }
ifs-data-layer/ifs-data-service/src/main/java/org/innovateuk/ifs/organisation/controller/OrganisationController.java
package org.innovateuk.ifs.organisation.controller; import org.innovateuk.ifs.commons.ZeroDowntime; import org.innovateuk.ifs.commons.rest.RestResult; import org.innovateuk.ifs.organisation.domain.Organisation; import org.innovateuk.ifs.organisation.resource.OrganisationResource; import org.innovateuk.ifs.organisation.transactional.OrganisationInitialCreationService; import org.innovateuk.ifs.organisation.transactional.OrganisationService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.*; import java.util.List; import java.util.Set; /** * This RestController exposes CRUD operations to both the * {@link org.innovateuk.ifs.user.service.OrganisationRestServiceImpl} and other REST-API users * to manage {@link Organisation} related data. */ @RestController @ZeroDowntime(reference = "IFS-4104", description = "Remove non kebab case mappings. ") @RequestMapping("/organisation") public class OrganisationController { @Autowired private OrganisationService organisationService; @Autowired private OrganisationInitialCreationService organisationCreationService; @GetMapping({"/find-by-application-id/{applicationId}", "/findByApplicationId/{applicationId}"}) public RestResult<Set<OrganisationResource>> findByApplicationId(@PathVariable("applicationId") final Long applicationId) { return organisationService.findByApplicationId(applicationId).toGetResponse(); } @GetMapping({"/find-by-id/{organisationId}", "/findById/{organisationId}"}) public RestResult<OrganisationResource> findById(@PathVariable("organisationId") final Long organisationId) { return organisationService.findById(organisationId).toGetResponse(); } @GetMapping({"/primary-for-user/{userId}", "/getPrimaryForUser/{userId}"}) public RestResult<OrganisationResource> getPrimaryForUser(@PathVariable("userId") final long userId) { return organisationService.getPrimaryForUser(userId).toGetResponse(); } 
@GetMapping("/by-user-and-application-id/{userId}/{applicationId}") public RestResult<OrganisationResource> getByUserAndApplicationId(@PathVariable("userId") final long userId, @PathVariable("applicationId") final long applicationId) { return organisationService.getByUserAndApplicationId(userId, applicationId).toGetResponse(); } @GetMapping("/by-user-and-project-id/{userId}/{projectId}") public RestResult<OrganisationResource> getByUserAndProjectId(@PathVariable("userId") final long userId, @PathVariable("projectId") final long projectId) { return organisationService.getByUserAndProjectId(userId, projectId).toGetResponse(); } @GetMapping("/all-by-user-id/{userId}") public RestResult<List<OrganisationResource>> getAllByUserId(@PathVariable("userId") final long userId) { return organisationService.getAllByUserId(userId).toGetResponse(); } @PostMapping({"/create-or-match", "/createOrMatch"}) public RestResult<OrganisationResource> createOrMatch(@RequestBody OrganisationResource organisation) { return organisationCreationService.createOrMatch(organisation).toPostCreateResponse(); } @PostMapping({"/create-and-link-by-invite", "/createAndLinkByInvite"}) public RestResult<OrganisationResource> createAndLinkByInvite(@RequestBody OrganisationResource organisation, @RequestParam("inviteHash") String inviteHash) { return organisationCreationService.createAndLinkByInvite(organisation, inviteHash).toPostCreateResponse(); } @PostMapping("/create") public RestResult<OrganisationResource> create(@RequestBody OrganisationResource organisation) { return organisationService.create(organisation).toPostCreateResponse(); } @PutMapping("/update") public RestResult<OrganisationResource> saveResource(@RequestBody OrganisationResource organisationResource) { return organisationService.update(organisationResource).toPutWithBodyResponse(); } @PostMapping({"/update-name-and-registration/{organisationId}", "/updateNameAndRegistration/{organisationId}"}) public RestResult<OrganisationResource> 
updateNameAndRegistration(@PathVariable("organisationId") Long organisationId, @RequestParam(value = "name") String name, @RequestParam(value = "registration") String registration) { return organisationService.updateOrganisationNameAndRegistration(organisationId, name, registration).toPostCreateResponse(); } }
IFS-4014 removed old mappings.
ifs-data-layer/ifs-data-service/src/main/java/org/innovateuk/ifs/organisation/controller/OrganisationController.java
IFS-4014 removed old mappings.
<ide><path>fs-data-layer/ifs-data-service/src/main/java/org/innovateuk/ifs/organisation/controller/OrganisationController.java <ide> * to manage {@link Organisation} related data. <ide> */ <ide> @RestController <del>@ZeroDowntime(reference = "IFS-4104", description = "Remove non kebab case mappings. ") <ide> @RequestMapping("/organisation") <ide> public class OrganisationController { <ide> <ide> @Autowired <ide> private OrganisationInitialCreationService organisationCreationService; <ide> <del> @GetMapping({"/find-by-application-id/{applicationId}", "/findByApplicationId/{applicationId}"}) <add> @GetMapping("/find-by-application-id/{applicationId}") <ide> public RestResult<Set<OrganisationResource>> findByApplicationId(@PathVariable("applicationId") final Long applicationId) { <ide> return organisationService.findByApplicationId(applicationId).toGetResponse(); <ide> } <ide> <del> @GetMapping({"/find-by-id/{organisationId}", "/findById/{organisationId}"}) <add> @GetMapping("/find-by-id/{organisationId}") <ide> public RestResult<OrganisationResource> findById(@PathVariable("organisationId") final Long organisationId) { <ide> return organisationService.findById(organisationId).toGetResponse(); <ide> } <ide> <del> @GetMapping({"/primary-for-user/{userId}", "/getPrimaryForUser/{userId}"}) <add> @GetMapping("/primary-for-user/{userId}") <ide> public RestResult<OrganisationResource> getPrimaryForUser(@PathVariable("userId") final long userId) { <ide> return organisationService.getPrimaryForUser(userId).toGetResponse(); <ide> } <ide> return organisationService.getAllByUserId(userId).toGetResponse(); <ide> } <ide> <del> @PostMapping({"/create-or-match", "/createOrMatch"}) <add> @PostMapping("/create-or-match") <ide> public RestResult<OrganisationResource> createOrMatch(@RequestBody OrganisationResource organisation) { <ide> return organisationCreationService.createOrMatch(organisation).toPostCreateResponse(); <ide> } <ide> <del> @PostMapping({"/create-and-link-by-invite", 
"/createAndLinkByInvite"}) <add> @PostMapping("/create-and-link-by-invite") <ide> public RestResult<OrganisationResource> createAndLinkByInvite(@RequestBody OrganisationResource organisation, <ide> @RequestParam("inviteHash") String inviteHash) { <ide> return organisationCreationService.createAndLinkByInvite(organisation, inviteHash).toPostCreateResponse(); <ide> return organisationService.update(organisationResource).toPutWithBodyResponse(); <ide> } <ide> <del> @PostMapping({"/update-name-and-registration/{organisationId}", "/updateNameAndRegistration/{organisationId}"}) <add> @PostMapping("/update-name-and-registration/{organisationId}") <ide> public RestResult<OrganisationResource> updateNameAndRegistration(@PathVariable("organisationId") Long organisationId, @RequestParam(value = "name") String name, @RequestParam(value = "registration") String registration) { <ide> return organisationService.updateOrganisationNameAndRegistration(organisationId, name, registration).toPostCreateResponse(); <ide> }
Java
apache-2.0
0eb1c5e95bf807cb279c8c2dd97e33c61a499d46
0
andstatus/andstatus,andstatus/andstatus,andstatus/andstatus
/* * Copyright (C) 2008 Torgny Bjers * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.xorcode.andtweet; import android.app.AlarmManager; import android.app.AlertDialog; import android.app.Dialog; import android.app.ListActivity; import android.app.NotificationManager; import android.app.PendingIntent; import android.app.ProgressDialog; import android.app.SearchManager; import android.content.ComponentName; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.ServiceConnection; import android.content.SharedPreferences; import android.content.pm.ProviderInfo; import android.database.Cursor; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.os.IBinder; import android.os.RemoteException; import android.os.SystemClock; import android.preference.PreferenceManager; import android.view.KeyEvent; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.Window; import android.widget.AbsListView; import android.widget.AdapterView; import android.widget.LinearLayout; import android.widget.MultiAutoCompleteTextView; import android.widget.TextView; import android.widget.Toast; import com.xorcode.andtweet.data.AndTweetDatabase; import com.xorcode.andtweet.data.AndTweetDatabase.Tweets; /** * @author torgny.bjers */ public class TimelineActivity extends ListActivity implements 
ITimelineActivity { public static String TAG = "TimelineActivity"; // Handler message codes public static final int MSG_TWEETS_CHANGED = 1; public static final int MSG_DATA_LOADING = 2; public static final int MSG_UPDATE_STATUS = 3; public static final int MSG_MANUAL_RELOAD = 4; public static final int MSG_AUTHENTICATION_ERROR = 5; public static final int MSG_LOAD_ITEMS = 6; public static final int MSG_DIRECT_MESSAGES_CHANGED = 7; public static final int MSG_SERVICE_UNAVAILABLE_ERROR = 8; public static final int MSG_REPLIES_CHANGED = 9; public static final int MSG_UPDATED_TITLE = 10; // Handler message status codes public static final int STATUS_LOAD_ITEMS_FAILURE = 0; public static final int STATUS_LOAD_ITEMS_SUCCESS = 1; // Dialog identifier codes public static final int DIALOG_AUTHENTICATION_FAILED = 1; public static final int DIALOG_SENDING_MESSAGE = 2; public static final int DIALOG_SERVICE_UNAVAILABLE = 3; public static final int DIALOG_EXTERNAL_STORAGE = 4; public static final int DIALOG_TIMELINE_LOADING = 5; public static final int DIALOG_EXTERNAL_STORAGE_MISSING = 6; // Intent bundle result keys public static final String INTENT_RESULT_KEY_AUTHENTICATION = "authentication"; // Bundle identifier keys public static final String BUNDLE_KEY_REPLY_ID = "replyId"; public static final String BUNDLE_KEY_CURRENT_PAGE = "currentPage"; public static final String BUNDLE_KEY_IS_LOADING = "isLoading"; public static final int MILLISECONDS = 1000; // Views and widgets protected LinearLayout mListFooter; protected Cursor mCursor; protected NotificationManager mNM; protected IAndTweetService mService; protected SharedPreferences mSP; protected ProgressDialog mProgressDialog; protected Handler mHandler; protected PendingIntent mAlarmSender; protected AlarmManager mAM; protected int mCurrentPage = 1; protected int mTotalItemCount = 0; protected int mFrequency = 180; protected boolean mIsBound; protected boolean mIsLoading; @Override protected void onCreate(Bundle 
savedInstanceState) { super.onCreate(savedInstanceState); // Set up preference manager PreferenceManager.setDefaultValues(this, R.xml.preferences, false); mSP = PreferenceManager.getDefaultSharedPreferences(this); // Request window features before loading the content view requestWindowFeature(Window.FEATURE_CUSTOM_TITLE); String username = mSP.getString("twitter_username", null); String password = mSP.getString("twitter_password", null); if (username == null || "".equals(username) || password == null || "".equals(password)) { startActivity(new Intent(this, SplashActivity.class)); finish(); } loadTheme(); setContentView(R.layout.tweetlist); getWindow().setFeatureInt(Window.FEATURE_CUSTOM_TITLE, R.layout.timeline_title); updateTitle(); if (mSP.getBoolean("storage_use_external", false)) { if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) { showDialog(DIALOG_EXTERNAL_STORAGE_MISSING); } if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED_READ_ONLY)) { Toast.makeText(this, "External storage mounted read-only. Cannot write to database. 
Please re-mount your storage and try again.", Toast.LENGTH_LONG).show(); destroyService(); finish(); } } if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) { if (!mSP.getBoolean("confirmed_external_storage_use", false)) { showDialog(DIALOG_EXTERNAL_STORAGE); } } // Set up notification manager mNM = (NotificationManager) getSystemService(NOTIFICATION_SERVICE); // Get the frequency from preferences mFrequency = Integer.parseInt(mSP.getString("fetch_frequency", "180")); // Set up the alarm manager mAM = (AlarmManager) getSystemService(ALARM_SERVICE); Intent serviceIntent = new Intent(IAndTweetService.class.getName()); mAlarmSender = PendingIntent.getService(this, 0, serviceIntent, 0); mAM.setRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime(), mFrequency * MILLISECONDS, mAlarmSender); } @Override protected void onStart() { super.onStart(); mNM.cancelAll(); } @Override protected void onDestroy() { super.onDestroy(); mNM.cancelAll(); } @Override protected Dialog onCreateDialog(int id) { switch (id) { case DIALOG_AUTHENTICATION_FAILED: return new AlertDialog.Builder(this) .setIcon(android.R.drawable.ic_dialog_alert) .setTitle(R.string.dialog_title_authentication_failed) .setMessage(R.string.dialog_summary_authentication_failed) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(DialogInterface Dialog, int whichButton) { startActivity(new Intent(TimelineActivity.this, PreferencesActivity.class)); } }).create(); case DIALOG_SERVICE_UNAVAILABLE: return new AlertDialog.Builder(this) .setIcon(android.R.drawable.ic_dialog_alert) .setTitle(R.string.dialog_title_service_unavailable) .setMessage(R.string.dialog_summary_service_unavailable) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(DialogInterface Dialog, int whichButton) { } }).create(); case DIALOG_SENDING_MESSAGE: mProgressDialog = new ProgressDialog(this); 
mProgressDialog.setIcon(android.R.drawable.ic_dialog_info); mProgressDialog.setTitle(R.string.dialog_title_sending_message); mProgressDialog.setMessage(getText(R.string.dialog_summary_sending_message)); return mProgressDialog; case DIALOG_TIMELINE_LOADING: mProgressDialog = new ProgressDialog(this); mProgressDialog.setIcon(android.R.drawable.ic_dialog_info); mProgressDialog.setTitle(R.string.dialog_title_timeline_loading); mProgressDialog.setMessage(getText(R.string.dialog_summary_timeline_loading)); return mProgressDialog; case DIALOG_EXTERNAL_STORAGE: return new AlertDialog.Builder(this) .setIcon(android.R.drawable.ic_dialog_info) .setTitle(R.string.dialog_title_external_storage) .setMessage(R.string.dialog_summary_external_storage) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(DialogInterface Dialog, int whichButton) { SharedPreferences.Editor editor = mSP.edit(); editor.putBoolean("confirmed_external_storage_use", true); editor.putBoolean("storage_use_external", true); editor.commit(); destroyService(); finish(); Intent intent = new Intent(TimelineActivity.this, TweetListActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); startActivity(intent); } }) .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface Dialog, int whichButton) { SharedPreferences.Editor editor = mSP.edit(); editor.putBoolean("confirmed_external_storage_use", true); editor.commit(); } }).create(); case DIALOG_EXTERNAL_STORAGE_MISSING: return new AlertDialog.Builder(this) .setIcon(android.R.drawable.ic_dialog_alert) .setTitle(R.string.dialog_title_external_storage_missing) .setMessage(R.string.dialog_summary_external_storage_missing) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(DialogInterface Dialog, int whichButton) { SharedPreferences.Editor editor = mSP.edit(); 
editor.putBoolean("confirmed_external_storage_use", true); editor.putBoolean("storage_use_external", false); editor.commit(); destroyService(); finish(); Intent intent = new Intent(TimelineActivity.this, TweetListActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); startActivity(intent); } }) .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface Dialog, int whichButton) { destroyService(); finish(); } }).create(); default: return super.onCreateDialog(id); } } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.timeline, menu); Intent intent = new Intent(null, getIntent().getData()); intent.addCategory(Intent.CATEGORY_ALTERNATIVE); menu.addIntentOptions(Menu.CATEGORY_ALTERNATIVE, 0, 0, new ComponentName(this, TweetListActivity.class), null, intent, 0, null); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { Intent intent; Bundle appDataBundle; switch (item.getItemId()) { case R.id.preferences_menu_id: startActivity(new Intent(this, PreferencesActivity.class)); break; case R.id.friends_timeline_menu_id: intent = new Intent(this, TweetListActivity.class); appDataBundle = new Bundle(); appDataBundle.putParcelable("content_uri", AndTweetDatabase.Tweets.SEARCH_URI); appDataBundle.putString("selection", AndTweetDatabase.Tweets.TWEET_TYPE + " = ?"); appDataBundle.putStringArray("selectionArgs", new String[] { String.valueOf(Tweets.TWEET_TYPE_TWEET) }); intent.putExtra(SearchManager.APP_DATA, appDataBundle); intent.setAction(Intent.ACTION_SEARCH); startActivity(intent); break; case R.id.direct_messages_menu_id: intent = new Intent(this, MessageListActivity.class); appDataBundle = new Bundle(); appDataBundle.putParcelable("content_uri", AndTweetDatabase.DirectMessages.SEARCH_URI); intent.putExtra(SearchManager.APP_DATA, appDataBundle); 
intent.setAction(Intent.ACTION_SEARCH); startActivity(intent); break; case R.id.search_menu_id: onSearchRequested(); break; case R.id.replies_menu_id: intent = new Intent(this, TweetListActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); String username = mSP.getString("twitter_username", null); if (username != null) { intent.putExtra(SearchManager.QUERY, "@" + username); } appDataBundle = new Bundle(); appDataBundle.putParcelable("content_uri", AndTweetDatabase.Tweets.SEARCH_URI); intent.putExtra(SearchManager.APP_DATA, appDataBundle); intent.setAction(Intent.ACTION_SEARCH); startActivity(intent); break; } return super.onOptionsItemSelected(item); } public void onItemClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) {} public boolean onKey(View v, int keyCode, KeyEvent event) { return false; } public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {} public void onScrollStateChanged(AbsListView view, int scrollState) {} /** * Load the theme for preferences. */ public void loadTheme() { boolean light = mSP.getBoolean("appearance_light_theme", false); StringBuilder theme = new StringBuilder(); String name = mSP.getString("theme", "AndTweet"); if (name.indexOf("Theme.") > -1) { name = name.substring(name.indexOf("Theme.")); } theme.append("Theme."); if (light) { theme.append("Light."); } theme.append(name); setTheme((int) getResources().getIdentifier(theme.toString(), "style", "com.xorcode.andtweet")); } /** * Sets the title with a left and right title. * * @param leftText Left title part * @param rightText Right title part */ public void setTitle(CharSequence leftText, CharSequence rightText) { TextView leftTitle = (TextView) findViewById(R.id.custom_title_left_text); TextView rightTitle = (TextView) findViewById(R.id.custom_title_right_text); leftTitle.setText(leftText); rightTitle.setText(rightText); } /** * Updates the activity title. 
*/ public void updateTitle() { String username = mSP.getString("twitter_username", null); setTitle(getString(R.string.activity_title_format, new Object[] {getTitle(), username}), ""); } /** * Retrieve the text that is currently in the editor. * * @return Text currently in the editor */ protected CharSequence getSavedText() { return ((MultiAutoCompleteTextView) findViewById(R.id.messageEditTextAC)).getText(); } /** * Set the text in the text editor. * * @param text */ protected void setSavedText(CharSequence text) { ((MultiAutoCompleteTextView) findViewById(R.id.messageEditTextAC)).setText(text); } /** * Initialize the user interface. */ protected void initUI() { // Attach listeners to the message list getListView().setOnCreateContextMenuListener(this); getListView().setOnItemClickListener(this); } /** * Initialize service and bind to it. */ protected void bindToService() { if (mSP.contains("automatic_updates") && mSP.getBoolean("automatic_updates", false)) { Intent serviceIntent = new Intent(IAndTweetService.class.getName()); if (!mIsBound) { mAlarmSender = PendingIntent.getService(this, 0, serviceIntent, 0); mAM.setRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime(), mFrequency * MILLISECONDS, mAlarmSender); mIsBound = true; } bindService(serviceIntent, mConnection, Context.BIND_AUTO_CREATE); } } /** * Disconnect and unregister the service. */ protected void disconnectService() { if (mIsBound) { if (mService != null) { try { mService.unregisterCallback(mServiceCallback); } catch (RemoteException e) { // Service crashed, not much we can do. } } unbindService(mConnection); mIsBound = false; } } /** * Disconnects from the service and stops it. */ protected void destroyService() { disconnectService(); stopService(new Intent(IAndTweetService.class.getName())); mService = null; mIsBound = false; } /** * Service connection handler. 
*/ private ServiceConnection mConnection = new ServiceConnection() { public void onServiceConnected(ComponentName name, IBinder service) { mService = IAndTweetService.Stub.asInterface(service); // We want to monitor the service for as long as we are // connected to it. try { mService.registerCallback(mServiceCallback); } catch (RemoteException e) { // Service has already crashed, nothing much we can do // except hope that it will restart. } } public void onServiceDisconnected(ComponentName name) { mService = null; } }; /** * Service callback handler. */ protected IAndTweetServiceCallback mServiceCallback = new IAndTweetServiceCallback.Stub() { /** * Tweets changed callback method * * @param value * @throws RemoteException */ public void tweetsChanged(int value) throws RemoteException { mHandler.sendMessage(mHandler.obtainMessage(MSG_TWEETS_CHANGED, value, 0)); } /** * dataLoading callback method * * @param value * @throws RemoteException */ public void dataLoading(int value) throws RemoteException { mHandler.sendMessage(mHandler.obtainMessage(MSG_DATA_LOADING, value, 0)); } /** * Messages changed callback method * * @param value * @throws RemoteException */ public void messagesChanged(int value) throws RemoteException { mHandler.sendMessage(mHandler.obtainMessage(MSG_DIRECT_MESSAGES_CHANGED, value, 0)); } /** * Replies changed callback method * * @param value * @throws RemoteException */ public void repliesChanged(int value) throws RemoteException { mHandler.sendMessage(mHandler.obtainMessage(MSG_REPLIES_CHANGED, value, 0)); } }; }
src/com/xorcode/andtweet/TimelineActivity.java
/* * Copyright (C) 2008 Torgny Bjers * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.xorcode.andtweet; import android.app.AlarmManager; import android.app.AlertDialog; import android.app.Dialog; import android.app.ListActivity; import android.app.NotificationManager; import android.app.PendingIntent; import android.app.ProgressDialog; import android.app.SearchManager; import android.content.ComponentName; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.ServiceConnection; import android.content.SharedPreferences; import android.database.Cursor; import android.os.Bundle; import android.os.Environment; import android.os.Handler; import android.os.IBinder; import android.os.RemoteException; import android.os.SystemClock; import android.preference.PreferenceManager; import android.view.KeyEvent; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.Window; import android.widget.AbsListView; import android.widget.AdapterView; import android.widget.LinearLayout; import android.widget.MultiAutoCompleteTextView; import android.widget.TextView; import android.widget.Toast; import com.xorcode.andtweet.data.AndTweetDatabase; import com.xorcode.andtweet.data.AndTweetDatabase.Tweets; /** * @author torgny.bjers */ public class TimelineActivity extends ListActivity implements ITimelineActivity { public static String TAG = 
"TimelineActivity"; // Handler message codes public static final int MSG_TWEETS_CHANGED = 1; public static final int MSG_DATA_LOADING = 2; public static final int MSG_UPDATE_STATUS = 3; public static final int MSG_MANUAL_RELOAD = 4; public static final int MSG_AUTHENTICATION_ERROR = 5; public static final int MSG_LOAD_ITEMS = 6; public static final int MSG_DIRECT_MESSAGES_CHANGED = 7; public static final int MSG_SERVICE_UNAVAILABLE_ERROR = 8; public static final int MSG_REPLIES_CHANGED = 9; public static final int MSG_UPDATED_TITLE = 10; // Handler message status codes public static final int STATUS_LOAD_ITEMS_FAILURE = 0; public static final int STATUS_LOAD_ITEMS_SUCCESS = 1; // Dialog identifier codes public static final int DIALOG_AUTHENTICATION_FAILED = 1; public static final int DIALOG_SENDING_MESSAGE = 2; public static final int DIALOG_SERVICE_UNAVAILABLE = 3; public static final int DIALOG_EXTERNAL_STORAGE = 4; public static final int DIALOG_TIMELINE_LOADING = 5; // Intent bundle result keys public static final String INTENT_RESULT_KEY_AUTHENTICATION = "authentication"; // Bundle identifier keys public static final String BUNDLE_KEY_REPLY_ID = "replyId"; public static final String BUNDLE_KEY_CURRENT_PAGE = "currentPage"; public static final String BUNDLE_KEY_IS_LOADING = "isLoading"; public static final int MILLISECONDS = 1000; // Views and widgets protected LinearLayout mListFooter; protected Cursor mCursor; protected NotificationManager mNM; protected IAndTweetService mService; protected SharedPreferences mSP; protected ProgressDialog mProgressDialog; protected Handler mHandler; protected PendingIntent mAlarmSender; protected AlarmManager mAM; protected int mCurrentPage = 1; protected int mTotalItemCount = 0; protected int mFrequency = 180; protected boolean mIsBound; protected boolean mIsLoading; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Set up preference manager 
PreferenceManager.setDefaultValues(this, R.xml.preferences, false); mSP = PreferenceManager.getDefaultSharedPreferences(this); // Request window features before loading the content view requestWindowFeature(Window.FEATURE_CUSTOM_TITLE); String username = mSP.getString("twitter_username", null); String password = mSP.getString("twitter_password", null); if (username == null || "".equals(username) || password == null || "".equals(password)) { startActivity(new Intent(this, SplashActivity.class)); finish(); } loadTheme(); setContentView(R.layout.tweetlist); getWindow().setFeatureInt(Window.FEATURE_CUSTOM_TITLE, R.layout.timeline_title); updateTitle(); if (mSP.getBoolean("storage_use_external", false)) { if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) { Toast.makeText(this, "External storage not present. Aborting. If you start AndTweet again internal memory will be used, data loss may occur.", Toast.LENGTH_LONG).show(); SharedPreferences.Editor editor = mSP.edit(); editor.putBoolean("storage_use_external", false); editor.commit(); destroyService(); finish(); } if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED_READ_ONLY)) { Toast.makeText(this, "External storage mounted read-only. Cannot write to database. 
Please re-mount your storage and try again.", Toast.LENGTH_LONG).show(); destroyService(); finish(); } } if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) { if (!mSP.getBoolean("confirmed_external_storage_use", false)) { showDialog(DIALOG_EXTERNAL_STORAGE); } } // Set up notification manager mNM = (NotificationManager) getSystemService(NOTIFICATION_SERVICE); // Get the frequency from preferences mFrequency = Integer.parseInt(mSP.getString("fetch_frequency", "180")); // Set up the alarm manager mAM = (AlarmManager) getSystemService(ALARM_SERVICE); Intent serviceIntent = new Intent(IAndTweetService.class.getName()); mAlarmSender = PendingIntent.getService(this, 0, serviceIntent, 0); mAM.setRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime(), mFrequency * MILLISECONDS, mAlarmSender); } @Override protected void onStart() { super.onStart(); mNM.cancelAll(); } @Override protected void onDestroy() { super.onDestroy(); mNM.cancelAll(); } @Override protected Dialog onCreateDialog(int id) { switch (id) { case DIALOG_AUTHENTICATION_FAILED: return new AlertDialog.Builder(this) .setIcon(android.R.drawable.ic_dialog_alert) .setTitle(R.string.dialog_title_authentication_failed) .setMessage(R.string.dialog_summary_authentication_failed) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(DialogInterface Dialog, int whichButton) { startActivity(new Intent(TimelineActivity.this, PreferencesActivity.class)); } }).create(); case DIALOG_SERVICE_UNAVAILABLE: return new AlertDialog.Builder(this) .setIcon(android.R.drawable.ic_dialog_alert) .setTitle(R.string.dialog_title_service_unavailable) .setMessage(R.string.dialog_summary_service_unavailable) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(DialogInterface Dialog, int whichButton) { } }).create(); case DIALOG_SENDING_MESSAGE: mProgressDialog = new ProgressDialog(this); 
mProgressDialog.setIcon(android.R.drawable.ic_dialog_info); mProgressDialog.setTitle(R.string.dialog_title_sending_message); mProgressDialog.setMessage(getText(R.string.dialog_summary_sending_message)); return mProgressDialog; case DIALOG_TIMELINE_LOADING: mProgressDialog = new ProgressDialog(this); mProgressDialog.setIcon(android.R.drawable.ic_dialog_info); mProgressDialog.setTitle(R.string.dialog_title_timeline_loading); mProgressDialog.setMessage(getText(R.string.dialog_summary_timeline_loading)); return mProgressDialog; case DIALOG_EXTERNAL_STORAGE: return new AlertDialog.Builder(this) .setIcon(android.R.drawable.ic_dialog_alert) .setTitle(R.string.dialog_title_external_storage) .setMessage(R.string.dialog_summary_external_storage) .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { public void onClick(DialogInterface Dialog, int whichButton) { SharedPreferences.Editor editor = mSP.edit(); editor.putBoolean("confirmed_external_storage_use", true); editor.putBoolean("storage_use_external", true); editor.commit(); destroyService(); finish(); Intent intent = new Intent(TimelineActivity.this, TweetListActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED); startActivity(intent); } }) .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { public void onClick(DialogInterface Dialog, int whichButton) { SharedPreferences.Editor editor = mSP.edit(); editor.putBoolean("confirmed_external_storage_use", true); editor.commit(); } }).create(); default: return super.onCreateDialog(id); } } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.timeline, menu); Intent intent = new Intent(null, getIntent().getData()); intent.addCategory(Intent.CATEGORY_ALTERNATIVE); menu.addIntentOptions(Menu.CATEGORY_ALTERNATIVE, 0, 0, new ComponentName(this, TweetListActivity.class), null, intent, 0, null); return 
true; } @Override public boolean onOptionsItemSelected(MenuItem item) { Intent intent; Bundle appDataBundle; switch (item.getItemId()) { case R.id.preferences_menu_id: startActivity(new Intent(this, PreferencesActivity.class)); break; case R.id.friends_timeline_menu_id: intent = new Intent(this, TweetListActivity.class); appDataBundle = new Bundle(); appDataBundle.putParcelable("content_uri", AndTweetDatabase.Tweets.SEARCH_URI); appDataBundle.putString("selection", AndTweetDatabase.Tweets.TWEET_TYPE + " = ?"); appDataBundle.putStringArray("selectionArgs", new String[] { String.valueOf(Tweets.TWEET_TYPE_TWEET) }); intent.putExtra(SearchManager.APP_DATA, appDataBundle); intent.setAction(Intent.ACTION_SEARCH); startActivity(intent); break; case R.id.direct_messages_menu_id: intent = new Intent(this, MessageListActivity.class); appDataBundle = new Bundle(); appDataBundle.putParcelable("content_uri", AndTweetDatabase.DirectMessages.SEARCH_URI); intent.putExtra(SearchManager.APP_DATA, appDataBundle); intent.setAction(Intent.ACTION_SEARCH); startActivity(intent); break; case R.id.search_menu_id: onSearchRequested(); break; case R.id.replies_menu_id: intent = new Intent(this, TweetListActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); String username = mSP.getString("twitter_username", null); if (username != null) { intent.putExtra(SearchManager.QUERY, "@" + username); } appDataBundle = new Bundle(); appDataBundle.putParcelable("content_uri", AndTweetDatabase.Tweets.SEARCH_URI); intent.putExtra(SearchManager.APP_DATA, appDataBundle); intent.setAction(Intent.ACTION_SEARCH); startActivity(intent); break; } return super.onOptionsItemSelected(item); } public void onItemClick(AdapterView<?> arg0, View arg1, int arg2, long arg3) {} public boolean onKey(View v, int keyCode, KeyEvent event) { return false; } public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {} public void onScrollStateChanged(AbsListView view, int 
scrollState) {} /** * Load the theme for preferences. */ public void loadTheme() { boolean light = mSP.getBoolean("appearance_light_theme", false); StringBuilder theme = new StringBuilder(); String name = mSP.getString("theme", "AndTweet"); if (name.indexOf("Theme.") > -1) { name = name.substring(name.indexOf("Theme.")); } theme.append("Theme."); if (light) { theme.append("Light."); } theme.append(name); setTheme((int) getResources().getIdentifier(theme.toString(), "style", "com.xorcode.andtweet")); } /** * Sets the title with a left and right title. * * @param leftText Left title part * @param rightText Right title part */ public void setTitle(CharSequence leftText, CharSequence rightText) { TextView leftTitle = (TextView) findViewById(R.id.custom_title_left_text); TextView rightTitle = (TextView) findViewById(R.id.custom_title_right_text); leftTitle.setText(leftText); rightTitle.setText(rightText); } /** * Updates the activity title. */ public void updateTitle() { String username = mSP.getString("twitter_username", null); setTitle(getString(R.string.activity_title_format, new Object[] {getTitle(), username}), ""); } /** * Retrieve the text that is currently in the editor. * * @return Text currently in the editor */ protected CharSequence getSavedText() { return ((MultiAutoCompleteTextView) findViewById(R.id.messageEditTextAC)).getText(); } /** * Set the text in the text editor. * * @param text */ protected void setSavedText(CharSequence text) { ((MultiAutoCompleteTextView) findViewById(R.id.messageEditTextAC)).setText(text); } /** * Initialize the user interface. */ protected void initUI() { // Attach listeners to the message list getListView().setOnCreateContextMenuListener(this); getListView().setOnItemClickListener(this); } /** * Initialize service and bind to it. 
*/ protected void bindToService() { if (mSP.contains("automatic_updates") && mSP.getBoolean("automatic_updates", false)) { Intent serviceIntent = new Intent(IAndTweetService.class.getName()); if (!mIsBound) { mAlarmSender = PendingIntent.getService(this, 0, serviceIntent, 0); mAM.setRepeating(AlarmManager.ELAPSED_REALTIME_WAKEUP, SystemClock.elapsedRealtime(), mFrequency * MILLISECONDS, mAlarmSender); mIsBound = true; } bindService(serviceIntent, mConnection, Context.BIND_AUTO_CREATE); } } /** * Disconnect and unregister the service. */ protected void disconnectService() { if (mIsBound) { if (mService != null) { try { mService.unregisterCallback(mServiceCallback); } catch (RemoteException e) { // Service crashed, not much we can do. } } unbindService(mConnection); mIsBound = false; } } /** * Disconnects from the service and stops it. */ protected void destroyService() { disconnectService(); stopService(new Intent(IAndTweetService.class.getName())); mService = null; mIsBound = false; } /** * Service connection handler. */ private ServiceConnection mConnection = new ServiceConnection() { public void onServiceConnected(ComponentName name, IBinder service) { mService = IAndTweetService.Stub.asInterface(service); // We want to monitor the service for as long as we are // connected to it. try { mService.registerCallback(mServiceCallback); } catch (RemoteException e) { // Service has already crashed, nothing much we can do // except hope that it will restart. } } public void onServiceDisconnected(ComponentName name) { mService = null; } }; /** * Service callback handler. 
*/ protected IAndTweetServiceCallback mServiceCallback = new IAndTweetServiceCallback.Stub() { /** * Tweets changed callback method * * @param value * @throws RemoteException */ public void tweetsChanged(int value) throws RemoteException { mHandler.sendMessage(mHandler.obtainMessage(MSG_TWEETS_CHANGED, value, 0)); } /** * dataLoading callback method * * @param value * @throws RemoteException */ public void dataLoading(int value) throws RemoteException { mHandler.sendMessage(mHandler.obtainMessage(MSG_DATA_LOADING, value, 0)); } /** * Messages changed callback method * * @param value * @throws RemoteException */ public void messagesChanged(int value) throws RemoteException { mHandler.sendMessage(mHandler.obtainMessage(MSG_DIRECT_MESSAGES_CHANGED, value, 0)); } /** * Replies changed callback method * * @param value * @throws RemoteException */ public void repliesChanged(int value) throws RemoteException { mHandler.sendMessage(mHandler.obtainMessage(MSG_REPLIES_CHANGED, value, 0)); } }; }
Switched external storage notification to using a dialog instead of a toast. Added dialog for missing external storage. --HG-- extra : convert_revision : svn%3A0f648ebc-ce2a-11dd-a412-a1792178251d/trunk%4096
src/com/xorcode/andtweet/TimelineActivity.java
Switched external storage notification to using a dialog instead of a toast. Added dialog for missing external storage.
<ide><path>rc/com/xorcode/andtweet/TimelineActivity.java <ide> import android.content.Intent; <ide> import android.content.ServiceConnection; <ide> import android.content.SharedPreferences; <add>import android.content.pm.ProviderInfo; <ide> import android.database.Cursor; <ide> import android.os.Bundle; <ide> import android.os.Environment; <ide> public static final int DIALOG_SERVICE_UNAVAILABLE = 3; <ide> public static final int DIALOG_EXTERNAL_STORAGE = 4; <ide> public static final int DIALOG_TIMELINE_LOADING = 5; <add> public static final int DIALOG_EXTERNAL_STORAGE_MISSING = 6; <ide> <ide> // Intent bundle result keys <ide> public static final String INTENT_RESULT_KEY_AUTHENTICATION = "authentication"; <ide> <ide> if (mSP.getBoolean("storage_use_external", false)) { <ide> if (!Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED)) { <del> Toast.makeText(this, "External storage not present. Aborting. If you start AndTweet again internal memory will be used, data loss may occur.", Toast.LENGTH_LONG).show(); <del> SharedPreferences.Editor editor = mSP.edit(); <del> editor.putBoolean("storage_use_external", false); <del> editor.commit(); <del> destroyService(); <del> finish(); <add> showDialog(DIALOG_EXTERNAL_STORAGE_MISSING); <ide> } <ide> if (Environment.getExternalStorageState().equals(Environment.MEDIA_MOUNTED_READ_ONLY)) { <ide> Toast.makeText(this, "External storage mounted read-only. Cannot write to database. 
Please re-mount your storage and try again.", Toast.LENGTH_LONG).show(); <ide> <ide> case DIALOG_EXTERNAL_STORAGE: <ide> return new AlertDialog.Builder(this) <del> .setIcon(android.R.drawable.ic_dialog_alert) <add> .setIcon(android.R.drawable.ic_dialog_info) <ide> .setTitle(R.string.dialog_title_external_storage) <ide> .setMessage(R.string.dialog_summary_external_storage) <ide> .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { <ide> destroyService(); <ide> finish(); <ide> Intent intent = new Intent(TimelineActivity.this, TweetListActivity.class); <del> intent.addFlags(Intent.FLAG_ACTIVITY_RESET_TASK_IF_NEEDED); <add> intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); <ide> startActivity(intent); <ide> } <ide> }) <ide> } <ide> }).create(); <ide> <add> case DIALOG_EXTERNAL_STORAGE_MISSING: <add> return new AlertDialog.Builder(this) <add> .setIcon(android.R.drawable.ic_dialog_alert) <add> .setTitle(R.string.dialog_title_external_storage_missing) <add> .setMessage(R.string.dialog_summary_external_storage_missing) <add> .setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() { <add> public void onClick(DialogInterface Dialog, int whichButton) { <add> SharedPreferences.Editor editor = mSP.edit(); <add> editor.putBoolean("confirmed_external_storage_use", true); <add> editor.putBoolean("storage_use_external", false); <add> editor.commit(); <add> destroyService(); <add> finish(); <add> Intent intent = new Intent(TimelineActivity.this, TweetListActivity.class); <add> intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK); <add> startActivity(intent); <add> } <add> }) <add> .setNegativeButton(android.R.string.cancel, new DialogInterface.OnClickListener() { <add> public void onClick(DialogInterface Dialog, int whichButton) { <add> destroyService(); <add> finish(); <add> } <add> }).create(); <add> <ide> default: <ide> return super.onCreateDialog(id); <ide> }
Java
apache-2.0
1838188b382bd9f930eddd39a86c7e3a21600d6d
0
Jasig/cas,apereo/cas,fogbeam/cas_mirror,rkorn86/cas,apereo/cas,rkorn86/cas,apereo/cas,fogbeam/cas_mirror,apereo/cas,apereo/cas,fogbeam/cas_mirror,rkorn86/cas,Jasig/cas,fogbeam/cas_mirror,Jasig/cas,fogbeam/cas_mirror,apereo/cas,apereo/cas,rkorn86/cas,Jasig/cas,fogbeam/cas_mirror
package org.apereo.cas.ticket.registry; import org.apereo.cas.ticket.ServiceTicket; import org.apereo.cas.ticket.Ticket; import org.apereo.cas.ticket.TicketCatalog; import org.apereo.cas.ticket.TicketDefinition; import org.apereo.cas.ticket.TicketGrantingTicket; import org.apereo.cas.ticket.TicketState; import org.apereo.cas.ticket.serialization.TicketSerializationManager; import org.apereo.cas.util.DateTimeUtils; import org.apereo.cas.util.LoggingUtils; import com.mongodb.client.MongoCollection; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import lombok.val; import org.apache.commons.lang3.StringUtils; import org.hjson.JsonValue; import org.hjson.Stringify; import org.springframework.data.domain.PageRequest; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.TextCriteria; import org.springframework.data.mongodb.core.query.TextQuery; import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.util.StreamUtils; import java.time.Instant; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; /** * A Ticket Registry storage backend based on MongoDB. 
* * @author Misagh Moayyed * @since 5.1.0 */ @Slf4j @RequiredArgsConstructor public class MongoDbTicketRegistry extends AbstractTicketRegistry { private final TicketCatalog ticketCatalog; private final MongoOperations mongoTemplate; private final TicketSerializationManager ticketSerializationManager; @Override public void addTicket(final Ticket ticket) { try { LOGGER.debug("Adding ticket [{}]", ticket.getId()); val holder = buildTicketAsDocument(ticket); val metadata = this.ticketCatalog.find(ticket); if (metadata == null) { LOGGER.error("Could not locate ticket definition in the catalog for ticket [{}]", ticket.getId()); return; } LOGGER.trace("Located ticket definition [{}] in the ticket catalog", metadata); val collectionName = getTicketCollectionInstanceByMetadata(metadata); LOGGER.trace("Found collection [{}] linked to ticket [{}]", collectionName, metadata); this.mongoTemplate.insert(holder, collectionName); LOGGER.debug("Added ticket [{}]", ticket.getId()); } catch (final Exception e) { LOGGER.error("Failed adding [{}]", ticket); LoggingUtils.error(LOGGER, e); } } @Override public Ticket getTicket(final String ticketId, final Predicate<Ticket> predicate) { try { LOGGER.debug("Locating ticket ticketId [{}]", ticketId); val encTicketId = encodeTicketId(ticketId); if (encTicketId == null) { LOGGER.debug("Ticket id [{}] could not be found", ticketId); return null; } val metadata = this.ticketCatalog.find(ticketId); if (metadata == null) { LOGGER.debug("Ticket definition [{}] could not be found in the ticket catalog", ticketId); return null; } val collectionName = getTicketCollectionInstanceByMetadata(metadata); val query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).is(encTicketId)); val d = this.mongoTemplate.findOne(query, TicketHolder.class, collectionName); if (d != null) { val decoded = deserializeTicketFromMongoDocument(d); val result = decodeTicket(decoded); if (predicate.test(result)) { return result; } return null; } } catch (final Exception e) 
{ LOGGER.error("Failed fetching [{}]", ticketId); LoggingUtils.error(LOGGER, e); } return null; } @Override public long deleteAll() { val query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).exists(true)); return this.ticketCatalog.findAll().stream() .map(this::getTicketCollectionInstanceByMetadata) .filter(StringUtils::isNotBlank) .mapToLong(collectionName -> { val countTickets = this.mongoTemplate.count(query, collectionName); mongoTemplate.remove(query, collectionName); return countTickets; }) .sum(); } @Override public Collection<? extends Ticket> getTickets() { return this.ticketCatalog.findAll().stream() .map(this::getTicketCollectionInstanceByMetadata) .map(map -> mongoTemplate.findAll(TicketHolder.class, map)) .flatMap(List::stream) .map(ticket -> decodeTicket(deserializeTicketFromMongoDocument(ticket))) .collect(Collectors.toSet()); } @Override public Ticket updateTicket(final Ticket ticket) { LOGGER.debug("Updating ticket [{}]", ticket); try { val holder = buildTicketAsDocument(ticket); val metadata = this.ticketCatalog.find(ticket); if (metadata == null) { LOGGER.error("Could not locate ticket definition in the catalog for ticket [{}]", ticket.getId()); return null; } LOGGER.debug("Located ticket definition [{}] in the ticket catalog", metadata); val collectionName = getTicketCollectionInstanceByMetadata(metadata); val query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).is(holder.getTicketId())); val update = Update.update(TicketHolder.FIELD_NAME_JSON, holder.getJson()); this.mongoTemplate.upsert(query, update, collectionName); LOGGER.debug("Updated ticket [{}]", ticket); return ticket; } catch (final Exception e) { LOGGER.error("Failed updating [{}]", ticket); LoggingUtils.error(LOGGER, e); } return null; } @Override public Stream<Ticket> getTicketsStream() { return ticketCatalog.findAll().stream() .map(this::getTicketCollectionInstanceByMetadata) .map(map -> mongoTemplate.stream(new Query(), TicketHolder.class, map)) 
.flatMap(StreamUtils::createStreamFromIterator) .map(ticket -> decodeTicket(deserializeTicketFromMongoDocument(ticket))); } @Override public long sessionCount() { return countTicketsByTicketType(TicketGrantingTicket.class); } @Override public long countSessionsFor(final String principalId) { if (isCipherExecutorEnabled()) { return super.countSessionsFor(principalId); } val ticketDefinitions = ticketCatalog.find(TicketGrantingTicket.class); return ticketDefinitions.stream() .map(this::getTicketCollectionInstanceByMetadata) .mapToLong(map -> { val criteria = TextCriteria.forDefaultLanguage().matchingAny(principalId); val query = TextQuery.queryText(criteria) .sortByScore() .with(PageRequest.of(0, 10)); return mongoTemplate.count(query, map); }) .sum(); } @Override public long serviceTicketCount() { return countTicketsByTicketType(ServiceTicket.class); } @Override public boolean deleteSingleTicket(final String ticketIdToDelete) { val ticketId = encodeTicketId(ticketIdToDelete); LOGGER.debug("Deleting ticket [{}]", ticketId); val metadata = this.ticketCatalog.find(ticketIdToDelete); val collectionName = getTicketCollectionInstanceByMetadata(metadata); val query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).is(ticketId)); val res = this.mongoTemplate.remove(query, collectionName); LOGGER.debug("Deleted ticket [{}] with result [{}]", ticketIdToDelete, res); return true; } private long countTicketsByTicketType(final Class<? 
extends Ticket> ticketType) { val ticketDefinitions = ticketCatalog.find(ticketType); return ticketDefinitions.stream() .map(this::getTicketCollectionInstanceByMetadata) .mapToLong(map -> mongoTemplate.count(new Query(), map)) .sum(); } private TicketHolder buildTicketAsDocument(final Ticket ticket) { val encTicket = encodeTicket(ticket); val json = serializeTicketForMongoDocument(encTicket); if (StringUtils.isNotBlank(json)) { LOGGER.trace("Serialized ticket into a JSON document as \n [{}]", JsonValue.readJSON(json).toString(Stringify.FORMATTED)); val expireAt = getExpireAt(ticket); return new TicketHolder(json, encTicket.getId(), encTicket.getClass().getName(), expireAt); } throw new IllegalArgumentException("Ticket " + ticket.getId() + " cannot be serialized to JSON"); } private String getTicketCollectionInstanceByMetadata(final TicketDefinition metadata) { val mapName = metadata.getProperties().getStorageName(); LOGGER.debug("Locating collection name [{}] for ticket definition [{}]", mapName, metadata); val c = getTicketCollectionInstance(mapName); if (c != null) { return c.getNamespace().getCollectionName(); } throw new IllegalArgumentException("Could not locate MongoDb collection " + mapName); } private MongoCollection getTicketCollectionInstance(final String mapName) { try { val inst = this.mongoTemplate.getCollection(mapName); LOGGER.debug("Located MongoDb collection instance [{}]", mapName); return inst; } catch (final Exception e) { LoggingUtils.error(LOGGER, e); } return null; } private String serializeTicketForMongoDocument(final Ticket ticket) { return ticketSerializationManager.serializeTicket(ticket); } private Ticket deserializeTicketFromMongoDocument(final TicketHolder holder) { return ticketSerializationManager.deserializeTicket(holder.getJson(), holder.getType()); } /** * Calculate the time at which the ticket is eligible for automated deletion by MongoDb. * Makes the assumption that the CAS server date and the Mongo server date are in sync. 
*/ private static Date getExpireAt(final Ticket ticket) { val expirationPolicy = ticket.getExpirationPolicy(); val ttl = ticket instanceof TicketState ? expirationPolicy.getTimeToLive((TicketState) ticket) : expirationPolicy.getTimeToLive(); if (ttl < 1) { return null; } val exp = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(ttl); return DateTimeUtils.dateOf(Instant.ofEpochMilli(exp)); } }
support/cas-server-support-mongo-ticket-registry/src/main/java/org/apereo/cas/ticket/registry/MongoDbTicketRegistry.java
package org.apereo.cas.ticket.registry; import org.apereo.cas.ticket.ServiceTicket; import org.apereo.cas.ticket.Ticket; import org.apereo.cas.ticket.TicketCatalog; import org.apereo.cas.ticket.TicketDefinition; import org.apereo.cas.ticket.TicketGrantingTicket; import org.apereo.cas.ticket.TicketState; import org.apereo.cas.ticket.serialization.TicketSerializationManager; import org.apereo.cas.util.DateTimeUtils; import org.apereo.cas.util.LoggingUtils; import com.mongodb.client.MongoCollection; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import lombok.val; import org.apache.commons.lang3.StringUtils; import org.hjson.JsonValue; import org.hjson.Stringify; import org.springframework.data.domain.PageRequest; import org.springframework.data.mongodb.core.MongoOperations; import org.springframework.data.mongodb.core.query.Criteria; import org.springframework.data.mongodb.core.query.Query; import org.springframework.data.mongodb.core.query.TextCriteria; import org.springframework.data.mongodb.core.query.TextQuery; import org.springframework.data.mongodb.core.query.Update; import org.springframework.data.util.StreamUtils; import java.time.Instant; import java.util.Collection; import java.util.Date; import java.util.List; import java.util.concurrent.TimeUnit; import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; /** * A Ticket Registry storage backend based on MongoDB. 
* * @author Misagh Moayyed * @since 5.1.0 */ @Slf4j @RequiredArgsConstructor public class MongoDbTicketRegistry extends AbstractTicketRegistry { private final TicketCatalog ticketCatalog; private final MongoOperations mongoTemplate; private final TicketSerializationManager ticketSerializationManager; @Override public void addTicket(final Ticket ticket) { try { LOGGER.debug("Adding ticket [{}]", ticket.getId()); val holder = buildTicketAsDocument(ticket); val metadata = this.ticketCatalog.find(ticket); if (metadata == null) { LOGGER.error("Could not locate ticket definition in the catalog for ticket [{}]", ticket.getId()); return; } LOGGER.trace("Located ticket definition [{}] in the ticket catalog", metadata); val collectionName = getTicketCollectionInstanceByMetadata(metadata); LOGGER.trace("Found collection [{}] linked to ticket [{}]", collectionName, metadata); this.mongoTemplate.insert(holder, collectionName); LOGGER.debug("Added ticket [{}]", ticket.getId()); } catch (final Exception e) { LOGGER.error("Failed adding [{}]", ticket); LoggingUtils.error(LOGGER, e); } } @Override public Ticket getTicket(final String ticketId, final Predicate<Ticket> predicate) { try { LOGGER.debug("Locating ticket ticketId [{}]", ticketId); val encTicketId = encodeTicketId(ticketId); if (encTicketId == null) { LOGGER.debug("Ticket id [{}] could not be found", ticketId); return null; } val metadata = this.ticketCatalog.find(ticketId); if (metadata == null) { LOGGER.debug("Ticket definition [{}] could not be found in the ticket catalog", ticketId); return null; } val collectionName = getTicketCollectionInstanceByMetadata(metadata); val query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).is(encTicketId)); val d = this.mongoTemplate.findOne(query, TicketHolder.class, collectionName); if (d != null) { val decoded = deserializeTicketFromMongoDocument(d); val result = decodeTicket(decoded); if (predicate.test(result)) { return result; } return null; } } catch (final Exception e) 
{ LOGGER.error("Failed fetching [{}]", ticketId); LoggingUtils.error(LOGGER, e); } return null; } @Override public long deleteAll() { val query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).exists(true)); return this.ticketCatalog.findAll().stream() .map(this::getTicketCollectionInstanceByMetadata) .filter(StringUtils::isNotBlank) .mapToLong(collectionName -> { val countTickets = this.mongoTemplate.count(query, collectionName); mongoTemplate.remove(query, collectionName); return countTickets; }) .sum(); } @Override public Collection<? extends Ticket> getTickets() { return this.ticketCatalog.findAll().stream() .map(this::getTicketCollectionInstanceByMetadata) .map(map -> mongoTemplate.findAll(TicketHolder.class, map)) .flatMap(List::stream) .map(ticket -> decodeTicket(deserializeTicketFromMongoDocument(ticket))) .collect(Collectors.toSet()); } @Override public Ticket updateTicket(final Ticket ticket) { LOGGER.debug("Updating ticket [{}]", ticket); try { val holder = buildTicketAsDocument(ticket); val metadata = this.ticketCatalog.find(ticket); if (metadata == null) { LOGGER.error("Could not locate ticket definition in the catalog for ticket [{}]", ticket.getId()); return null; } LOGGER.debug("Located ticket definition [{}] in the ticket catalog", metadata); val collectionName = getTicketCollectionInstanceByMetadata(metadata); val query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).is(holder.getTicketId())); val update = Update.update(TicketHolder.FIELD_NAME_JSON, holder.getJson()); this.mongoTemplate.upsert(query, update, collectionName); LOGGER.debug("Updated ticket [{}]", ticket); return ticket; } catch (final Exception e) { LOGGER.error("Failed updating [{}]", ticket); LoggingUtils.error(LOGGER, e); } return null; } @Override public Stream<Ticket> getTicketsStream() { return ticketCatalog.findAll().stream() .map(this::getTicketCollectionInstanceByMetadata) .map(map -> mongoTemplate.stream(new Query(), TicketHolder.class, map)) 
.flatMap(StreamUtils::createStreamFromIterator) .map(ticket -> decodeTicket(deserializeTicketFromMongoDocument(ticket))); } @Override public long sessionCount() { return countTicketsByTicketType(TicketGrantingTicket.class); } @Override public long countSessionsFor(final String principalId) { if (isCipherExecutorEnabled()) { return super.countSessionsFor(principalId); } val ticketDefinitions = ticketCatalog.find(TicketGrantingTicket.class); return ticketDefinitions.stream() .map(this::getTicketCollectionInstanceByMetadata) .mapToLong(map -> { val criteria = TextCriteria.forDefaultLanguage().matchingAny(principalId); val query = TextQuery.queryText(criteria) .sortByScore() .with(PageRequest.of(0, 10)); return mongoTemplate.count(query, map); }) .sum(); } @Override public long serviceTicketCount() { return countTicketsByTicketType(ServiceTicket.class); } @Override public boolean deleteSingleTicket(final String ticketIdToDelete) { val ticketId = encodeTicketId(ticketIdToDelete); LOGGER.debug("Deleting ticket [{}]", ticketId); try { val metadata = this.ticketCatalog.find(ticketIdToDelete); val collectionName = getTicketCollectionInstanceByMetadata(metadata); val query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).is(ticketId)); val res = this.mongoTemplate.remove(query, collectionName); LOGGER.debug("Deleted ticket [{}] with result [{}]", ticketIdToDelete, res); return true; } catch (final Exception e) { LOGGER.error("Failed deleting [{}]", ticketId); LoggingUtils.error(LOGGER, e); } return false; } private long countTicketsByTicketType(final Class<? 
extends Ticket> ticketType) { val ticketDefinitions = ticketCatalog.find(ticketType); return ticketDefinitions.stream() .map(this::getTicketCollectionInstanceByMetadata) .mapToLong(map -> mongoTemplate.count(new Query(), map)) .sum(); } private TicketHolder buildTicketAsDocument(final Ticket ticket) { val encTicket = encodeTicket(ticket); val json = serializeTicketForMongoDocument(encTicket); if (StringUtils.isNotBlank(json)) { LOGGER.trace("Serialized ticket into a JSON document as \n [{}]", JsonValue.readJSON(json).toString(Stringify.FORMATTED)); val expireAt = getExpireAt(ticket); return new TicketHolder(json, encTicket.getId(), encTicket.getClass().getName(), expireAt); } throw new IllegalArgumentException("Ticket " + ticket.getId() + " cannot be serialized to JSON"); } private String getTicketCollectionInstanceByMetadata(final TicketDefinition metadata) { val mapName = metadata.getProperties().getStorageName(); LOGGER.debug("Locating collection name [{}] for ticket definition [{}]", mapName, metadata); val c = getTicketCollectionInstance(mapName); if (c != null) { return c.getNamespace().getCollectionName(); } throw new IllegalArgumentException("Could not locate MongoDb collection " + mapName); } private MongoCollection getTicketCollectionInstance(final String mapName) { try { val inst = this.mongoTemplate.getCollection(mapName); LOGGER.debug("Located MongoDb collection instance [{}]", mapName); return inst; } catch (final Exception e) { LoggingUtils.error(LOGGER, e); } return null; } private String serializeTicketForMongoDocument(final Ticket ticket) { try { return ticketSerializationManager.serializeTicket(ticket); } catch (final Exception e) { LoggingUtils.error(LOGGER, e); } return null; } private Ticket deserializeTicketFromMongoDocument(final TicketHolder holder) { return ticketSerializationManager.deserializeTicket(holder.getJson(), holder.getType()); } /** * Calculate the time at which the ticket is eligible for automated deletion by MongoDb. 
* Makes the assumption that the CAS server date and the Mongo server date are in sync. */ private static Date getExpireAt(final Ticket ticket) { val expirationPolicy = ticket.getExpirationPolicy(); val ttl = ticket instanceof TicketState ? expirationPolicy.getTimeToLive((TicketState) ticket) : expirationPolicy.getTimeToLive(); if (ttl < 1) { return null; } val exp = System.currentTimeMillis() + TimeUnit.SECONDS.toMillis(ttl); return DateTimeUtils.dateOf(Instant.ofEpochMilli(exp)); } }
add tests
support/cas-server-support-mongo-ticket-registry/src/main/java/org/apereo/cas/ticket/registry/MongoDbTicketRegistry.java
add tests
<ide><path>upport/cas-server-support-mongo-ticket-registry/src/main/java/org/apereo/cas/ticket/registry/MongoDbTicketRegistry.java <ide> public boolean deleteSingleTicket(final String ticketIdToDelete) { <ide> val ticketId = encodeTicketId(ticketIdToDelete); <ide> LOGGER.debug("Deleting ticket [{}]", ticketId); <del> try { <del> val metadata = this.ticketCatalog.find(ticketIdToDelete); <del> val collectionName = getTicketCollectionInstanceByMetadata(metadata); <del> val query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).is(ticketId)); <del> val res = this.mongoTemplate.remove(query, collectionName); <del> LOGGER.debug("Deleted ticket [{}] with result [{}]", ticketIdToDelete, res); <del> return true; <del> } catch (final Exception e) { <del> LOGGER.error("Failed deleting [{}]", ticketId); <del> LoggingUtils.error(LOGGER, e); <del> } <del> return false; <add> val metadata = this.ticketCatalog.find(ticketIdToDelete); <add> val collectionName = getTicketCollectionInstanceByMetadata(metadata); <add> val query = new Query(Criteria.where(TicketHolder.FIELD_NAME_ID).is(ticketId)); <add> val res = this.mongoTemplate.remove(query, collectionName); <add> LOGGER.debug("Deleted ticket [{}] with result [{}]", ticketIdToDelete, res); <add> return true; <ide> } <ide> <ide> private long countTicketsByTicketType(final Class<? extends Ticket> ticketType) { <ide> } <ide> <ide> private String serializeTicketForMongoDocument(final Ticket ticket) { <del> try { <del> return ticketSerializationManager.serializeTicket(ticket); <del> } catch (final Exception e) { <del> LoggingUtils.error(LOGGER, e); <del> } <del> return null; <add> return ticketSerializationManager.serializeTicket(ticket); <ide> } <ide> <ide> private Ticket deserializeTicketFromMongoDocument(final TicketHolder holder) {
Java
apache-2.0
2f6c8f1f1a0f14cd9a0aed20f5fc40ea58b7a996
0
Neft-io/neft,Neft-io/neft,Neft-io/neft,Neft-io/neft,Neft-io/neft,Neft-io/neft
package io.neft.renderer; import android.app.Activity; import android.content.Context; import android.content.res.Configuration; import android.text.Editable; import android.text.InputType; import android.text.TextWatcher; import android.util.DisplayMetrics; import android.util.Log; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.view.inputmethod.InputMethodManager; import android.widget.EditText; import io.neft.App; import io.neft.client.Action; import io.neft.client.InAction; import io.neft.client.OutAction; import io.neft.client.Reader; import io.neft.MainActivity; class KeyboardText extends EditText { private final MainActivity app; public KeyboardText(Context context, final MainActivity app){ super(context); this.app = app; this.setInputType(InputType.TYPE_CLASS_TEXT); this.setVisibility(View.INVISIBLE); // set layout to get onKeyPreIme this.layout(0, 0, 1, 1); this.setOnKeyListener(new OnKeyListener() { @Override public boolean onKey(View v, int keyCode, KeyEvent event) { return app.renderer.device.onKey(keyCode, event); } }); this.addTextChangedListener(new TextWatcher() { public void afterTextChanged(Editable s) { } public void beforeTextChanged(CharSequence s, int start, int count, int after) { } public void onTextChanged(CharSequence s, int start, int before, int count) { final int startIndex = start + before; final int endIndex = start + count; if (endIndex > startIndex) { final CharSequence text = s.subSequence(startIndex, endIndex); app.client.pushAction(OutAction.KEY_INPUT); app.client.pushString(text.toString()); } } }); } public boolean onKeyPreIme(int keyCode, KeyEvent event){ return app.renderer.device.onKey(keyCode, event); } } public class Device { static final String[] keyCodes; static { keyCodes = new String[KeyEvent.getMaxKeyCode()]; try { keyCodes[KeyEvent.KEYCODE_UNKNOWN] = "Unknown"; keyCodes[KeyEvent.KEYCODE_SOFT_LEFT] = 
"SoftLeft"; keyCodes[KeyEvent.KEYCODE_SOFT_RIGHT] = "SoftRight"; keyCodes[KeyEvent.KEYCODE_HOME] = "Home"; keyCodes[KeyEvent.KEYCODE_BACK] = "Back"; keyCodes[KeyEvent.KEYCODE_CALL] = "Call"; keyCodes[KeyEvent.KEYCODE_ENDCALL] = "EndCall"; keyCodes[KeyEvent.KEYCODE_0] = "0"; keyCodes[KeyEvent.KEYCODE_1] = "1"; keyCodes[KeyEvent.KEYCODE_2] = "2"; keyCodes[KeyEvent.KEYCODE_3] = "3"; keyCodes[KeyEvent.KEYCODE_4] = "4"; keyCodes[KeyEvent.KEYCODE_5] = "5"; keyCodes[KeyEvent.KEYCODE_6] = "6"; keyCodes[KeyEvent.KEYCODE_7] = "7"; keyCodes[KeyEvent.KEYCODE_8] = "8"; keyCodes[KeyEvent.KEYCODE_9] = "9"; keyCodes[KeyEvent.KEYCODE_STAR] = "Star"; keyCodes[KeyEvent.KEYCODE_POUND] = "Pound"; keyCodes[KeyEvent.KEYCODE_DPAD_UP] = "DPadUp"; keyCodes[KeyEvent.KEYCODE_DPAD_DOWN] = "DPadDown"; keyCodes[KeyEvent.KEYCODE_DPAD_LEFT] = "DPadLeft"; keyCodes[KeyEvent.KEYCODE_DPAD_RIGHT] = "DPadRight"; keyCodes[KeyEvent.KEYCODE_DPAD_CENTER] = "DPadCenter"; keyCodes[KeyEvent.KEYCODE_VOLUME_UP] = "VolumeUp"; keyCodes[KeyEvent.KEYCODE_VOLUME_DOWN] = "VolumeDown"; keyCodes[KeyEvent.KEYCODE_POWER] = "Power"; keyCodes[KeyEvent.KEYCODE_CAMERA] = "Camera"; keyCodes[KeyEvent.KEYCODE_CLEAR] = "Clear"; keyCodes[KeyEvent.KEYCODE_A] = "A"; keyCodes[KeyEvent.KEYCODE_B] = "B"; keyCodes[KeyEvent.KEYCODE_C] = "C"; keyCodes[KeyEvent.KEYCODE_D] = "D"; keyCodes[KeyEvent.KEYCODE_E] = "E"; keyCodes[KeyEvent.KEYCODE_F] = "F"; keyCodes[KeyEvent.KEYCODE_G] = "G"; keyCodes[KeyEvent.KEYCODE_H] = "H"; keyCodes[KeyEvent.KEYCODE_I] = "I"; keyCodes[KeyEvent.KEYCODE_J] = "J"; keyCodes[KeyEvent.KEYCODE_K] = "K"; keyCodes[KeyEvent.KEYCODE_L] = "L"; keyCodes[KeyEvent.KEYCODE_M] = "M"; keyCodes[KeyEvent.KEYCODE_N] = "N"; keyCodes[KeyEvent.KEYCODE_O] = "O"; keyCodes[KeyEvent.KEYCODE_P] = "P"; keyCodes[KeyEvent.KEYCODE_Q] = "Q"; keyCodes[KeyEvent.KEYCODE_R] = "R"; keyCodes[KeyEvent.KEYCODE_S] = "S"; keyCodes[KeyEvent.KEYCODE_T] = "T"; keyCodes[KeyEvent.KEYCODE_U] = "U"; keyCodes[KeyEvent.KEYCODE_V] = "V"; 
keyCodes[KeyEvent.KEYCODE_W] = "W"; keyCodes[KeyEvent.KEYCODE_X] = "X"; keyCodes[KeyEvent.KEYCODE_Y] = "Y"; keyCodes[KeyEvent.KEYCODE_Z] = "Z"; keyCodes[KeyEvent.KEYCODE_COMMA] = ","; keyCodes[KeyEvent.KEYCODE_PERIOD] = "."; keyCodes[KeyEvent.KEYCODE_ALT_LEFT] = "AltLeft"; keyCodes[KeyEvent.KEYCODE_ALT_RIGHT] = "AltRight"; keyCodes[KeyEvent.KEYCODE_SHIFT_LEFT] = "ShiftLeft"; keyCodes[KeyEvent.KEYCODE_SHIFT_RIGHT] = "ShiftRight"; keyCodes[KeyEvent.KEYCODE_TAB] = "Tab"; keyCodes[KeyEvent.KEYCODE_SPACE] = "Space"; keyCodes[KeyEvent.KEYCODE_SYM] = "Symbol"; keyCodes[KeyEvent.KEYCODE_EXPLORER] = "Explorer"; keyCodes[KeyEvent.KEYCODE_ENVELOPE] = "Envelope"; keyCodes[KeyEvent.KEYCODE_ENTER] = "Enter"; keyCodes[KeyEvent.KEYCODE_DEL] = "Backspace"; keyCodes[KeyEvent.KEYCODE_GRAVE] = "`"; keyCodes[KeyEvent.KEYCODE_MINUS] = "-"; keyCodes[KeyEvent.KEYCODE_EQUALS] = "="; keyCodes[KeyEvent.KEYCODE_LEFT_BRACKET] = "["; keyCodes[KeyEvent.KEYCODE_RIGHT_BRACKET] = "]"; keyCodes[KeyEvent.KEYCODE_BACKSLASH] = "\\"; keyCodes[KeyEvent.KEYCODE_SEMICOLON] = ";"; keyCodes[KeyEvent.KEYCODE_APOSTROPHE] = "'"; keyCodes[KeyEvent.KEYCODE_SLASH] = "/"; keyCodes[KeyEvent.KEYCODE_AT] = "@"; keyCodes[KeyEvent.KEYCODE_NUM] = "Num"; keyCodes[KeyEvent.KEYCODE_HEADSETHOOK] = "HeadsetHool"; keyCodes[KeyEvent.KEYCODE_FOCUS] = "Focus"; keyCodes[KeyEvent.KEYCODE_PLUS] = "+"; keyCodes[KeyEvent.KEYCODE_MENU] = "Menu"; keyCodes[KeyEvent.KEYCODE_NOTIFICATION] = "Notification"; keyCodes[KeyEvent.KEYCODE_SEARCH] = "Search"; keyCodes[KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE] = "MediaPlayPause"; keyCodes[KeyEvent.KEYCODE_MEDIA_STOP] = "MediaStop"; keyCodes[KeyEvent.KEYCODE_MEDIA_NEXT] = "MediaNext"; keyCodes[KeyEvent.KEYCODE_MEDIA_PREVIOUS] = "MediaPrevious"; keyCodes[KeyEvent.KEYCODE_MEDIA_REWIND] = "MediaRewind"; keyCodes[KeyEvent.KEYCODE_MEDIA_FAST_FORWARD] = "MediaFastForward"; keyCodes[KeyEvent.KEYCODE_MUTE] = "Mute"; keyCodes[KeyEvent.KEYCODE_PAGE_UP] = "PageUp"; keyCodes[KeyEvent.KEYCODE_PAGE_DOWN] = "PageDown"; 
keyCodes[KeyEvent.KEYCODE_PICTSYMBOLS] = "PictureSymbols"; keyCodes[KeyEvent.KEYCODE_SWITCH_CHARSET] = "SwitchCharset"; keyCodes[KeyEvent.KEYCODE_BUTTON_A] = "ButtonA"; keyCodes[KeyEvent.KEYCODE_BUTTON_B] = "ButtonB"; keyCodes[KeyEvent.KEYCODE_BUTTON_C] = "ButtonC"; keyCodes[KeyEvent.KEYCODE_BUTTON_X] = "ButtonX"; keyCodes[KeyEvent.KEYCODE_BUTTON_Y] = "ButtonY"; keyCodes[KeyEvent.KEYCODE_BUTTON_Z] = "ButtonZ"; keyCodes[KeyEvent.KEYCODE_BUTTON_L1] = "ButtonL1"; keyCodes[KeyEvent.KEYCODE_BUTTON_R1] = "ButtonR1"; keyCodes[KeyEvent.KEYCODE_BUTTON_L2] = "ButtonL2"; keyCodes[KeyEvent.KEYCODE_BUTTON_R2] = "ButtonR2"; keyCodes[KeyEvent.KEYCODE_BUTTON_THUMBL] = "ButtonThumbLeft"; keyCodes[KeyEvent.KEYCODE_BUTTON_THUMBR] = "ButtonThumbRight"; keyCodes[KeyEvent.KEYCODE_BUTTON_START] = "ButtonStart"; keyCodes[KeyEvent.KEYCODE_BUTTON_SELECT] = "ButtonSelect"; keyCodes[KeyEvent.KEYCODE_BUTTON_MODE] = "ButtonMode"; keyCodes[KeyEvent.KEYCODE_ESCAPE] = "Escape"; keyCodes[KeyEvent.KEYCODE_FORWARD_DEL] = "Delete"; keyCodes[KeyEvent.KEYCODE_CTRL_LEFT] = "CtrlLeft"; keyCodes[KeyEvent.KEYCODE_CTRL_RIGHT] = "CtrlRight"; keyCodes[KeyEvent.KEYCODE_CAPS_LOCK] = "CapsLock"; keyCodes[KeyEvent.KEYCODE_SCROLL_LOCK] = "ScrollLock"; keyCodes[KeyEvent.KEYCODE_META_LEFT] = "MetaLeft"; keyCodes[KeyEvent.KEYCODE_META_RIGHT] = "MetaRight"; keyCodes[KeyEvent.KEYCODE_FUNCTION] = "Function"; keyCodes[KeyEvent.KEYCODE_SYSRQ] = "PrintScreen"; keyCodes[KeyEvent.KEYCODE_BREAK] = "Break"; keyCodes[KeyEvent.KEYCODE_MOVE_HOME] = "Home"; keyCodes[KeyEvent.KEYCODE_MOVE_END] = "End"; keyCodes[KeyEvent.KEYCODE_INSERT] = "Insert"; keyCodes[KeyEvent.KEYCODE_FORWARD] = "Forward"; keyCodes[KeyEvent.KEYCODE_MEDIA_PLAY] = "MediaPlay"; keyCodes[KeyEvent.KEYCODE_MEDIA_PAUSE] = "MediaPause"; keyCodes[KeyEvent.KEYCODE_MEDIA_CLOSE] = "MediaClose"; keyCodes[KeyEvent.KEYCODE_MEDIA_EJECT] = "MediaEject"; keyCodes[KeyEvent.KEYCODE_MEDIA_RECORD] = "MediaRecord"; keyCodes[KeyEvent.KEYCODE_F1] = "F1"; keyCodes[KeyEvent.KEYCODE_F2] = 
"F2"; keyCodes[KeyEvent.KEYCODE_F3] = "F3"; keyCodes[KeyEvent.KEYCODE_F4] = "F4"; keyCodes[KeyEvent.KEYCODE_F5] = "F5"; keyCodes[KeyEvent.KEYCODE_F6] = "F6"; keyCodes[KeyEvent.KEYCODE_F7] = "F7"; keyCodes[KeyEvent.KEYCODE_F8] = "F8"; keyCodes[KeyEvent.KEYCODE_F9] = "F9"; keyCodes[KeyEvent.KEYCODE_F10] = "F10"; keyCodes[KeyEvent.KEYCODE_F11] = "F11"; keyCodes[KeyEvent.KEYCODE_F12] = "F12"; keyCodes[KeyEvent.KEYCODE_NUM_LOCK] = "NumLock"; keyCodes[KeyEvent.KEYCODE_NUMPAD_0] = "NumPad0"; keyCodes[KeyEvent.KEYCODE_NUMPAD_1] = "NumPad1"; keyCodes[KeyEvent.KEYCODE_NUMPAD_2] = "NumPad2"; keyCodes[KeyEvent.KEYCODE_NUMPAD_3] = "NumPad3"; keyCodes[KeyEvent.KEYCODE_NUMPAD_4] = "NumPad4"; keyCodes[KeyEvent.KEYCODE_NUMPAD_5] = "NumPad5"; keyCodes[KeyEvent.KEYCODE_NUMPAD_6] = "NumPad6"; keyCodes[KeyEvent.KEYCODE_NUMPAD_7] = "NumPad7"; keyCodes[KeyEvent.KEYCODE_NUMPAD_8] = "NumPad8"; keyCodes[KeyEvent.KEYCODE_NUMPAD_9] = "NumPad9"; keyCodes[KeyEvent.KEYCODE_NUMPAD_DIVIDE] = "NumPad/"; keyCodes[KeyEvent.KEYCODE_NUMPAD_MULTIPLY] = "NumPad*"; keyCodes[KeyEvent.KEYCODE_NUMPAD_SUBTRACT] = "NumPad-"; keyCodes[KeyEvent.KEYCODE_NUMPAD_ADD] = "NumPad+"; keyCodes[KeyEvent.KEYCODE_NUMPAD_DOT] = "NumPad."; keyCodes[KeyEvent.KEYCODE_NUMPAD_COMMA] = "NumPad,"; keyCodes[KeyEvent.KEYCODE_NUMPAD_ENTER] = "NumPadEnter"; keyCodes[KeyEvent.KEYCODE_NUMPAD_EQUALS] = "NumPad="; keyCodes[KeyEvent.KEYCODE_NUMPAD_LEFT_PAREN] = "NumPad("; keyCodes[KeyEvent.KEYCODE_NUMPAD_RIGHT_PAREN] = "NumPad)"; keyCodes[KeyEvent.KEYCODE_VOLUME_MUTE] = "VolumeMute"; keyCodes[KeyEvent.KEYCODE_INFO] = "Info"; keyCodes[KeyEvent.KEYCODE_CHANNEL_UP] = "ChannelUp"; keyCodes[KeyEvent.KEYCODE_CHANNEL_DOWN] = "ChannelDown"; keyCodes[KeyEvent.KEYCODE_ZOOM_IN] = "ZoomIn"; keyCodes[KeyEvent.KEYCODE_ZOOM_OUT] = "ZoomOut"; keyCodes[KeyEvent.KEYCODE_TV] = "TV"; keyCodes[KeyEvent.KEYCODE_WINDOW] = "Window"; keyCodes[KeyEvent.KEYCODE_GUIDE] = "Guide"; keyCodes[KeyEvent.KEYCODE_DVR] = "DVR"; keyCodes[KeyEvent.KEYCODE_BOOKMARK] = 
"Bookmark"; keyCodes[KeyEvent.KEYCODE_CAPTIONS] = "Captions"; keyCodes[KeyEvent.KEYCODE_SETTINGS] = "Settings"; keyCodes[KeyEvent.KEYCODE_TV_POWER] = "TVPower"; keyCodes[KeyEvent.KEYCODE_TV_INPUT] = "TVInput"; keyCodes[KeyEvent.KEYCODE_STB_POWER] = "STBPower"; keyCodes[KeyEvent.KEYCODE_STB_INPUT] = "STBInput"; keyCodes[KeyEvent.KEYCODE_AVR_POWER] = "AVRPower"; keyCodes[KeyEvent.KEYCODE_AVR_INPUT] = "AVRInput"; keyCodes[KeyEvent.KEYCODE_PROG_RED] = "ProgrammableRed"; keyCodes[KeyEvent.KEYCODE_PROG_GREEN] = "ProgrammableGreen"; keyCodes[KeyEvent.KEYCODE_PROG_YELLOW] = "ProgrammableYellow"; keyCodes[KeyEvent.KEYCODE_PROG_BLUE] = "ProgrammableBlue"; keyCodes[KeyEvent.KEYCODE_APP_SWITCH] = "AppSwitch"; keyCodes[KeyEvent.KEYCODE_BUTTON_1] = "Button1"; keyCodes[KeyEvent.KEYCODE_BUTTON_2] = "Button2"; keyCodes[KeyEvent.KEYCODE_BUTTON_3] = "Button3"; keyCodes[KeyEvent.KEYCODE_BUTTON_4] = "Button4"; keyCodes[KeyEvent.KEYCODE_BUTTON_5] = "Button5"; keyCodes[KeyEvent.KEYCODE_BUTTON_6] = "Button6"; keyCodes[KeyEvent.KEYCODE_BUTTON_7] = "Button7"; keyCodes[KeyEvent.KEYCODE_BUTTON_8] = "Button8"; keyCodes[KeyEvent.KEYCODE_BUTTON_9] = "Button9"; keyCodes[KeyEvent.KEYCODE_BUTTON_10] = "Button10"; keyCodes[KeyEvent.KEYCODE_BUTTON_11] = "Button11"; keyCodes[KeyEvent.KEYCODE_BUTTON_12] = "Button12"; keyCodes[KeyEvent.KEYCODE_BUTTON_13] = "Button13"; keyCodes[KeyEvent.KEYCODE_BUTTON_14] = "Button14"; keyCodes[KeyEvent.KEYCODE_BUTTON_15] = "Button15"; keyCodes[KeyEvent.KEYCODE_BUTTON_16] = "Button16"; keyCodes[KeyEvent.KEYCODE_LANGUAGE_SWITCH] = "LanguageSwitch"; keyCodes[KeyEvent.KEYCODE_MANNER_MODE] = "MannerMode"; keyCodes[KeyEvent.KEYCODE_3D_MODE] = "3DMode"; keyCodes[KeyEvent.KEYCODE_CONTACTS] = "Contacts"; keyCodes[KeyEvent.KEYCODE_CALENDAR] = "Calendar"; keyCodes[KeyEvent.KEYCODE_MUSIC] = "Music"; keyCodes[KeyEvent.KEYCODE_CALCULATOR] = "Calculator"; keyCodes[KeyEvent.KEYCODE_ZENKAKU_HANKAKU] = "ZenkakuHankaku"; keyCodes[KeyEvent.KEYCODE_EISU] = "Eisu"; 
keyCodes[KeyEvent.KEYCODE_MUHENKAN] = "Muhenkan"; keyCodes[KeyEvent.KEYCODE_HENKAN] = "Henkan"; keyCodes[KeyEvent.KEYCODE_KATAKANA_HIRAGANA] = "KatakanaHiragana"; keyCodes[KeyEvent.KEYCODE_YEN] = "Yen"; keyCodes[KeyEvent.KEYCODE_RO] = "Ro"; keyCodes[KeyEvent.KEYCODE_KANA] = "Kana"; keyCodes[KeyEvent.KEYCODE_ASSIST] = "Assist"; keyCodes[KeyEvent.KEYCODE_BRIGHTNESS_DOWN] = "BrightnessDown"; keyCodes[KeyEvent.KEYCODE_BRIGHTNESS_UP] = "BrightnessUp"; keyCodes[KeyEvent.KEYCODE_MEDIA_AUDIO_TRACK] = "MediaAudioTrack"; keyCodes[KeyEvent.KEYCODE_SLEEP] = "Sleep"; keyCodes[KeyEvent.KEYCODE_WAKEUP] = "WakeUp"; keyCodes[KeyEvent.KEYCODE_PAIRING] = "Pairing"; keyCodes[KeyEvent.KEYCODE_MEDIA_TOP_MENU] = "MediaTopMenu"; keyCodes[KeyEvent.KEYCODE_11] = "11"; keyCodes[KeyEvent.KEYCODE_12] = "12"; keyCodes[KeyEvent.KEYCODE_LAST_CHANNEL] = "LastChannel"; keyCodes[KeyEvent.KEYCODE_TV_DATA_SERVICE] = "TVDataService"; keyCodes[KeyEvent.KEYCODE_VOICE_ASSIST] = "VoiceAssist"; keyCodes[KeyEvent.KEYCODE_TV_RADIO_SERVICE] = "TVRadioService"; keyCodes[KeyEvent.KEYCODE_TV_TELETEXT] = "TVTeletext"; keyCodes[KeyEvent.KEYCODE_TV_NUMBER_ENTRY] = "TVNumberEntry"; keyCodes[KeyEvent.KEYCODE_TV_TERRESTRIAL_ANALOG] = "TVTerrestrialAnalog"; keyCodes[KeyEvent.KEYCODE_TV_TERRESTRIAL_DIGITAL] = "TVTerrestrialDigital"; keyCodes[KeyEvent.KEYCODE_TV_SATELLITE] = "TVSatellite"; keyCodes[KeyEvent.KEYCODE_TV_SATELLITE_BS] = "TVSatelliteBS"; keyCodes[KeyEvent.KEYCODE_TV_SATELLITE_CS] = "TVSatelliteCS"; keyCodes[KeyEvent.KEYCODE_TV_SATELLITE_SERVICE] = "TVSatelliteService"; keyCodes[KeyEvent.KEYCODE_TV_NETWORK] = "TVNetwork"; keyCodes[KeyEvent.KEYCODE_TV_ANTENNA_CABLE] = "TVAntennaCable"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_HDMI_1] = "TVInputHDMI1"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_HDMI_2] = "TVInputHDMI2"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_HDMI_3] = "TVInputHDMI3"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_HDMI_4] = "TVInputHDMI4"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_COMPOSITE_1] = "TVInputComposite1"; 
keyCodes[KeyEvent.KEYCODE_TV_INPUT_COMPOSITE_2] = "TVInputComposite1"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_COMPONENT_1] = "TVInputComponent1"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_COMPONENT_2] = "TVInputComponent2"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_VGA_1] = "TVInputVGA1"; keyCodes[KeyEvent.KEYCODE_TV_AUDIO_DESCRIPTION] = "TVAudioDescription"; keyCodes[KeyEvent.KEYCODE_TV_AUDIO_DESCRIPTION_MIX_UP] = "TVAudioDescriptionMixUp"; keyCodes[KeyEvent.KEYCODE_TV_AUDIO_DESCRIPTION_MIX_DOWN] = "TVAudioDescriptionMixDown"; keyCodes[KeyEvent.KEYCODE_TV_ZOOM_MODE] = "TVZoomMode"; keyCodes[KeyEvent.KEYCODE_TV_CONTENTS_MENU] = "TVContentsMenu"; keyCodes[KeyEvent.KEYCODE_TV_MEDIA_CONTEXT_MENU] = "TVMediaContextMenu"; keyCodes[KeyEvent.KEYCODE_TV_TIMER_PROGRAMMING] = "TVTimerProgramming"; keyCodes[KeyEvent.KEYCODE_HELP] = "Help"; keyCodes[KeyEvent.KEYCODE_NAVIGATE_PREVIOUS] = "NavigatePrevious"; keyCodes[KeyEvent.KEYCODE_NAVIGATE_NEXT] = "NavigateNext"; keyCodes[KeyEvent.KEYCODE_NAVIGATE_IN] = "NavigateIn"; keyCodes[KeyEvent.KEYCODE_NAVIGATE_OUT] = "NavigateOut"; keyCodes[KeyEvent.KEYCODE_MEDIA_SKIP_FORWARD] = "MediaSkipForward"; keyCodes[KeyEvent.KEYCODE_MEDIA_SKIP_BACKWARD] = "MediaSkipBackward"; keyCodes[KeyEvent.KEYCODE_MEDIA_STEP_FORWARD] = "MediaStepForward"; keyCodes[KeyEvent.KEYCODE_MEDIA_STEP_BACKWARD] = "MediaStepBackward"; } catch(ArrayIndexOutOfBoundsException err){ } } public float pixelRatio; public boolean isPhone; public MainActivity app; public boolean keyboardVisible = false; public KeyboardText keyboardText; static void init(Device device, MainActivity app) { device.app = app; // create EditText to handle KeyInput device.keyboardText = new KeyboardText(app.getApplicationContext(), app); app.view.addView(device.keyboardText); // hide keyboard final Window window = ((Activity) app.view.getContext()).getWindow(); window.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_HIDDEN); // DEVICE_PIXEL_RATIO final DisplayMetrics metrics = 
app.getResources().getDisplayMetrics(); device.pixelRatio = metrics.density; app.client.pushAction(OutAction.DEVICE_PIXEL_RATIO); app.client.pushFloat(device.pixelRatio); // DEVICE_IS_PHONE device.isPhone = (app.getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_LARGE; app.client.pushAction(OutAction.DEVICE_IS_PHONE); app.client.pushBoolean(device.isPhone); } static void register(MainActivity app){ final Device device = app.renderer.device; app.client.actions.put(InAction.DEVICE_LOG, new Action() { @Override public void work(Reader reader) { device.log(reader.getString()); } }); app.client.actions.put(InAction.DEVICE_SHOW_KEYBOARD, new Action() { @Override public void work(Reader reader) { device.showKeyboard(); } }); app.client.actions.put(InAction.DEVICE_HIDE_KEYBOARD, new Action() { @Override public void work(Reader reader) { device.hideKeyboard(); } }); } void log(String val){ Log.i("Neft", val); } void showKeyboard(){ InputMethodManager imm = (InputMethodManager) app.getSystemService(Context.INPUT_METHOD_SERVICE); imm.toggleSoftInputFromWindow(app.view.getWindowToken(), InputMethodManager.SHOW_FORCED, 0); keyboardText.requestFocus(); this.keyboardVisible = true; app.client.pushAction(OutAction.DEVICE_KEYBOARD_SHOW); } void hideKeyboard(){ InputMethodManager imm = (InputMethodManager) app.getSystemService(Context.INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(app.view.getWindowToken(), 0); keyboardText.clearFocus(); keyboardText.setText(null); this.keyboardVisible = false; app.client.pushAction(OutAction.DEVICE_KEYBOARD_HIDE); } public boolean onTouchEvent(MotionEvent event){ switch (event.getAction()){ case MotionEvent.ACTION_DOWN: app.client.pushAction(OutAction.POINTER_PRESS); break; case MotionEvent.ACTION_UP: app.client.pushAction(OutAction.POINTER_RELEASE); break; case MotionEvent.ACTION_MOVE: app.client.pushAction(OutAction.POINTER_MOVE); break; default: return true; } int[] 
location = new int[2]; App.getApp().view.getLocationOnScreen(location); float x = event.getRawX() - location[0]; float y = event.getRawY() - location[1]; app.client.pushFloat(x / pixelRatio); app.client.pushFloat(y / pixelRatio); app.client.sendData(); return true; } public boolean onKey(int keyCode, KeyEvent event){ // hide keyboard if (keyboardVisible && (keyCode & (KeyEvent.KEYCODE_BACK | KeyEvent.KEYCODE_ENTER)) > 0) { this.hideKeyboard(); } switch (event.getAction()){ case KeyEvent.ACTION_DOWN: app.client.pushAction(OutAction.KEY_PRESS); break; case KeyEvent.ACTION_UP: app.client.pushAction(OutAction.KEY_RELEASE); break; case KeyEvent.ACTION_MULTIPLE: app.client.pushAction(OutAction.KEY_HOLD); break; default: return false; } app.client.pushString(keyCodes[keyCode]); return keyCode == KeyEvent.KEYCODE_BACK; } }
runtimes/android/app/src/main/java/io/neft/renderer/Device.java
package io.neft.renderer; import android.app.Activity; import android.content.Context; import android.content.res.Configuration; import android.text.Editable; import android.text.InputType; import android.text.TextWatcher; import android.util.DisplayMetrics; import android.util.Log; import android.view.KeyEvent; import android.view.MotionEvent; import android.view.View; import android.view.Window; import android.view.WindowManager; import android.view.inputmethod.InputMethodManager; import android.widget.EditText; import io.neft.App; import io.neft.client.Action; import io.neft.client.InAction; import io.neft.client.OutAction; import io.neft.client.Reader; import io.neft.MainActivity; class KeyboardText extends EditText { private final MainActivity app; public KeyboardText(Context context, final MainActivity app){ super(context); this.app = app; this.setInputType(InputType.TYPE_CLASS_TEXT); this.setVisibility(View.INVISIBLE); // set layout to get onKeyPreIme this.layout(0, 0, 1, 1); this.setOnKeyListener(new OnKeyListener() { @Override public boolean onKey(View v, int keyCode, KeyEvent event) { return app.renderer.device.onKey(keyCode, event); } }); this.addTextChangedListener(new TextWatcher() { public void afterTextChanged(Editable s) { } public void beforeTextChanged(CharSequence s, int start, int count, int after) { } public void onTextChanged(CharSequence s, int start, int before, int count) { final int startIndex = start + before; final int endIndex = start + count; if (endIndex > startIndex) { final CharSequence text = s.subSequence(startIndex, endIndex); app.client.pushAction(OutAction.KEY_INPUT); app.client.pushString(text.toString()); } } }); } public boolean onKeyPreIme(int keyCode, KeyEvent event){ return app.renderer.device.onKey(keyCode, event); } } public class Device { static final String[] keyCodes; static { keyCodes = new String[KeyEvent.getMaxKeyCode()]; try { keyCodes[KeyEvent.KEYCODE_UNKNOWN] = "Unknown"; keyCodes[KeyEvent.KEYCODE_SOFT_LEFT] = 
"SoftLeft"; keyCodes[KeyEvent.KEYCODE_SOFT_RIGHT] = "SoftRight"; keyCodes[KeyEvent.KEYCODE_HOME] = "Home"; keyCodes[KeyEvent.KEYCODE_BACK] = "Back"; keyCodes[KeyEvent.KEYCODE_CALL] = "Call"; keyCodes[KeyEvent.KEYCODE_ENDCALL] = "EndCall"; keyCodes[KeyEvent.KEYCODE_0] = "0"; keyCodes[KeyEvent.KEYCODE_1] = "1"; keyCodes[KeyEvent.KEYCODE_2] = "2"; keyCodes[KeyEvent.KEYCODE_3] = "3"; keyCodes[KeyEvent.KEYCODE_4] = "4"; keyCodes[KeyEvent.KEYCODE_5] = "5"; keyCodes[KeyEvent.KEYCODE_6] = "6"; keyCodes[KeyEvent.KEYCODE_7] = "7"; keyCodes[KeyEvent.KEYCODE_8] = "8"; keyCodes[KeyEvent.KEYCODE_9] = "9"; keyCodes[KeyEvent.KEYCODE_STAR] = "Star"; keyCodes[KeyEvent.KEYCODE_POUND] = "Pound"; keyCodes[KeyEvent.KEYCODE_DPAD_UP] = "DPadUp"; keyCodes[KeyEvent.KEYCODE_DPAD_DOWN] = "DPadDown"; keyCodes[KeyEvent.KEYCODE_DPAD_LEFT] = "DPadLeft"; keyCodes[KeyEvent.KEYCODE_DPAD_RIGHT] = "DPadRight"; keyCodes[KeyEvent.KEYCODE_DPAD_CENTER] = "DPadCenter"; keyCodes[KeyEvent.KEYCODE_VOLUME_UP] = "VolumeUp"; keyCodes[KeyEvent.KEYCODE_VOLUME_DOWN] = "VolumeDown"; keyCodes[KeyEvent.KEYCODE_POWER] = "Power"; keyCodes[KeyEvent.KEYCODE_CAMERA] = "Camera"; keyCodes[KeyEvent.KEYCODE_CLEAR] = "Clear"; keyCodes[KeyEvent.KEYCODE_A] = "A"; keyCodes[KeyEvent.KEYCODE_B] = "B"; keyCodes[KeyEvent.KEYCODE_C] = "C"; keyCodes[KeyEvent.KEYCODE_D] = "D"; keyCodes[KeyEvent.KEYCODE_E] = "E"; keyCodes[KeyEvent.KEYCODE_F] = "F"; keyCodes[KeyEvent.KEYCODE_G] = "G"; keyCodes[KeyEvent.KEYCODE_H] = "H"; keyCodes[KeyEvent.KEYCODE_I] = "I"; keyCodes[KeyEvent.KEYCODE_J] = "J"; keyCodes[KeyEvent.KEYCODE_K] = "K"; keyCodes[KeyEvent.KEYCODE_L] = "L"; keyCodes[KeyEvent.KEYCODE_M] = "M"; keyCodes[KeyEvent.KEYCODE_N] = "N"; keyCodes[KeyEvent.KEYCODE_O] = "O"; keyCodes[KeyEvent.KEYCODE_P] = "P"; keyCodes[KeyEvent.KEYCODE_Q] = "Q"; keyCodes[KeyEvent.KEYCODE_R] = "R"; keyCodes[KeyEvent.KEYCODE_S] = "S"; keyCodes[KeyEvent.KEYCODE_T] = "T"; keyCodes[KeyEvent.KEYCODE_U] = "U"; keyCodes[KeyEvent.KEYCODE_V] = "V"; 
keyCodes[KeyEvent.KEYCODE_W] = "W"; keyCodes[KeyEvent.KEYCODE_X] = "X"; keyCodes[KeyEvent.KEYCODE_Y] = "Y"; keyCodes[KeyEvent.KEYCODE_Z] = "Z"; keyCodes[KeyEvent.KEYCODE_COMMA] = ","; keyCodes[KeyEvent.KEYCODE_PERIOD] = "."; keyCodes[KeyEvent.KEYCODE_ALT_LEFT] = "AltLeft"; keyCodes[KeyEvent.KEYCODE_ALT_RIGHT] = "AltRight"; keyCodes[KeyEvent.KEYCODE_SHIFT_LEFT] = "ShiftLeft"; keyCodes[KeyEvent.KEYCODE_SHIFT_RIGHT] = "ShiftRight"; keyCodes[KeyEvent.KEYCODE_TAB] = "Tab"; keyCodes[KeyEvent.KEYCODE_SPACE] = "Space"; keyCodes[KeyEvent.KEYCODE_SYM] = "Symbol"; keyCodes[KeyEvent.KEYCODE_EXPLORER] = "Explorer"; keyCodes[KeyEvent.KEYCODE_ENVELOPE] = "Envelope"; keyCodes[KeyEvent.KEYCODE_ENTER] = "Enter"; keyCodes[KeyEvent.KEYCODE_DEL] = "Backspace"; keyCodes[KeyEvent.KEYCODE_GRAVE] = "`"; keyCodes[KeyEvent.KEYCODE_MINUS] = "-"; keyCodes[KeyEvent.KEYCODE_EQUALS] = "="; keyCodes[KeyEvent.KEYCODE_LEFT_BRACKET] = "["; keyCodes[KeyEvent.KEYCODE_RIGHT_BRACKET] = "]"; keyCodes[KeyEvent.KEYCODE_BACKSLASH] = "\\"; keyCodes[KeyEvent.KEYCODE_SEMICOLON] = ";"; keyCodes[KeyEvent.KEYCODE_APOSTROPHE] = "'"; keyCodes[KeyEvent.KEYCODE_SLASH] = "/"; keyCodes[KeyEvent.KEYCODE_AT] = "@"; keyCodes[KeyEvent.KEYCODE_NUM] = "Num"; keyCodes[KeyEvent.KEYCODE_HEADSETHOOK] = "HeadsetHool"; keyCodes[KeyEvent.KEYCODE_FOCUS] = "Focus"; keyCodes[KeyEvent.KEYCODE_PLUS] = "+"; keyCodes[KeyEvent.KEYCODE_MENU] = "Menu"; keyCodes[KeyEvent.KEYCODE_NOTIFICATION] = "Notification"; keyCodes[KeyEvent.KEYCODE_SEARCH] = "Search"; keyCodes[KeyEvent.KEYCODE_MEDIA_PLAY_PAUSE] = "MediaPlayPause"; keyCodes[KeyEvent.KEYCODE_MEDIA_STOP] = "MediaStop"; keyCodes[KeyEvent.KEYCODE_MEDIA_NEXT] = "MediaNext"; keyCodes[KeyEvent.KEYCODE_MEDIA_PREVIOUS] = "MediaPrevious"; keyCodes[KeyEvent.KEYCODE_MEDIA_REWIND] = "MediaRewind"; keyCodes[KeyEvent.KEYCODE_MEDIA_FAST_FORWARD] = "MediaFastForward"; keyCodes[KeyEvent.KEYCODE_MUTE] = "Mute"; keyCodes[KeyEvent.KEYCODE_PAGE_UP] = "PageUp"; keyCodes[KeyEvent.KEYCODE_PAGE_DOWN] = "PageDown"; 
keyCodes[KeyEvent.KEYCODE_PICTSYMBOLS] = "PictureSymbols"; keyCodes[KeyEvent.KEYCODE_SWITCH_CHARSET] = "SwitchCharset"; keyCodes[KeyEvent.KEYCODE_BUTTON_A] = "ButtonA"; keyCodes[KeyEvent.KEYCODE_BUTTON_B] = "ButtonB"; keyCodes[KeyEvent.KEYCODE_BUTTON_C] = "ButtonC"; keyCodes[KeyEvent.KEYCODE_BUTTON_X] = "ButtonX"; keyCodes[KeyEvent.KEYCODE_BUTTON_Y] = "ButtonY"; keyCodes[KeyEvent.KEYCODE_BUTTON_Z] = "ButtonZ"; keyCodes[KeyEvent.KEYCODE_BUTTON_L1] = "ButtonL1"; keyCodes[KeyEvent.KEYCODE_BUTTON_R1] = "ButtonR1"; keyCodes[KeyEvent.KEYCODE_BUTTON_L2] = "ButtonL2"; keyCodes[KeyEvent.KEYCODE_BUTTON_R2] = "ButtonR2"; keyCodes[KeyEvent.KEYCODE_BUTTON_THUMBL] = "ButtonThumbLeft"; keyCodes[KeyEvent.KEYCODE_BUTTON_THUMBR] = "ButtonThumbRight"; keyCodes[KeyEvent.KEYCODE_BUTTON_START] = "ButtonStart"; keyCodes[KeyEvent.KEYCODE_BUTTON_SELECT] = "ButtonSelect"; keyCodes[KeyEvent.KEYCODE_BUTTON_MODE] = "ButtonMode"; keyCodes[KeyEvent.KEYCODE_ESCAPE] = "Escape"; keyCodes[KeyEvent.KEYCODE_FORWARD_DEL] = "Delete"; keyCodes[KeyEvent.KEYCODE_CTRL_LEFT] = "CtrlLeft"; keyCodes[KeyEvent.KEYCODE_CTRL_RIGHT] = "CtrlRight"; keyCodes[KeyEvent.KEYCODE_CAPS_LOCK] = "CapsLock"; keyCodes[KeyEvent.KEYCODE_SCROLL_LOCK] = "ScrollLock"; keyCodes[KeyEvent.KEYCODE_META_LEFT] = "MetaLeft"; keyCodes[KeyEvent.KEYCODE_META_RIGHT] = "MetaRight"; keyCodes[KeyEvent.KEYCODE_FUNCTION] = "Function"; keyCodes[KeyEvent.KEYCODE_SYSRQ] = "PrintScreen"; keyCodes[KeyEvent.KEYCODE_BREAK] = "Break"; keyCodes[KeyEvent.KEYCODE_MOVE_HOME] = "Home"; keyCodes[KeyEvent.KEYCODE_MOVE_END] = "End"; keyCodes[KeyEvent.KEYCODE_INSERT] = "Insert"; keyCodes[KeyEvent.KEYCODE_FORWARD] = "Forward"; keyCodes[KeyEvent.KEYCODE_MEDIA_PLAY] = "MediaPlay"; keyCodes[KeyEvent.KEYCODE_MEDIA_PAUSE] = "MediaPause"; keyCodes[KeyEvent.KEYCODE_MEDIA_CLOSE] = "MediaClose"; keyCodes[KeyEvent.KEYCODE_MEDIA_EJECT] = "MediaEject"; keyCodes[KeyEvent.KEYCODE_MEDIA_RECORD] = "MediaRecord"; keyCodes[KeyEvent.KEYCODE_F1] = "F1"; keyCodes[KeyEvent.KEYCODE_F2] = 
"F2"; keyCodes[KeyEvent.KEYCODE_F3] = "F3"; keyCodes[KeyEvent.KEYCODE_F4] = "F4"; keyCodes[KeyEvent.KEYCODE_F5] = "F5"; keyCodes[KeyEvent.KEYCODE_F6] = "F6"; keyCodes[KeyEvent.KEYCODE_F7] = "F7"; keyCodes[KeyEvent.KEYCODE_F8] = "F8"; keyCodes[KeyEvent.KEYCODE_F9] = "F9"; keyCodes[KeyEvent.KEYCODE_F10] = "F10"; keyCodes[KeyEvent.KEYCODE_F11] = "F11"; keyCodes[KeyEvent.KEYCODE_F12] = "F12"; keyCodes[KeyEvent.KEYCODE_NUM_LOCK] = "NumLock"; keyCodes[KeyEvent.KEYCODE_NUMPAD_0] = "NumPad0"; keyCodes[KeyEvent.KEYCODE_NUMPAD_1] = "NumPad1"; keyCodes[KeyEvent.KEYCODE_NUMPAD_2] = "NumPad2"; keyCodes[KeyEvent.KEYCODE_NUMPAD_3] = "NumPad3"; keyCodes[KeyEvent.KEYCODE_NUMPAD_4] = "NumPad4"; keyCodes[KeyEvent.KEYCODE_NUMPAD_5] = "NumPad5"; keyCodes[KeyEvent.KEYCODE_NUMPAD_6] = "NumPad6"; keyCodes[KeyEvent.KEYCODE_NUMPAD_7] = "NumPad7"; keyCodes[KeyEvent.KEYCODE_NUMPAD_8] = "NumPad8"; keyCodes[KeyEvent.KEYCODE_NUMPAD_9] = "NumPad9"; keyCodes[KeyEvent.KEYCODE_NUMPAD_DIVIDE] = "NumPad/"; keyCodes[KeyEvent.KEYCODE_NUMPAD_MULTIPLY] = "NumPad*"; keyCodes[KeyEvent.KEYCODE_NUMPAD_SUBTRACT] = "NumPad-"; keyCodes[KeyEvent.KEYCODE_NUMPAD_ADD] = "NumPad+"; keyCodes[KeyEvent.KEYCODE_NUMPAD_DOT] = "NumPad."; keyCodes[KeyEvent.KEYCODE_NUMPAD_COMMA] = "NumPad,"; keyCodes[KeyEvent.KEYCODE_NUMPAD_ENTER] = "NumPadEnter"; keyCodes[KeyEvent.KEYCODE_NUMPAD_EQUALS] = "NumPad="; keyCodes[KeyEvent.KEYCODE_NUMPAD_LEFT_PAREN] = "NumPad("; keyCodes[KeyEvent.KEYCODE_NUMPAD_RIGHT_PAREN] = "NumPad)"; keyCodes[KeyEvent.KEYCODE_VOLUME_MUTE] = "VolumeMute"; keyCodes[KeyEvent.KEYCODE_INFO] = "Info"; keyCodes[KeyEvent.KEYCODE_CHANNEL_UP] = "ChannelUp"; keyCodes[KeyEvent.KEYCODE_CHANNEL_DOWN] = "ChannelDown"; keyCodes[KeyEvent.KEYCODE_ZOOM_IN] = "ZoomIn"; keyCodes[KeyEvent.KEYCODE_ZOOM_OUT] = "ZoomOut"; keyCodes[KeyEvent.KEYCODE_TV] = "TV"; keyCodes[KeyEvent.KEYCODE_WINDOW] = "Window"; keyCodes[KeyEvent.KEYCODE_GUIDE] = "Guide"; keyCodes[KeyEvent.KEYCODE_DVR] = "DVR"; keyCodes[KeyEvent.KEYCODE_BOOKMARK] = 
"Bookmark"; keyCodes[KeyEvent.KEYCODE_CAPTIONS] = "Captions"; keyCodes[KeyEvent.KEYCODE_SETTINGS] = "Settings"; keyCodes[KeyEvent.KEYCODE_TV_POWER] = "TVPower"; keyCodes[KeyEvent.KEYCODE_TV_INPUT] = "TVInput"; keyCodes[KeyEvent.KEYCODE_STB_POWER] = "STBPower"; keyCodes[KeyEvent.KEYCODE_STB_INPUT] = "STBInput"; keyCodes[KeyEvent.KEYCODE_AVR_POWER] = "AVRPower"; keyCodes[KeyEvent.KEYCODE_AVR_INPUT] = "AVRInput"; keyCodes[KeyEvent.KEYCODE_PROG_RED] = "ProgrammableRed"; keyCodes[KeyEvent.KEYCODE_PROG_GREEN] = "ProgrammableGreen"; keyCodes[KeyEvent.KEYCODE_PROG_YELLOW] = "ProgrammableYellow"; keyCodes[KeyEvent.KEYCODE_PROG_BLUE] = "ProgrammableBlue"; keyCodes[KeyEvent.KEYCODE_APP_SWITCH] = "AppSwitch"; keyCodes[KeyEvent.KEYCODE_BUTTON_1] = "Button1"; keyCodes[KeyEvent.KEYCODE_BUTTON_2] = "Button2"; keyCodes[KeyEvent.KEYCODE_BUTTON_3] = "Button3"; keyCodes[KeyEvent.KEYCODE_BUTTON_4] = "Button4"; keyCodes[KeyEvent.KEYCODE_BUTTON_5] = "Button5"; keyCodes[KeyEvent.KEYCODE_BUTTON_6] = "Button6"; keyCodes[KeyEvent.KEYCODE_BUTTON_7] = "Button7"; keyCodes[KeyEvent.KEYCODE_BUTTON_8] = "Button8"; keyCodes[KeyEvent.KEYCODE_BUTTON_9] = "Button9"; keyCodes[KeyEvent.KEYCODE_BUTTON_10] = "Button10"; keyCodes[KeyEvent.KEYCODE_BUTTON_11] = "Button11"; keyCodes[KeyEvent.KEYCODE_BUTTON_12] = "Button12"; keyCodes[KeyEvent.KEYCODE_BUTTON_13] = "Button13"; keyCodes[KeyEvent.KEYCODE_BUTTON_14] = "Button14"; keyCodes[KeyEvent.KEYCODE_BUTTON_15] = "Button15"; keyCodes[KeyEvent.KEYCODE_BUTTON_16] = "Button16"; keyCodes[KeyEvent.KEYCODE_LANGUAGE_SWITCH] = "LanguageSwitch"; keyCodes[KeyEvent.KEYCODE_MANNER_MODE] = "MannerMode"; keyCodes[KeyEvent.KEYCODE_3D_MODE] = "3DMode"; keyCodes[KeyEvent.KEYCODE_CONTACTS] = "Contacts"; keyCodes[KeyEvent.KEYCODE_CALENDAR] = "Calendar"; keyCodes[KeyEvent.KEYCODE_MUSIC] = "Music"; keyCodes[KeyEvent.KEYCODE_CALCULATOR] = "Calculator"; keyCodes[KeyEvent.KEYCODE_ZENKAKU_HANKAKU] = "ZenkakuHankaku"; keyCodes[KeyEvent.KEYCODE_EISU] = "Eisu"; 
keyCodes[KeyEvent.KEYCODE_MUHENKAN] = "Muhenkan"; keyCodes[KeyEvent.KEYCODE_HENKAN] = "Henkan"; keyCodes[KeyEvent.KEYCODE_KATAKANA_HIRAGANA] = "KatakanaHiragana"; keyCodes[KeyEvent.KEYCODE_YEN] = "Yen"; keyCodes[KeyEvent.KEYCODE_RO] = "Ro"; keyCodes[KeyEvent.KEYCODE_KANA] = "Kana"; keyCodes[KeyEvent.KEYCODE_ASSIST] = "Assist"; keyCodes[KeyEvent.KEYCODE_BRIGHTNESS_DOWN] = "BrightnessDown"; keyCodes[KeyEvent.KEYCODE_BRIGHTNESS_UP] = "BrightnessUp"; keyCodes[KeyEvent.KEYCODE_MEDIA_AUDIO_TRACK] = "MediaAudioTrack"; keyCodes[KeyEvent.KEYCODE_SLEEP] = "Sleep"; keyCodes[KeyEvent.KEYCODE_WAKEUP] = "WakeUp"; keyCodes[KeyEvent.KEYCODE_PAIRING] = "Pairing"; keyCodes[KeyEvent.KEYCODE_MEDIA_TOP_MENU] = "MediaTopMenu"; keyCodes[KeyEvent.KEYCODE_11] = "11"; keyCodes[KeyEvent.KEYCODE_12] = "12"; keyCodes[KeyEvent.KEYCODE_LAST_CHANNEL] = "LastChannel"; keyCodes[KeyEvent.KEYCODE_TV_DATA_SERVICE] = "TVDataService"; keyCodes[KeyEvent.KEYCODE_VOICE_ASSIST] = "VoiceAssist"; keyCodes[KeyEvent.KEYCODE_TV_RADIO_SERVICE] = "TVRadioService"; keyCodes[KeyEvent.KEYCODE_TV_TELETEXT] = "TVTeletext"; keyCodes[KeyEvent.KEYCODE_TV_NUMBER_ENTRY] = "TVNumberEntry"; keyCodes[KeyEvent.KEYCODE_TV_TERRESTRIAL_ANALOG] = "TVTerrestrialAnalog"; keyCodes[KeyEvent.KEYCODE_TV_TERRESTRIAL_DIGITAL] = "TVTerrestrialDigital"; keyCodes[KeyEvent.KEYCODE_TV_SATELLITE] = "TVSatellite"; keyCodes[KeyEvent.KEYCODE_TV_SATELLITE_BS] = "TVSatelliteBS"; keyCodes[KeyEvent.KEYCODE_TV_SATELLITE_CS] = "TVSatelliteCS"; keyCodes[KeyEvent.KEYCODE_TV_SATELLITE_SERVICE] = "TVSatelliteService"; keyCodes[KeyEvent.KEYCODE_TV_NETWORK] = "TVNetwork"; keyCodes[KeyEvent.KEYCODE_TV_ANTENNA_CABLE] = "TVAntennaCable"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_HDMI_1] = "TVInputHDMI1"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_HDMI_2] = "TVInputHDMI2"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_HDMI_3] = "TVInputHDMI3"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_HDMI_4] = "TVInputHDMI4"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_COMPOSITE_1] = "TVInputComposite1"; 
keyCodes[KeyEvent.KEYCODE_TV_INPUT_COMPOSITE_2] = "TVInputComposite1"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_COMPONENT_1] = "TVInputComponent1"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_COMPONENT_2] = "TVInputComponent2"; keyCodes[KeyEvent.KEYCODE_TV_INPUT_VGA_1] = "TVInputVGA1"; keyCodes[KeyEvent.KEYCODE_TV_AUDIO_DESCRIPTION] = "TVAudioDescription"; keyCodes[KeyEvent.KEYCODE_TV_AUDIO_DESCRIPTION_MIX_UP] = "TVAudioDescriptionMixUp"; keyCodes[KeyEvent.KEYCODE_TV_AUDIO_DESCRIPTION_MIX_DOWN] = "TVAudioDescriptionMixDown"; keyCodes[KeyEvent.KEYCODE_TV_ZOOM_MODE] = "TVZoomMode"; keyCodes[KeyEvent.KEYCODE_TV_CONTENTS_MENU] = "TVContentsMenu"; keyCodes[KeyEvent.KEYCODE_TV_MEDIA_CONTEXT_MENU] = "TVMediaContextMenu"; keyCodes[KeyEvent.KEYCODE_TV_TIMER_PROGRAMMING] = "TVTimerProgramming"; keyCodes[KeyEvent.KEYCODE_HELP] = "Help"; keyCodes[KeyEvent.KEYCODE_NAVIGATE_PREVIOUS] = "NavigatePrevious"; keyCodes[KeyEvent.KEYCODE_NAVIGATE_NEXT] = "NavigateNext"; keyCodes[KeyEvent.KEYCODE_NAVIGATE_IN] = "NavigateIn"; keyCodes[KeyEvent.KEYCODE_NAVIGATE_OUT] = "NavigateOut"; keyCodes[KeyEvent.KEYCODE_MEDIA_SKIP_FORWARD] = "MediaSkipForward"; keyCodes[KeyEvent.KEYCODE_MEDIA_SKIP_BACKWARD] = "MediaSkipBackward"; keyCodes[KeyEvent.KEYCODE_MEDIA_STEP_FORWARD] = "MediaStepForward"; keyCodes[KeyEvent.KEYCODE_MEDIA_STEP_BACKWARD] = "MediaStepBackward"; } catch(ArrayIndexOutOfBoundsException err){ } } public float pixelRatio; public boolean isPhone; public MainActivity app; public boolean keyboardVisible = false; public KeyboardText keyboardText; static void init(Device device, MainActivity app) { device.app = app; // create EditText to handle KeyInput device.keyboardText = new KeyboardText(app.getApplicationContext(), app); app.view.addView(device.keyboardText); // hide keyboard final Window window = ((Activity) app.view.getContext()).getWindow(); window.setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_HIDDEN); // DEVICE_PIXEL_RATIO final DisplayMetrics metrics = 
app.getResources().getDisplayMetrics(); device.pixelRatio = metrics.density; app.client.pushAction(OutAction.DEVICE_PIXEL_RATIO); app.client.pushFloat(device.pixelRatio); // DEVICE_IS_PHONE device.isPhone = (app.getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK) >= Configuration.SCREENLAYOUT_SIZE_LARGE; app.client.pushAction(OutAction.DEVICE_IS_PHONE); app.client.pushBoolean(device.isPhone); } static void register(MainActivity app){ final Device device = app.renderer.device; app.client.actions.put(InAction.DEVICE_LOG, new Action() { @Override public void work(Reader reader) { device.log(reader.getString()); } }); app.client.actions.put(InAction.DEVICE_SHOW_KEYBOARD, new Action() { @Override public void work(Reader reader) { device.showKeyboard(); } }); app.client.actions.put(InAction.DEVICE_HIDE_KEYBOARD, new Action() { @Override public void work(Reader reader) { device.hideKeyboard(); } }); } void log(String val){ Log.i("Neft", val); } void showKeyboard(){ InputMethodManager imm = (InputMethodManager) app.getSystemService(Context.INPUT_METHOD_SERVICE); imm.toggleSoftInputFromWindow(app.view.getWindowToken(), InputMethodManager.SHOW_FORCED, 0); keyboardText.requestFocus(); this.keyboardVisible = true; app.client.pushAction(OutAction.DEVICE_KEYBOARD_SHOW); } void hideKeyboard(){ InputMethodManager imm = (InputMethodManager) app.getSystemService(Context.INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(app.view.getWindowToken(), 0); keyboardText.clearFocus(); keyboardText.setText(null); this.keyboardVisible = false; app.client.pushAction(OutAction.DEVICE_KEYBOARD_HIDE); } public boolean onTouchEvent(MotionEvent event){ switch (event.getAction()){ case MotionEvent.ACTION_DOWN: app.client.pushAction(OutAction.POINTER_PRESS); break; case MotionEvent.ACTION_UP: app.client.pushAction(OutAction.POINTER_RELEASE); break; case MotionEvent.ACTION_MOVE: app.client.pushAction(OutAction.POINTER_MOVE); break; default: return true; } int[] 
location = new int[2]; App.getApp().view.getLocationOnScreen(location); float x = event.getRawX() - location[0]; float y = event.getRawY() - location[1]; app.client.pushFloat(x / pixelRatio); app.client.pushFloat(y / pixelRatio); return true; } public boolean onKey(int keyCode, KeyEvent event){ // hide keyboard if (keyboardVisible && (keyCode & (KeyEvent.KEYCODE_BACK | KeyEvent.KEYCODE_ENTER)) > 0) { this.hideKeyboard(); } switch (event.getAction()){ case KeyEvent.ACTION_DOWN: app.client.pushAction(OutAction.KEY_PRESS); break; case KeyEvent.ACTION_UP: app.client.pushAction(OutAction.KEY_RELEASE); break; case KeyEvent.ACTION_MULTIPLE: app.client.pushAction(OutAction.KEY_HOLD); break; default: return false; } app.client.pushString(keyCodes[keyCode]); return keyCode == KeyEvent.KEYCODE_BACK; } }
Android bug with missed pointer events fixed
runtimes/android/app/src/main/java/io/neft/renderer/Device.java
Android bug with missed pointer events fixed
<ide><path>untimes/android/app/src/main/java/io/neft/renderer/Device.java <ide> <ide> app.client.pushFloat(x / pixelRatio); <ide> app.client.pushFloat(y / pixelRatio); <add> app.client.sendData(); <ide> <ide> return true; <ide> }
Java
apache-2.0
b0b0cf976769bf581042060ac39d92b8bf194b4d
0
AliMehrpour/ClipBox
package com.volcano.clipbox.activity; import android.os.Bundle; import android.support.v7.app.ActionBarActivity; import android.support.v7.widget.Toolbar; import com.volcano.clipbox.R; /** * Settings Activity for ClipBox */ public class SettingActivity extends ActionBarActivity{ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_setting); final Toolbar toolbar =(Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); } }
app/src/main/java/com/volcano/clipbox/activity/SettingActivity.java
package com.volcano.clipbox.activity; import android.os.Bundle; import android.support.v7.app.ActionBarActivity; import android.support.v7.widget.Toolbar; import com.volcano.clipbox.R; /** * Settings Activity */ public class SettingActivity extends ActionBarActivity{ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_setting); final Toolbar toolbar =(Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); } }
misc cleanup
app/src/main/java/com/volcano/clipbox/activity/SettingActivity.java
misc cleanup
<ide><path>pp/src/main/java/com/volcano/clipbox/activity/SettingActivity.java <ide> import com.volcano.clipbox.R; <ide> <ide> /** <del> * Settings Activity <add> * Settings Activity for ClipBox <ide> */ <ide> public class SettingActivity extends ActionBarActivity{ <ide>
Java
apache-2.0
82039baaeeb175cfc0d4422aac4d715b262681da
0
spring-cloud/spring-cloud-gcp,GoogleCloudPlatform/spring-cloud-gcp,GoogleCloudPlatform/spring-cloud-gcp,GoogleCloudPlatform/spring-cloud-gcp,spring-cloud/spring-cloud-gcp,GoogleCloudPlatform/spring-cloud-gcp,spring-cloud/spring-cloud-gcp,GoogleCloudPlatform/spring-cloud-gcp,spring-cloud/spring-cloud-gcp
/* * Copyright 2018 original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.gcp.data.spanner.core.mapping; import java.util.List; import com.google.cloud.spanner.Key; import org.junit.Test; import org.springframework.context.ApplicationContext; import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.mapping.PropertyHandler; import org.springframework.data.mapping.SimplePropertyHandler; import org.springframework.data.util.ClassTypeInformation; import org.springframework.expression.spel.SpelEvaluationException; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * @author Chengyuan Zhao * @author Balint Pato */ public class SpannerPersistentEntityImplTests { private final SpannerMappingContext spannerMappingContext = new SpannerMappingContext(); @Test public void testTableName() { SpannerPersistentEntityImpl<TestEntity> entity = new 
SpannerPersistentEntityImpl<>( ClassTypeInformation.from(TestEntity.class)); assertThat(entity.tableName(), is("custom_test_table")); } @Test public void testRawTableName() { SpannerPersistentEntityImpl<EntityNoCustomName> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityNoCustomName.class)); assertThat(entity.tableName(), is("entityNoCustomName")); } @Test public void testEmptyCustomTableName() { SpannerPersistentEntityImpl<EntityEmptyCustomName> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityEmptyCustomName.class)); assertThat(entity.tableName(), is("entityEmptyCustomName")); } @Test public void testColumns() { assertThat(new SpannerMappingContext().getPersistentEntity(TestEntity.class) .columns(), containsInAnyOrder("id", "custom_col")); } @Test(expected = SpelEvaluationException.class) public void testExpressionResolutionWithoutApplicationContext() { SpannerPersistentEntityImpl<EntityWithExpression> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityWithExpression.class)); entity.tableName(); } @Test public void testExpressionResolutionFromApplicationContext() { SpannerPersistentEntityImpl<EntityWithExpression> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityWithExpression.class)); ApplicationContext applicationContext = mock(ApplicationContext.class); when(applicationContext.getBean("tablePostfix")).thenReturn("something"); when(applicationContext.containsBean("tablePostfix")).thenReturn(true); entity.setApplicationContext(applicationContext); assertThat(entity.tableName(), is("table_something")); } @Test(expected = SpannerDataException.class) public void testDuplicatePrimaryKeyOrder() { new SpannerMappingContext() .getPersistentEntity(EntityWithDuplicatePrimaryKeyOrder.class); } @Test(expected = SpannerDataException.class) public void testInvalidPrimaryKeyOrder() { new SpannerMappingContext() 
.getPersistentEntity(EntityWithWronglyOrderedKeys.class).getIdProperty(); } @Test public void testNoIdEntity() { new SpannerMappingContext().getPersistentEntity(EntityWithNoId.class) .getIdProperty(); } @Test public void testGetIdProperty() { assertTrue(new SpannerMappingContext().getPersistentEntity(TestEntity.class) .getIdProperty() instanceof SpannerCompositeKeyProperty); } @Test public void testHasIdProperty() { assertTrue(new SpannerMappingContext().getPersistentEntity(TestEntity.class) .hasIdProperty()); } @Test(expected = SpannerDataException.class) public void testSetIdProperty() { SpannerPersistentEntity entity = new SpannerMappingContext() .getPersistentEntity(TestEntity.class); PersistentProperty idProperty = entity.getIdProperty(); TestEntity t = new TestEntity(); entity.getPropertyAccessor(t).setProperty(idProperty, Key.of("blah")); } @Test public void testIgnoredProperty() { TestEntity t = new TestEntity(); t.id = "a"; t.something = "a"; t.notMapped = "b"; SpannerPersistentEntity p = new SpannerMappingContext() .getPersistentEntity(TestEntity.class); PersistentPropertyAccessor accessor = p.getPropertyAccessor(t); p.doWithProperties((SimplePropertyHandler) property -> assertNotEquals("b", accessor.getProperty(property))); } @Test(expected = SpannerDataException.class) public void testInvalidTableName() { SpannerPersistentEntityImpl<EntityBadName> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityBadName.class)); entity.tableName(); } @Test(expected = SpannerDataException.class) public void testSpELInvalidName() { SpannerPersistentEntityImpl<EntityWithExpression> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityWithExpression.class)); ApplicationContext applicationContext = mock(ApplicationContext.class); when(applicationContext.getBean("tablePostfix")) .thenReturn("; DROP TABLE your_table;"); when(applicationContext.containsBean("tablePostfix")).thenReturn(true); 
entity.setApplicationContext(applicationContext); entity.tableName(); } @Test(expected = SpannerDataException.class) public void testDuplicateEmbeddedColumnName() { this.spannerMappingContext .getPersistentEntity(EmbeddedParentDuplicateColumn.class); } @Test public void testEmbeddedParentKeys() { GrandParentEmbedded grandParentEmbedded = new GrandParentEmbedded(); grandParentEmbedded.id = "1"; ParentEmbedded parentEmbedded = new ParentEmbedded(); parentEmbedded.grandParentEmbedded = grandParentEmbedded; parentEmbedded.id2 = "2"; parentEmbedded.id3 = "3"; ChildEmbedded childEmbedded = new ChildEmbedded(); childEmbedded.parentEmbedded = parentEmbedded; childEmbedded.id4 = "4"; Key key = (Key) this.spannerMappingContext .getPersistentEntity(ChildEmbedded.class) .getIdentifierAccessor(childEmbedded).getIdentifier(); assertEquals( Key.newBuilder().append("1").append("2").append("3").append("4").build(), key); } @Test public void testExcludeEmbeddedColumnNames() { assertThat(this.spannerMappingContext.getPersistentEntity(ChildEmbedded.class) .columns(), containsInAnyOrder("id", "id2", "id3", "id4")); } @Test public void doWithChildrenCollectionsTest() { PropertyHandler<SpannerPersistentProperty> mockHandler = mock(PropertyHandler.class); SpannerPersistentEntity spannerPersistentEntity = this.spannerMappingContext.getPersistentEntity(ParentInRelationship.class); doAnswer(invocation -> { String colName = ((SpannerPersistentProperty) invocation.getArgument(0)) .getName(); assertTrue(colName.equals("childrenA") || colName.equals("childrenB")); return null; }).when(mockHandler).doWithPersistentProperty(any()); spannerPersistentEntity.doWithInterleavedProperties(mockHandler); verify(mockHandler, times(2)).doWithPersistentProperty(any()); } @Test(expected = SpannerDataException.class) public void testParentChildPkNamesMismatch() { this.spannerMappingContext .getPersistentEntity(ParentInRelationshipMismatchedKeyName.class); } private static class ParentInRelationship { 
@PrimaryKey String id; @Interleaved List<ChildAInRelationship> childrenA; @Interleaved List<ChildBInRelationship> childrenB; } private static class ChildAInRelationship { @PrimaryKey String id; @PrimaryKey(keyOrder = 2) String id2; } private static class ChildBInRelationship { @PrimaryKey String id; @PrimaryKey(keyOrder = 2) String id2; } private static class ParentInRelationshipMismatchedKeyName { @PrimaryKey String idNameDifferentThanChildren; @Interleaved List<ChildAInRelationship> childrenA; } private static class GrandParentEmbedded { @PrimaryKey String id; } private static class ParentEmbedded { @PrimaryKey @Embedded GrandParentEmbedded grandParentEmbedded; @PrimaryKey(keyOrder = 2) String id2; @PrimaryKey(keyOrder = 3) String id3; } private static class ChildEmbedded { @PrimaryKey @Embedded ParentEmbedded parentEmbedded; @PrimaryKey(keyOrder = 2) String id4; } private static class EmbeddedParentDuplicateColumn { @PrimaryKey String id; String other; @Embedded EmbeddedChildDuplicateColumn embeddedChildDuplicateColumn; } private static class EmbeddedChildDuplicateColumn { @Column(name = "other") String stuff; } @Table(name = ";DROP TABLE your_table;") private static class EntityBadName { @PrimaryKey(keyOrder = 1) String id; String something; } @Table(name = "custom_test_table") private static class TestEntity { @PrimaryKey(keyOrder = 1) String id; @Column(name = "custom_col") String something; @NotMapped String notMapped; } private static class EntityNoCustomName { @PrimaryKey(keyOrder = 1) String id; String something; } @Table private static class EntityEmptyCustomName { @PrimaryKey(keyOrder = 1) String id; String something; } @Table(name = "#{'table_'.concat(tablePostfix)}") private static class EntityWithExpression { @PrimaryKey(keyOrder = 1) String id; String something; } private static class EntityWithDuplicatePrimaryKeyOrder { @PrimaryKey(keyOrder = 1) String id; @PrimaryKey(keyOrder = 1) String id2; } private static class EntityWithWronglyOrderedKeys { 
@PrimaryKey(keyOrder = 1) String id; @PrimaryKey(keyOrder = 3) String id2; } private static class EntityWithNoId { String id; } }
spring-cloud-gcp-data-spanner/src/test/java/org/springframework/cloud/gcp/data/spanner/core/mapping/SpannerPersistentEntityImplTests.java
/* * Copyright 2018 original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.gcp.data.spanner.core.mapping; import java.util.List; import com.google.cloud.spanner.Key; import org.junit.Test; import org.springframework.context.ApplicationContext; import org.springframework.data.mapping.PersistentProperty; import org.springframework.data.mapping.PersistentPropertyAccessor; import org.springframework.data.mapping.PropertyHandler; import org.springframework.data.mapping.SimplePropertyHandler; import org.springframework.data.util.ClassTypeInformation; import org.springframework.expression.spel.SpelEvaluationException; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; /** * @author Chengyuan Zhao * @author Balint Pato */ public class SpannerPersistentEntityImplTests { private final SpannerMappingContext spannerMappingContext = new SpannerMappingContext(); @Test public void testTableName() { SpannerPersistentEntityImpl<TestEntity> entity = new 
SpannerPersistentEntityImpl<>( ClassTypeInformation.from(TestEntity.class)); assertThat(entity.tableName(), is("custom_test_table")); } @Test public void testRawTableName() { SpannerPersistentEntityImpl<EntityNoCustomName> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityNoCustomName.class)); assertThat(entity.tableName(), is("entityNoCustomName")); } @Test public void testEmptyCustomTableName() { SpannerPersistentEntityImpl<EntityEmptyCustomName> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityEmptyCustomName.class)); assertThat(entity.tableName(), is("entityEmptyCustomName")); } @Test public void testColumns() { assertThat(new SpannerMappingContext().getPersistentEntity(TestEntity.class) .columns(), containsInAnyOrder("id", "custom_col")); } @Test(expected = SpelEvaluationException.class) public void testExpressionResolutionWithoutApplicationContext() { SpannerPersistentEntityImpl<EntityWithExpression> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityWithExpression.class)); entity.tableName(); } @Test public void testExpressionResolutionFromApplicationContext() { SpannerPersistentEntityImpl<EntityWithExpression> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityWithExpression.class)); ApplicationContext applicationContext = mock(ApplicationContext.class); when(applicationContext.getBean("tablePostfix")).thenReturn("something"); when(applicationContext.containsBean("tablePostfix")).thenReturn(true); entity.setApplicationContext(applicationContext); assertThat(entity.tableName(), is("table_something")); } @Test(expected = SpannerDataException.class) public void testDuplicatePrimaryKeyOrder() { new SpannerMappingContext() .getPersistentEntity(EntityWithDuplicatePrimaryKeyOrder.class); } @Test(expected = SpannerDataException.class) public void testInvalidPrimaryKeyOrder() { new SpannerMappingContext() 
.getPersistentEntity(EntityWithWronglyOrderedKeys.class).getIdProperty(); } @Test public void testNoIdEntity() { new SpannerMappingContext().getPersistentEntity(EntityWithNoId.class) .getIdProperty(); } @Test public void testGetIdProperty() { assertTrue(new SpannerMappingContext().getPersistentEntity(TestEntity.class) .getIdProperty() instanceof SpannerCompositeKeyProperty); } @Test public void testHasIdProperty() { assertTrue(new SpannerMappingContext().getPersistentEntity(TestEntity.class) .hasIdProperty()); } @Test(expected = SpannerDataException.class) public void testSetIdProperty() { SpannerPersistentEntity entity = new SpannerMappingContext() .getPersistentEntity(TestEntity.class); PersistentProperty idProperty = entity.getIdProperty(); TestEntity t = new TestEntity(); entity.getPropertyAccessor(t).setProperty(idProperty, Key.of("blah")); } @Test public void testIgnoredProperty() { TestEntity t = new TestEntity(); t.id = "a"; t.something = "a"; t.notMapped = "b"; SpannerPersistentEntity p = new SpannerMappingContext() .getPersistentEntity(TestEntity.class); PersistentPropertyAccessor accessor = p.getPropertyAccessor(t); p.doWithProperties((SimplePropertyHandler) property -> assertNotEquals("b", accessor.getProperty(property))); } @Test(expected = SpannerDataException.class) public void testInvalidTableName() { SpannerPersistentEntityImpl<EntityBadName> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityBadName.class)); entity.tableName(); } @Test(expected = SpannerDataException.class) public void testSpELInvalidName() { SpannerPersistentEntityImpl<EntityWithExpression> entity = new SpannerPersistentEntityImpl<>( ClassTypeInformation.from(EntityWithExpression.class)); ApplicationContext applicationContext = mock(ApplicationContext.class); when(applicationContext.getBean("tablePostfix")) .thenReturn("; DROP TABLE your_table;"); when(applicationContext.containsBean("tablePostfix")).thenReturn(true); 
entity.setApplicationContext(applicationContext); entity.tableName(); } @Test(expected = SpannerDataException.class) public void testDuplicateEmbeddedColumnName() { this.spannerMappingContext .getPersistentEntity(EmbeddedParentDuplicateColumn.class); } @Test public void testEmbeddedParentKeys() { GrandParentEmbedded grandParentEmbedded = new GrandParentEmbedded(); grandParentEmbedded.id = "1"; ParentEmbedded parentEmbedded = new ParentEmbedded(); parentEmbedded.grandParentEmbedded = grandParentEmbedded; parentEmbedded.id2 = "2"; parentEmbedded.id3 = "3"; ChildEmbedded childEmbedded = new ChildEmbedded(); childEmbedded.parentEmbedded = parentEmbedded; childEmbedded.id4 = "4"; Key key = (Key) this.spannerMappingContext .getPersistentEntity(ChildEmbedded.class) .getIdentifierAccessor(childEmbedded).getIdentifier(); assertEquals( Key.newBuilder().append("1").append("2").append("3").append("4").build(), key); } @Test public void testExcludeEmbeddedColumnNames() { assertThat(this.spannerMappingContext.getPersistentEntity(ChildEmbedded.class) .columns(), containsInAnyOrder("id", "id2", "id3", "id4")); } @Test public void doWithChildrenCollectionsTest() { PropertyHandler<SpannerPersistentProperty> mockHandler = mock(PropertyHandler.class); SpannerPersistentEntity spannerPersistentEntity = this.spannerMappingContext.getPersistentEntity(ParentInRelationship.class); doAnswer(invocation -> { String colName = ((SpannerPersistentProperty) invocation.getArgument(0)) .getColumnName(); assertTrue(colName.equals("childrenA") || colName.equals("childrenB")); return null; }).when(mockHandler).doWithPersistentProperty(any()); spannerPersistentEntity.doWithInterleavedProperties(mockHandler); verify(mockHandler, times(2)).doWithPersistentProperty(any()); } @Test(expected = SpannerDataException.class) public void testParentChildPkNamesMismatch() { this.spannerMappingContext .getPersistentEntity(ParentInRelationshipMismatchedKeyName.class); } private static class ParentInRelationship { 
@PrimaryKey String id; @Interleaved List<ChildAInRelationship> childrenA; @Interleaved List<ChildBInRelationship> childrenB; } private static class ChildAInRelationship { @PrimaryKey String id; @PrimaryKey(keyOrder = 2) String id2; } private static class ChildBInRelationship { @PrimaryKey String id; @PrimaryKey(keyOrder = 2) String id2; } private static class ParentInRelationshipMismatchedKeyName { @PrimaryKey String idNameDifferentThanChildren; @Interleaved List<ChildAInRelationship> childrenA; } private static class GrandParentEmbedded { @PrimaryKey String id; } private static class ParentEmbedded { @PrimaryKey @Embedded GrandParentEmbedded grandParentEmbedded; @PrimaryKey(keyOrder = 2) String id2; @PrimaryKey(keyOrder = 3) String id3; } private static class ChildEmbedded { @PrimaryKey @Embedded ParentEmbedded parentEmbedded; @PrimaryKey(keyOrder = 2) String id4; } private static class EmbeddedParentDuplicateColumn { @PrimaryKey String id; String other; @Embedded EmbeddedChildDuplicateColumn embeddedChildDuplicateColumn; } private static class EmbeddedChildDuplicateColumn { @Column(name = "other") String stuff; } @Table(name = ";DROP TABLE your_table;") private static class EntityBadName { @PrimaryKey(keyOrder = 1) String id; String something; } @Table(name = "custom_test_table") private static class TestEntity { @PrimaryKey(keyOrder = 1) String id; @Column(name = "custom_col") String something; @NotMapped String notMapped; } private static class EntityNoCustomName { @PrimaryKey(keyOrder = 1) String id; String something; } @Table private static class EntityEmptyCustomName { @PrimaryKey(keyOrder = 1) String id; String something; } @Table(name = "#{'table_'.concat(tablePostfix)}") private static class EntityWithExpression { @PrimaryKey(keyOrder = 1) String id; String something; } private static class EntityWithDuplicatePrimaryKeyOrder { @PrimaryKey(keyOrder = 1) String id; @PrimaryKey(keyOrder = 1) String id2; } private static class EntityWithWronglyOrderedKeys { 
@PrimaryKey(keyOrder = 1) String id; @PrimaryKey(keyOrder = 3) String id2; } private static class EntityWithNoId { String id; } }
Spanner test fix from merge (#945)
spring-cloud-gcp-data-spanner/src/test/java/org/springframework/cloud/gcp/data/spanner/core/mapping/SpannerPersistentEntityImplTests.java
Spanner test fix from merge (#945)
<ide><path>pring-cloud-gcp-data-spanner/src/test/java/org/springframework/cloud/gcp/data/spanner/core/mapping/SpannerPersistentEntityImplTests.java <ide> this.spannerMappingContext.getPersistentEntity(ParentInRelationship.class); <ide> doAnswer(invocation -> { <ide> String colName = ((SpannerPersistentProperty) invocation.getArgument(0)) <del> .getColumnName(); <add> .getName(); <ide> assertTrue(colName.equals("childrenA") || colName.equals("childrenB")); <ide> return null; <ide> }).when(mockHandler).doWithPersistentProperty(any());
Java
apache-2.0
e869a48116ec549170cee7ef7d415ef71a8c50f9
0
hannesa2/AndroidSlidingUpPanel,hannesa2/AndroidSlidingUpPanel
package com.sothree.slidinguppanel; import android.annotation.SuppressLint; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.PixelFormat; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.os.Parcelable; import android.util.AttributeSet; import android.util.Log; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.accessibility.AccessibilityEvent; import android.view.animation.AnimationUtils; import android.view.animation.Interpolator; import androidx.core.content.ContextCompat; import androidx.core.view.ViewCompat; import com.sothree.slidinguppanel.canvassaveproxy.CanvasSaveProxy; import com.sothree.slidinguppanel.canvassaveproxy.CanvasSaveProxyFactory; import com.sothree.slidinguppanel.library.R; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; public class SlidingUpPanelLayout extends ViewGroup { private static final String TAG = SlidingUpPanelLayout.class.getSimpleName(); /** * Default peeking out panel height */ private static final int DEFAULT_PANEL_HEIGHT = 68; // dp; /** * Default anchor point height */ private static final float DEFAULT_ANCHOR_POINT = 1.0f; // In relative % /** * Default maximum sliding offset */ private static final float DEFAULT_MAX_SLIDING_OFFSET = 1.0f; /** * Default initial state for the component */ private static PanelState DEFAULT_SLIDE_STATE = PanelState.COLLAPSED; /** * Default height of the shadow above the peeking out panel */ private static final int DEFAULT_SHADOW_HEIGHT = 4; // dp; /** * If no fade color is given by default it will fade to 80% gray. 
*/ private static final int DEFAULT_FADE_COLOR = 0x99000000; /** * Default Minimum velocity that will be detected as a fling */ private static final int DEFAULT_MIN_FLING_VELOCITY = 400; // dips per second /** * Default is set to false because that is how it was written */ private static final boolean DEFAULT_OVERLAY_FLAG = false; /** * Default is set to true for clip panel for performance reasons */ private static final boolean DEFAULT_CLIP_PANEL_FLAG = true; /** * Default attributes for layout */ private static final int[] DEFAULT_ATTRS = new int[]{ android.R.attr.gravity }; /** * Tag for the sliding state stored inside the bundle */ public static final String SLIDING_STATE = "sliding_state"; /** * Minimum velocity that will be detected as a fling */ private int mMinFlingVelocity = DEFAULT_MIN_FLING_VELOCITY; /** * The fade color used for the panel covered by the slider. 0 = no fading. */ private int mCoveredFadeColor = DEFAULT_FADE_COLOR; /** * Default parallax length of the main view */ private static final int DEFAULT_PARALLAX_OFFSET = 0; /** * The paint used to dim the main layout when sliding */ private final Paint mCoveredFadePaint = new Paint(); /** * Drawable used to draw the shadow between panes. */ private final Drawable mShadowDrawable; /** * The size of the overhang in pixels. */ private int mPanelHeight = -1; /** * The size of the shadow in pixels. */ private int mShadowHeight = -1; /** * Parallax offset */ private int mParallaxOffset = -1; /** * True if the collapsed panel should be dragged up. */ private boolean mIsSlidingUp; /** * Panel overlays the windows instead of putting it underneath it. */ private boolean mOverlayContent = DEFAULT_OVERLAY_FLAG; /** * The main view is clipped to the main top border */ private boolean mClipPanel = DEFAULT_CLIP_PANEL_FLAG; /** * If provided, the panel can be dragged by only this view. Otherwise, the entire panel can be * used for dragging. 
*/ private View mDragView; /** * If provided, the panel can be dragged by only this view. Otherwise, the entire panel can be * used for dragging. */ private int mDragViewResId = -1; /** * If provided, the panel will transfer the scroll from this view to itself when needed. */ private View mScrollableView; private int mScrollableViewResId; private ScrollableViewHelper mScrollableViewHelper = new ScrollableViewHelper(); /** * The child view that can slide, if any. */ private View mSlideableView; /** * The main view */ private View mMainView; /** * Current state of the slideable view. */ public enum PanelState { EXPANDED, COLLAPSED, ANCHORED, HIDDEN, DRAGGING } private PanelState mSlideState = DEFAULT_SLIDE_STATE; /** * If the current slide state is DRAGGING, this will store the last non dragging state */ private PanelState mLastNotDraggingSlideState = DEFAULT_SLIDE_STATE; /** * How far the panel is offset from its expanded position. * range [0, 1] where 0 = collapsed, 1 = expanded. */ private float mSlideOffset; /** * How far in pixels the slideable panel may move. */ private int mSlideRange; /** * Maximum sliding panel movement in expanded state */ private float mMaxSlideOffset = DEFAULT_MAX_SLIDING_OFFSET; /** * An anchor point where the panel can stop during sliding */ private float mAnchorPoint = 1.f; /** * A panel view is locked into internal scrolling or another condition that * is preventing a drag. 
*/ private boolean mIsUnableToDrag; /** * Flag indicating that sliding feature is enabled\disabled */ private boolean mIsTouchEnabled; private float mPrevMotionX; private float mPrevMotionY; private float mInitialMotionX; private float mInitialMotionY; private boolean mIsScrollableViewHandlingTouch = false; private final List<PanelSlideListener> mPanelSlideListeners = new CopyOnWriteArrayList<>(); private View.OnClickListener mFadeOnClickListener; private final ViewDragHelper mDragHelper; private final CanvasSaveProxyFactory mCanvasSaveProxyFactory; private CanvasSaveProxy mCanvasSaveProxy; /** * Stores whether or not the pane was expanded the last time it was slideable. * If expand/collapse operations are invoked this state is modified. Used by * instance state save/restore. */ private boolean mFirstLayout = true; private final Rect mTmpRect = new Rect(); /** * Listener for monitoring events about sliding panes. */ public interface PanelSlideListener { /** * Called when a sliding pane's position changes. * * @param panel The child view that was moved * @param slideOffset The new offset of this sliding pane within its range, from 0-1 */ void onPanelSlide(View panel, float slideOffset); /** * Called when a sliding panel state changes * * @param panel The child view that was slid to an collapsed position */ void onPanelStateChanged(View panel, PanelState previousState, PanelState newState); } /** * No-op stubs for {@link PanelSlideListener}. If you only want to implement a subset * of the listener methods you can extend this instead of implement the full interface. 
*/ public static class SimplePanelSlideListener implements PanelSlideListener { @Override public void onPanelSlide(View panel, float slideOffset) { } @Override public void onPanelStateChanged(View panel, PanelState previousState, PanelState newState) { } } public SlidingUpPanelLayout(Context context) { this(context, null); } public SlidingUpPanelLayout(Context context, AttributeSet attrs) { this(context, attrs, 0); } public SlidingUpPanelLayout(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); mCanvasSaveProxyFactory = new CanvasSaveProxyFactory(); if (isInEditMode()) { mShadowDrawable = null; mDragHelper = null; return; } Interpolator scrollerInterpolator = null; if (attrs != null) { TypedArray defAttrs = context.obtainStyledAttributes(attrs, DEFAULT_ATTRS); try { int gravity = defAttrs.getInt(0, Gravity.NO_GRAVITY); setGravity(gravity); } finally { defAttrs.recycle(); } TypedArray ta = context.obtainStyledAttributes(attrs, R.styleable.SlidingUpPanelLayout); try { mPanelHeight = ta.getDimensionPixelSize(R.styleable.SlidingUpPanelLayout_umanoPanelHeight, -1); mShadowHeight = ta.getDimensionPixelSize(R.styleable.SlidingUpPanelLayout_umanoShadowHeight, -1); mParallaxOffset = ta.getDimensionPixelSize(R.styleable.SlidingUpPanelLayout_umanoParallaxOffset, -1); mMinFlingVelocity = ta.getInt(R.styleable.SlidingUpPanelLayout_umanoFlingVelocity, DEFAULT_MIN_FLING_VELOCITY); mCoveredFadeColor = ta.getColor(R.styleable.SlidingUpPanelLayout_umanoFadeColor, DEFAULT_FADE_COLOR); mDragViewResId = ta.getResourceId(R.styleable.SlidingUpPanelLayout_umanoDragView, -1); mScrollableViewResId = ta.getResourceId(R.styleable.SlidingUpPanelLayout_umanoScrollableView, -1); mOverlayContent = ta.getBoolean(R.styleable.SlidingUpPanelLayout_umanoOverlay, DEFAULT_OVERLAY_FLAG); mClipPanel = ta.getBoolean(R.styleable.SlidingUpPanelLayout_umanoClipPanel, DEFAULT_CLIP_PANEL_FLAG); mAnchorPoint = ta.getFloat(R.styleable.SlidingUpPanelLayout_umanoAnchorPoint, 
DEFAULT_ANCHOR_POINT); mSlideState = PanelState.values()[ta.getInt(R.styleable.SlidingUpPanelLayout_umanoInitialState, DEFAULT_SLIDE_STATE.ordinal())]; int interpolatorResId = ta.getResourceId(R.styleable.SlidingUpPanelLayout_umanoScrollInterpolator, -1); if (interpolatorResId != -1) { scrollerInterpolator = AnimationUtils.loadInterpolator(context, interpolatorResId); } } finally { ta.recycle(); } } final float density = context.getResources().getDisplayMetrics().density; if (mPanelHeight == -1) { mPanelHeight = (int) (DEFAULT_PANEL_HEIGHT * density + 0.5f); } if (mShadowHeight == -1) { mShadowHeight = (int) (DEFAULT_SHADOW_HEIGHT * density + 0.5f); } if (mParallaxOffset == -1) { mParallaxOffset = (int) (DEFAULT_PARALLAX_OFFSET * density); } // If the shadow height is zero, don't show the shadow if (mShadowHeight > 0) { if (mIsSlidingUp) { mShadowDrawable = ContextCompat.getDrawable(context, R.drawable.above_shadow); } else { mShadowDrawable = ContextCompat.getDrawable(context, R.drawable.below_shadow); } } else { mShadowDrawable = null; } setWillNotDraw(false); mDragHelper = ViewDragHelper.create(this, 0.5f, scrollerInterpolator, new DragHelperCallback()); mDragHelper.setMinVelocity(mMinFlingVelocity * density); mIsTouchEnabled = true; } /** * Set the Drag View after the view is inflated */ @Override protected void onFinishInflate() { super.onFinishInflate(); if (mDragViewResId != -1) { setDragView(findViewById(mDragViewResId)); } if (mScrollableViewResId != -1) { setScrollableView(findViewById(mScrollableViewResId)); } } public void setGravity(int gravity) { if (gravity != Gravity.TOP && gravity != Gravity.BOTTOM) { throw new IllegalArgumentException("gravity must be set to either top or bottom"); } mIsSlidingUp = gravity == Gravity.BOTTOM; if (!mFirstLayout) { requestLayout(); } } /** * Set the color used to fade the pane covered by the sliding pane out when the pane * will become fully covered in the expanded state. 
* * @param color An ARGB-packed color value */ public void setCoveredFadeColor(int color) { mCoveredFadeColor = color; requestLayout(); } /** * @return The ARGB-packed color value used to fade the fixed pane */ public int getCoveredFadeColor() { return mCoveredFadeColor; } /** * Set sliding enabled flag * * @param enabled flag value */ public void setTouchEnabled(boolean enabled) { mIsTouchEnabled = enabled; } public boolean isTouchEnabled() { return mIsTouchEnabled && mSlideableView != null && mSlideState != PanelState.HIDDEN; } /** * Set the collapsed panel height in pixels * * @param val A height in pixels */ public void setPanelHeight(int val) { if (getPanelHeight() == val) { return; } mPanelHeight = val; boolean onCollapsedMode = getPanelState() == PanelState.COLLAPSED; if (!mFirstLayout) { if (!onCollapsedMode) { requestLayout(); return; } } if (onCollapsedMode && !smoothToBottom()) { // Only invalidating when animation was not done invalidate(); } } protected boolean smoothToBottom() { return smoothSlideTo(0, 0); } /** * @return The current shadow height */ public int getShadowHeight() { return mShadowHeight; } /** * Set the shadow height * * @param val A height in pixels */ public void setShadowHeight(int val) { mShadowHeight = val; if (!mFirstLayout) { invalidate(); } } /** * @return The current collapsed panel height */ public int getPanelHeight() { return mPanelHeight; } /** * @return The current parallax offset */ public int getCurrentParallaxOffset() { // Clamp slide offset at zero for parallax computation; int offset = (int) (mParallaxOffset * Math.max(mSlideOffset, 0)); return mIsSlidingUp ? 
-offset : offset; } /** * Set parallax offset for the panel * * @param val A height in pixels */ public void setParallaxOffset(int val) { mParallaxOffset = val; if (!mFirstLayout) { requestLayout(); } } /** * @return The current minimin fling velocity */ public int getMinFlingVelocity() { return mMinFlingVelocity; } /** * Sets the minimum fling velocity for the panel * * @param val the new value */ public void setMinFlingVelocity(int val) { mMinFlingVelocity = val; } public void addPanelSlideListener(PanelSlideListener listener) { synchronized (mPanelSlideListeners) { mPanelSlideListeners.add(listener); } } public void removePanelSlideListener(PanelSlideListener listener) { synchronized (mPanelSlideListeners) { mPanelSlideListeners.remove(listener); } } /** * Provides an on click for the portion of the main view that is dimmed. The listener is not * triggered if the panel is in a collapsed or a hidden position. If the on click listener is * not provided, the clicks on the dimmed area are passed through to the main layout. */ public void setFadeOnClickListener(View.OnClickListener listener) { mFadeOnClickListener = listener; } /** * Set the draggable view portion. Use to null, to allow the whole panel to be draggable * * @param dragView A view that will be used to drag the panel. */ public void setDragView(View dragView) { if (mDragView != null) { mDragView.setOnClickListener(null); } mDragView = dragView; if (mDragView != null) { mDragView.setClickable(true); mDragView.setFocusable(false); mDragView.setFocusableInTouchMode(false); mDragView.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { if (!isEnabled() || !isTouchEnabled()) return; if (mSlideState != PanelState.EXPANDED && mSlideState != PanelState.ANCHORED) { if (mAnchorPoint < DEFAULT_ANCHOR_POINT) { setPanelState(PanelState.ANCHORED); } else { setPanelState(PanelState.EXPANDED); } } else { setPanelState(PanelState.COLLAPSED); } } }); } } /** * Set the draggable view portion. 
Use to null, to allow the whole panel to be draggable * * @param dragViewResId The resource ID of the new drag view */ public void setDragView(int dragViewResId) { mDragViewResId = dragViewResId; setDragView(findViewById(dragViewResId)); } /** * Set the scrollable child of the sliding layout. If set, scrolling will be transfered between * the panel and the view when necessary * * @param scrollableView The scrollable view */ public void setScrollableView(View scrollableView) { mScrollableView = scrollableView; } public View getScrollableView() { return mScrollableView; } /** * Sets the current scrollable view helper. See ScrollableViewHelper description for details. */ public void setScrollableViewHelper(ScrollableViewHelper helper) { mScrollableViewHelper = helper; } /** * Set an anchor point where the panel can stop during sliding * * @param anchorPoint A value between 0 and 1, determining the position of the anchor point * starting from the top of the layout. */ public void setAnchorPoint(float anchorPoint) { if (anchorPoint > 0 && anchorPoint <= 1) { mAnchorPoint = anchorPoint; mFirstLayout = true; requestLayout(); } } /** * Set maximum slide offset to move sliding layout in expanded state * The value must be in range of [ 0, 1] * * @param offset max sliding offset */ public void setMaxSlideOffset(float offset) { if (offset <= DEFAULT_MAX_SLIDING_OFFSET) { mMaxSlideOffset = offset; } } /** * Gets the currently set anchor point * * @return the currently set anchor point */ public float getAnchorPoint() { return mAnchorPoint; } /** * Sets whether or not the panel overlays the content */ public void setOverlayed(boolean overlayed) { mOverlayContent = overlayed; } /** * Check if the panel is set as an overlay. 
*/ public boolean isOverlayed() { return mOverlayContent; } /** * Sets whether or not the main content is clipped to the top of the panel */ public void setClipPanel(boolean clip) { mClipPanel = clip; } /** * Check whether or not the main content is clipped to the top of the panel */ public boolean isClipPanel() { return mClipPanel; } void dispatchOnPanelSlide(View panel) { synchronized (mPanelSlideListeners) { for (PanelSlideListener l : mPanelSlideListeners) { l.onPanelSlide(panel, mSlideOffset); } } } void dispatchOnPanelStateChanged(View panel, PanelState previousState, PanelState newState) { synchronized (mPanelSlideListeners) { for (PanelSlideListener l : mPanelSlideListeners) { l.onPanelStateChanged(panel, previousState, newState); } } sendAccessibilityEvent(AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED); } void updateObscuredViewVisibility() { if (getChildCount() == 0) { return; } final int leftBound = getPaddingLeft(); final int rightBound = getWidth() - getPaddingRight(); final int topBound = getPaddingTop(); final int bottomBound = getHeight() - getPaddingBottom(); final int left; final int right; final int top; final int bottom; if (mSlideableView != null && hasOpaqueBackground(mSlideableView)) { left = mSlideableView.getLeft(); right = mSlideableView.getRight(); top = mSlideableView.getTop(); bottom = mSlideableView.getBottom(); } else { left = right = top = bottom = 0; } View child = getChildAt(0); final int clampedChildLeft = Math.max(leftBound, child.getLeft()); final int clampedChildTop = Math.max(topBound, child.getTop()); final int clampedChildRight = Math.min(rightBound, child.getRight()); final int clampedChildBottom = Math.min(bottomBound, child.getBottom()); final int vis; if (clampedChildLeft >= left && clampedChildTop >= top && clampedChildRight <= right && clampedChildBottom <= bottom) { vis = INVISIBLE; } else { vis = VISIBLE; } child.setVisibility(vis); } void setAllChildrenVisible() { for (int i = 0, childCount = getChildCount(); i < 
childCount; i++) { final View child = getChildAt(i); if (child.getVisibility() == INVISIBLE) { child.setVisibility(VISIBLE); } } } private static boolean hasOpaqueBackground(View v) { final Drawable background = v.getBackground(); return background != null && background.getOpacity() == PixelFormat.OPAQUE; } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); mFirstLayout = true; } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); mFirstLayout = true; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { final int widthMode = MeasureSpec.getMode(widthMeasureSpec); final int widthSize = MeasureSpec.getSize(widthMeasureSpec); final int heightMode = MeasureSpec.getMode(heightMeasureSpec); final int heightSize = MeasureSpec.getSize(heightMeasureSpec); if (widthMode != MeasureSpec.EXACTLY && widthMode != MeasureSpec.AT_MOST) { throw new IllegalStateException("Width must have an exact value or MATCH_PARENT"); } else if (heightMode != MeasureSpec.EXACTLY && heightMode != MeasureSpec.AT_MOST) { throw new IllegalStateException("Height must have an exact value or MATCH_PARENT"); } final int childCount = getChildCount(); if (childCount != 2) { throw new IllegalStateException("Sliding up panel layout must have exactly 2 children!"); } mMainView = getChildAt(0); mSlideableView = getChildAt(1); if (mDragView == null) { setDragView(mSlideableView); } // If the sliding panel is not visible, then put the whole view in the hidden state if (mSlideableView.getVisibility() != VISIBLE) { mSlideState = PanelState.HIDDEN; } int layoutHeight = heightSize - getPaddingTop() - getPaddingBottom(); int layoutWidth = widthSize - getPaddingLeft() - getPaddingRight(); // First pass. Measure based on child LayoutParams width/height. 
for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); // We always measure the sliding panel in order to know it's height (needed for show panel) if (child.getVisibility() == GONE && i == 0) { continue; } int height = layoutHeight; int width = layoutWidth; if (child == mMainView) { if (!mOverlayContent && mSlideState != PanelState.HIDDEN) { height -= mPanelHeight; } width -= lp.leftMargin + lp.rightMargin; } else if (child == mSlideableView) { // The slideable view should be aware of its top margin. // See https://github.com/umano/AndroidSlidingUpPanel/issues/412. height -= lp.topMargin; } int childWidthSpec; if (lp.width == LayoutParams.WRAP_CONTENT) { childWidthSpec = MeasureSpec.makeMeasureSpec(width, MeasureSpec.AT_MOST); } else if (lp.width == LayoutParams.MATCH_PARENT) { childWidthSpec = MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY); } else { childWidthSpec = MeasureSpec.makeMeasureSpec(lp.width, MeasureSpec.EXACTLY); } int childHeightSpec; if (lp.height == LayoutParams.WRAP_CONTENT) { childHeightSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.AT_MOST); } else { // Modify the height based on the weight. 
if (lp.weight > 0 && lp.weight < 1) { height = (int) (height * lp.weight); } else if (lp.height != LayoutParams.MATCH_PARENT) { height = lp.height; } childHeightSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY); } child.measure(childWidthSpec, childHeightSpec); if (child == mSlideableView) { mSlideRange = mSlideableView.getMeasuredHeight() - mPanelHeight; } } setMeasuredDimension(widthSize, heightSize); } @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { final int paddingLeft = getPaddingLeft(); final int paddingTop = getPaddingTop(); final int childCount = getChildCount(); if (mFirstLayout) { switch (mSlideState) { case EXPANDED: mSlideOffset = mMaxSlideOffset; break; case ANCHORED: mSlideOffset = mAnchorPoint; break; case HIDDEN: int newTop = computePanelTopPosition(0.0f) + (mIsSlidingUp ? +mPanelHeight : -mPanelHeight); mSlideOffset = computeSlideOffset(newTop); break; default: mSlideOffset = 0.f; break; } } for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); // Always layout the sliding view on the first layout if (child.getVisibility() == GONE && (i == 0 || mFirstLayout)) { continue; } final int childHeight = child.getMeasuredHeight(); int childTop = paddingTop; if (child == mSlideableView) { childTop = computePanelTopPosition(mSlideOffset); } if (!mIsSlidingUp) { if (child == mMainView && !mOverlayContent) { childTop = computePanelTopPosition(mSlideOffset) + mSlideableView.getMeasuredHeight(); } } final int childBottom = childTop + childHeight; final int childLeft = paddingLeft + lp.leftMargin; final int childRight = childLeft + child.getMeasuredWidth(); child.layout(childLeft, childTop, childRight, childBottom); } if (mFirstLayout) { updateObscuredViewVisibility(); } applyParallaxForCurrentSlideOffset(); mFirstLayout = false; } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, 
oldw, oldh); // Recalculate sliding panes and their details if (h != oldh) { mFirstLayout = true; } } @Override public boolean onInterceptTouchEvent(MotionEvent ev) { // If the scrollable view is handling touch, never intercept if (mIsScrollableViewHandlingTouch || !isTouchEnabled()) { mDragHelper.abort(); return false; } final int action = ev.getAction(); final float x = ev.getX(); final float y = ev.getY(); final float adx = Math.abs(x - mInitialMotionX); final float ady = Math.abs(y - mInitialMotionY); final int dragSlop = mDragHelper.getTouchSlop(); switch (action) { case MotionEvent.ACTION_DOWN: { mIsUnableToDrag = false; mInitialMotionX = x; mInitialMotionY = y; if (!isViewUnder(mDragView, (int) x, (int) y)) { mDragHelper.cancel(); mIsUnableToDrag = true; return false; } break; } case MotionEvent.ACTION_MOVE: { if (ady > dragSlop && adx > ady) { mDragHelper.cancel(); mIsUnableToDrag = true; return false; } break; } case MotionEvent.ACTION_CANCEL: case MotionEvent.ACTION_UP: // If the dragView is still dragging when we get here, we need to call processTouchEvent // so that the view is settled // Added to make scrollable views work (tokudu) if (mDragHelper.isDragging()) { mDragHelper.processTouchEvent(ev); return true; } // Check if this was a click on the faded part of the screen, and fire off the listener if there is one. 
if (ady <= dragSlop && adx <= dragSlop && mSlideOffset > 0 && !isViewUnder(mSlideableView, (int) mInitialMotionX, (int) mInitialMotionY) && mFadeOnClickListener != null) { playSoundEffect(android.view.SoundEffectConstants.CLICK); mFadeOnClickListener.onClick(this); return true; } break; } return mDragHelper.shouldInterceptTouchEvent(ev); } @Override public boolean onTouchEvent(MotionEvent ev) { if (!isEnabled() || !isTouchEnabled()) { return super.onTouchEvent(ev); } try { mDragHelper.processTouchEvent(ev); return true; } catch (Exception ex) { // Ignore the pointer out of range exception return false; } } @Override public boolean dispatchTouchEvent(MotionEvent ev) { final int action = ev.getAction(); if (!isEnabled() || !isTouchEnabled() || (mIsUnableToDrag && action != MotionEvent.ACTION_DOWN)) { mDragHelper.abort(); return super.dispatchTouchEvent(ev); } final float x = ev.getX(); final float y = ev.getY(); if (action == MotionEvent.ACTION_DOWN) { mIsScrollableViewHandlingTouch = false; mPrevMotionX = x; mPrevMotionY = y; } else if (action == MotionEvent.ACTION_MOVE) { float dx = x - mPrevMotionX; float dy = y - mPrevMotionY; mPrevMotionX = x; mPrevMotionY = y; if (Math.abs(dx) > Math.abs(dy)) { // Scrolling horizontally, so ignore return super.dispatchTouchEvent(ev); } // If the scroll view isn't under the touch, pass the // event along to the dragView. if (!isViewUnder(mScrollableView, (int) mInitialMotionX, (int) mInitialMotionY)) { return super.dispatchTouchEvent(ev); } // Which direction (up or down) is the drag moving? if (dy * (mIsSlidingUp ? 1 : -1) > 0) { // Collapsing // Is the child less than fully scrolled? // Then let the child handle it. if (mScrollableViewHelper.getScrollableViewScrollPosition(mScrollableView, mIsSlidingUp) > 0) { mIsScrollableViewHandlingTouch = true; return super.dispatchTouchEvent(ev); } // Was the child handling the touch previously? // Then we need to rejigger things so that the // drag panel gets a proper down event. 
if (mIsScrollableViewHandlingTouch) { // Send an 'UP' event to the child. MotionEvent up = MotionEvent.obtain(ev); up.setAction(MotionEvent.ACTION_CANCEL); super.dispatchTouchEvent(up); up.recycle(); // Send a 'DOWN' event to the panel. (We'll cheat // and hijack this one) ev.setAction(MotionEvent.ACTION_DOWN); } mIsScrollableViewHandlingTouch = false; return this.onTouchEvent(ev); } else if (dy * (mIsSlidingUp ? 1 : -1) < 0) { // Expanding // Is the panel less than fully expanded? // Then we'll handle the drag here. if (mSlideOffset < mMaxSlideOffset) { mIsScrollableViewHandlingTouch = false; return this.onTouchEvent(ev); } // Was the panel handling the touch previously? // Then we need to rejigger things so that the // child gets a proper down event. if (!mIsScrollableViewHandlingTouch && mDragHelper.isDragging()) { mDragHelper.cancel(); ev.setAction(MotionEvent.ACTION_DOWN); } mIsScrollableViewHandlingTouch = true; return super.dispatchTouchEvent(ev); } } else if (action == MotionEvent.ACTION_UP) { // If the scrollable view was handling the touch and we receive an up // we want to clear any previous dragging state so we don't intercept a touch stream accidentally if (mIsScrollableViewHandlingTouch) { mDragHelper.setDragState(ViewDragHelper.STATE_IDLE); } } // In all other cases, just let the default behavior take over. return super.dispatchTouchEvent(ev); } private boolean isViewUnder(View view, int x, int y) { if (view == null) return false; int[] viewLocation = new int[2]; view.getLocationOnScreen(viewLocation); int[] parentLocation = new int[2]; this.getLocationOnScreen(parentLocation); int screenX = parentLocation[0] + x; int screenY = parentLocation[1] + y; return screenX >= viewLocation[0] && screenX < viewLocation[0] + view.getWidth() && screenY >= viewLocation[1] && screenY < viewLocation[1] + view.getHeight(); } /* * Computes the top position of the panel based on the slide offset. 
*/ private int computePanelTopPosition(float slideOffset) { int slidingViewHeight = mSlideableView != null ? mSlideableView.getMeasuredHeight() : 0; int slidePixelOffset = (int) (slideOffset * mSlideRange); // Compute the top of the panel if its collapsed return mIsSlidingUp ? getMeasuredHeight() - getPaddingBottom() - mPanelHeight - slidePixelOffset : getPaddingTop() - slidingViewHeight + mPanelHeight + slidePixelOffset; } /* * Computes the slide offset based on the top position of the panel */ private float computeSlideOffset(int topPosition) { // Compute the panel top position if the panel is collapsed (offset 0) final int topBoundCollapsed = computePanelTopPosition(0); // Determine the new slide offset based on the collapsed top position and the new required // top position return (mIsSlidingUp ? (float) (topBoundCollapsed - topPosition) / mSlideRange : (float) (topPosition - topBoundCollapsed) / mSlideRange); } /** * Returns the current state of the panel as an enum. * * @return the current panel state */ public PanelState getPanelState() { return mSlideState; } /** * Change panel state to the given state with * * @param state - new panel state */ public void setPanelState(PanelState state) { // Abort any running animation, to allow state change if (mDragHelper.getViewDragState() == ViewDragHelper.STATE_SETTLING) { Log.d(TAG, "View is settling. 
Aborting animation."); mDragHelper.abort(); } if (state == null || state == PanelState.DRAGGING) { throw new IllegalArgumentException("Panel state cannot be null or DRAGGING."); } if (!isEnabled() || (!mFirstLayout && mSlideableView == null) || state == mSlideState || mSlideState == PanelState.DRAGGING) return; if (mFirstLayout) { setPanelStateInternal(state); } else { if (mSlideState == PanelState.HIDDEN) { mSlideableView.setVisibility(View.VISIBLE); requestLayout(); } switch (state) { case ANCHORED: smoothSlideTo(mAnchorPoint, 0); break; case COLLAPSED: smoothSlideTo(0, 0); break; case EXPANDED: smoothSlideTo(mMaxSlideOffset, 0); break; case HIDDEN: int newTop = computePanelTopPosition(0.0f) + (mIsSlidingUp ? +mPanelHeight : -mPanelHeight); smoothSlideTo(computeSlideOffset(newTop), 0); break; } } } private void setPanelStateInternal(PanelState state) { if (mSlideState == state) return; PanelState oldState = mSlideState; mSlideState = state; dispatchOnPanelStateChanged(this, oldState, state); } /** * Update the parallax based on the current slide offset. */ @SuppressLint("NewApi") private void applyParallaxForCurrentSlideOffset() { if (mParallaxOffset > 0) { int mainViewOffset = getCurrentParallaxOffset(); mMainView.setTranslationY(mainViewOffset); } } private void onPanelDragged(int newTop) { if (mSlideState != PanelState.DRAGGING) { mLastNotDraggingSlideState = mSlideState; } setPanelStateInternal(PanelState.DRAGGING); // Recompute the slide offset based on the new top position mSlideOffset = computeSlideOffset(newTop); applyParallaxForCurrentSlideOffset(); // Dispatch the slide event dispatchOnPanelSlide(mSlideableView); // If the slide offset is negative, and overlay is not on, we need to increase the // height of the main content LayoutParams lp = (LayoutParams) mMainView.getLayoutParams(); int defaultHeight = getHeight() - getPaddingBottom() - getPaddingTop() - (mSlideOffset < 0 ? 
0 : mPanelHeight); if (mSlideOffset <= 0 && !mOverlayContent) { // expand the main view lp.height = mIsSlidingUp ? (newTop - getPaddingBottom()) : (getHeight() - getPaddingBottom() - mSlideableView.getMeasuredHeight() - newTop); if (lp.height == defaultHeight) { lp.height = LayoutParams.MATCH_PARENT; } mMainView.requestLayout(); } else if (lp.height != LayoutParams.MATCH_PARENT && !mOverlayContent) { lp.height = LayoutParams.MATCH_PARENT; mMainView.requestLayout(); } } @Override protected boolean drawChild(Canvas canvas, View child, long drawingTime) { boolean result; if (mCanvasSaveProxy == null || !mCanvasSaveProxy.isFor(canvas)) { mCanvasSaveProxy = mCanvasSaveProxyFactory.create(canvas); } final int save = mCanvasSaveProxy.save(); if (mSlideableView != null && mSlideableView != child) { // if main view // Clip against the slider; no sense drawing what will immediately be covered, // Unless the panel is set to overlay content canvas.getClipBounds(mTmpRect); if (!mOverlayContent) { if (mIsSlidingUp) { mTmpRect.bottom = Math.min(mTmpRect.bottom, mSlideableView.getTop()); } else { mTmpRect.top = Math.max(mTmpRect.top, mSlideableView.getBottom()); } } if (mClipPanel) { canvas.clipRect(mTmpRect); } result = super.drawChild(canvas, child, drawingTime); if (mCoveredFadeColor != 0 && mSlideOffset > 0) { final int baseAlpha = (mCoveredFadeColor & 0xff000000) >>> 24; final int imag = (int) (baseAlpha * mSlideOffset); final int color = imag << 24 | (mCoveredFadeColor & 0xffffff); mCoveredFadePaint.setColor(color); canvas.drawRect(mTmpRect, mCoveredFadePaint); } } else { result = super.drawChild(canvas, child, drawingTime); } canvas.restoreToCount(save); return result; } /** * Smoothly animate mDraggingPane to the target X position within its range. * * @param slideOffset position to animate to * @param velocity initial velocity in case of fling, or 0. */ boolean smoothSlideTo(float slideOffset, int velocity) { if (!isEnabled() || mSlideableView == null) { // Nothing to do. 
return false; } int panelTop = computePanelTopPosition(slideOffset); if (mDragHelper.smoothSlideViewTo(mSlideableView, mSlideableView.getLeft(), panelTop)) { setAllChildrenVisible(); ViewCompat.postInvalidateOnAnimation(this); return true; } return false; } @Override public void computeScroll() { if (mDragHelper != null && mDragHelper.continueSettling(true)) { if (!isEnabled()) { mDragHelper.abort(); return; } ViewCompat.postInvalidateOnAnimation(this); } } @Override public void draw(Canvas c) { super.draw(c); // draw the shadow if (mShadowDrawable != null && mSlideableView != null) { final int right = mSlideableView.getRight(); final int top; final int bottom; if (mIsSlidingUp) { top = mSlideableView.getTop() - mShadowHeight; bottom = mSlideableView.getTop(); } else { top = mSlideableView.getBottom(); bottom = mSlideableView.getBottom() + mShadowHeight; } final int left = mSlideableView.getLeft(); mShadowDrawable.setBounds(left, top, right, bottom); mShadowDrawable.draw(c); } } /** * Tests scrollability within child views of v given a delta of dx. * * @param v View to test for horizontal scrollability * @param checkV Whether the view v passed should itself be checked for scrollability (true), * or just its children (false). * @param dx Delta scrolled in pixels * @param x X coordinate of the active touch point * @param y Y coordinate of the active touch point * @return true if child views of v can be scrolled by delta of dx. */ protected boolean canScroll(View v, boolean checkV, int dx, int x, int y) { if (v instanceof ViewGroup) { final ViewGroup group = (ViewGroup) v; final int scrollX = v.getScrollX(); final int scrollY = v.getScrollY(); final int count = group.getChildCount(); // Count backwards - let topmost views consume scroll distance first. 
for (int i = count - 1; i >= 0; i--) { final View child = group.getChildAt(i); if (x + scrollX >= child.getLeft() && x + scrollX < child.getRight() && y + scrollY >= child.getTop() && y + scrollY < child.getBottom() && canScroll(child, true, dx, x + scrollX - child.getLeft(), y + scrollY - child.getTop())) { return true; } } } return checkV && v.canScrollHorizontally(-dx); } @Override protected ViewGroup.LayoutParams generateDefaultLayoutParams() { return new LayoutParams(); } @Override protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) { return p instanceof MarginLayoutParams ? new LayoutParams((MarginLayoutParams) p) : new LayoutParams(p); } @Override protected boolean checkLayoutParams(ViewGroup.LayoutParams p) { return p instanceof LayoutParams && super.checkLayoutParams(p); } @Override public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) { return new LayoutParams(getContext(), attrs); } @Override public Parcelable onSaveInstanceState() { Bundle bundle = new Bundle(); bundle.putParcelable("superState", super.onSaveInstanceState()); bundle.putSerializable(SLIDING_STATE, mSlideState != PanelState.DRAGGING ? mSlideState : mLastNotDraggingSlideState); return bundle; } @Override public void onRestoreInstanceState(Parcelable state) { if (state instanceof Bundle) { Bundle bundle = (Bundle) state; mSlideState = (PanelState) bundle.getSerializable(SLIDING_STATE); mSlideState = mSlideState == null ? 
DEFAULT_SLIDE_STATE : mSlideState; state = bundle.getParcelable("superState"); } super.onRestoreInstanceState(state); } private class DragHelperCallback extends ViewDragHelper.Callback { @Override public boolean tryCaptureView(View child, int pointerId) { return !mIsUnableToDrag && child == mSlideableView; } @Override public void onViewDragStateChanged(int state) { if (mDragHelper != null && mDragHelper.getViewDragState() == ViewDragHelper.STATE_IDLE) { mSlideOffset = computeSlideOffset(mSlideableView.getTop()); applyParallaxForCurrentSlideOffset(); if (mSlideOffset == 1) { updateObscuredViewVisibility(); setPanelStateInternal(PanelState.EXPANDED); } else if (mSlideOffset == 0) { setPanelStateInternal(PanelState.COLLAPSED); } else if (mSlideOffset < 0) { setPanelStateInternal(PanelState.HIDDEN); mSlideableView.setVisibility(View.INVISIBLE); } else { updateObscuredViewVisibility(); setPanelStateInternal(PanelState.ANCHORED); } } } @Override public void onViewCaptured(View capturedChild, int activePointerId) { setAllChildrenVisible(); } @Override public void onViewPositionChanged(View changedView, int left, int top, int dx, int dy) { onPanelDragged(top); invalidate(); } @Override public void onViewReleased(View releasedChild, float xvel, float yvel) { int target; // direction is always positive if we are sliding in the expanded direction float direction = mIsSlidingUp ? 
-yvel : yvel; if (direction > 0 && mSlideOffset <= mAnchorPoint) { // swipe up -> expand and stop at anchor point target = computePanelTopPosition(mAnchorPoint); } else if (direction > 0 && mSlideOffset > mAnchorPoint) { // swipe up past anchor -> expand target = computePanelTopPosition(mMaxSlideOffset); } else if (direction < 0 && mSlideOffset >= mAnchorPoint) { // swipe down -> collapse and stop at anchor point target = computePanelTopPosition(mAnchorPoint); } else if (direction < 0 && mSlideOffset < mAnchorPoint) { // swipe down past anchor -> collapse target = computePanelTopPosition(0.0f); } else if (mSlideOffset >= (1.f + mAnchorPoint) / 2) { // zero velocity, and far enough from anchor point => expand to the top target = computePanelTopPosition(mMaxSlideOffset); } else if (mSlideOffset >= mAnchorPoint / 2) { // zero velocity, and close enough to anchor point => go to anchor target = computePanelTopPosition(mAnchorPoint); } else { // settle at the bottom target = computePanelTopPosition(0.0f); } if (mDragHelper != null) { mDragHelper.settleCapturedViewAt(releasedChild.getLeft(), target); } invalidate(); } @Override public int getViewVerticalDragRange(View child) { return mSlideRange; } @Override public int clampViewPositionVertical(View child, int top, int dy) { final int collapsedTop = computePanelTopPosition(0.f); final int expandedTop = computePanelTopPosition(mMaxSlideOffset); if (mIsSlidingUp) { return Math.min(Math.max(top, expandedTop), collapsedTop); } else { return Math.min(Math.max(top, collapsedTop), expandedTop); } } } public static class LayoutParams extends ViewGroup.MarginLayoutParams { private static final int[] ATTRS = new int[]{ android.R.attr.layout_weight }; public float weight = 0; public LayoutParams() { super(MATCH_PARENT, MATCH_PARENT); } public LayoutParams(int width, int height) { super(width, height); } public LayoutParams(int width, int height, float weight) { super(width, height); this.weight = weight; } public 
LayoutParams(android.view.ViewGroup.LayoutParams source) { super(source); } public LayoutParams(MarginLayoutParams source) { super(source); } public LayoutParams(LayoutParams source) { super(source); } public LayoutParams(Context c, AttributeSet attrs) { super(c, attrs); final TypedArray ta = c.obtainStyledAttributes(attrs, ATTRS); try { this.weight = ta.getFloat(0, 0); } finally { ta.recycle(); } } } }
library/src/main/java/com/sothree/slidinguppanel/SlidingUpPanelLayout.java
package com.sothree.slidinguppanel; import android.annotation.SuppressLint; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.PixelFormat; import android.graphics.Rect; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.os.Parcelable; import android.util.AttributeSet; import android.util.Log; import android.view.Gravity; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.accessibility.AccessibilityEvent; import android.view.animation.AnimationUtils; import android.view.animation.Interpolator; import androidx.core.content.ContextCompat; import androidx.core.view.ViewCompat; import com.sothree.slidinguppanel.canvassaveproxy.CanvasSaveProxy; import com.sothree.slidinguppanel.canvassaveproxy.CanvasSaveProxyFactory; import com.sothree.slidinguppanel.library.R; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; public class SlidingUpPanelLayout extends ViewGroup { private static final String TAG = SlidingUpPanelLayout.class.getSimpleName(); /** * Default peeking out panel height */ private static final int DEFAULT_PANEL_HEIGHT = 68; // dp; /** * Default anchor point height */ private static final float DEFAULT_ANCHOR_POINT = 1.0f; // In relative % /** * Default maximum sliding offset */ private static final float DEFAULT_MAX_SLIDING_OFFSET = 1.0f; /** * Default initial state for the component */ private static PanelState DEFAULT_SLIDE_STATE = PanelState.COLLAPSED; /** * Default height of the shadow above the peeking out panel */ private static final int DEFAULT_SHADOW_HEIGHT = 4; // dp; /** * If no fade color is given by default it will fade to 80% gray. 
*/ private static final int DEFAULT_FADE_COLOR = 0x99000000; /** * Default Minimum velocity that will be detected as a fling */ private static final int DEFAULT_MIN_FLING_VELOCITY = 400; // dips per second /** * Default is set to false because that is how it was written */ private static final boolean DEFAULT_OVERLAY_FLAG = false; /** * Default is set to true for clip panel for performance reasons */ private static final boolean DEFAULT_CLIP_PANEL_FLAG = true; /** * Default attributes for layout */ private static final int[] DEFAULT_ATTRS = new int[]{ android.R.attr.gravity }; /** * Tag for the sliding state stored inside the bundle */ public static final String SLIDING_STATE = "sliding_state"; /** * Minimum velocity that will be detected as a fling */ private int mMinFlingVelocity = DEFAULT_MIN_FLING_VELOCITY; /** * The fade color used for the panel covered by the slider. 0 = no fading. */ private int mCoveredFadeColor = DEFAULT_FADE_COLOR; /** * Default parallax length of the main view */ private static final int DEFAULT_PARALLAX_OFFSET = 0; /** * The paint used to dim the main layout when sliding */ private final Paint mCoveredFadePaint = new Paint(); /** * Drawable used to draw the shadow between panes. */ private final Drawable mShadowDrawable; /** * The size of the overhang in pixels. */ private int mPanelHeight = -1; /** * The size of the shadow in pixels. */ private int mShadowHeight = -1; /** * Parallax offset */ private int mParallaxOffset = -1; /** * True if the collapsed panel should be dragged up. */ private boolean mIsSlidingUp; /** * Panel overlays the windows instead of putting it underneath it. */ private boolean mOverlayContent = DEFAULT_OVERLAY_FLAG; /** * The main view is clipped to the main top border */ private boolean mClipPanel = DEFAULT_CLIP_PANEL_FLAG; /** * If provided, the panel can be dragged by only this view. Otherwise, the entire panel can be * used for dragging. 
*/ private View mDragView; /** * If provided, the panel can be dragged by only this view. Otherwise, the entire panel can be * used for dragging. */ private int mDragViewResId = -1; /** * If provided, the panel will transfer the scroll from this view to itself when needed. */ private View mScrollableView; private int mScrollableViewResId; private ScrollableViewHelper mScrollableViewHelper = new ScrollableViewHelper(); /** * The child view that can slide, if any. */ private View mSlideableView; /** * The main view */ private View mMainView; /** * Current state of the slideable view. */ public enum PanelState { EXPANDED, COLLAPSED, ANCHORED, HIDDEN, DRAGGING } private PanelState mSlideState = DEFAULT_SLIDE_STATE; /** * If the current slide state is DRAGGING, this will store the last non dragging state */ private PanelState mLastNotDraggingSlideState = DEFAULT_SLIDE_STATE; /** * How far the panel is offset from its expanded position. * range [0, 1] where 0 = collapsed, 1 = expanded. */ private float mSlideOffset; /** * How far in pixels the slideable panel may move. */ private int mSlideRange; /** * Maximum sliding panel movement in expanded state */ private float mMaxSlideOffset = DEFAULT_MAX_SLIDING_OFFSET; /** * An anchor point where the panel can stop during sliding */ private float mAnchorPoint = 1.f; /** * A panel view is locked into internal scrolling or another condition that * is preventing a drag. 
*/ private boolean mIsUnableToDrag; /** * Flag indicating that sliding feature is enabled\disabled */ private boolean mIsTouchEnabled; private float mPrevMotionX; private float mPrevMotionY; private float mInitialMotionX; private float mInitialMotionY; private boolean mIsScrollableViewHandlingTouch = false; private final List<PanelSlideListener> mPanelSlideListeners = new CopyOnWriteArrayList<>(); private View.OnClickListener mFadeOnClickListener; private final ViewDragHelper mDragHelper; private final CanvasSaveProxyFactory mCanvasSaveProxyFactory; private CanvasSaveProxy mCanvasSaveProxy; /** * Stores whether or not the pane was expanded the last time it was slideable. * If expand/collapse operations are invoked this state is modified. Used by * instance state save/restore. */ private boolean mFirstLayout = true; private final Rect mTmpRect = new Rect(); /** * Listener for monitoring events about sliding panes. */ public interface PanelSlideListener { /** * Called when a sliding pane's position changes. * * @param panel The child view that was moved * @param slideOffset The new offset of this sliding pane within its range, from 0-1 */ void onPanelSlide(View panel, float slideOffset); /** * Called when a sliding panel state changes * * @param panel The child view that was slid to an collapsed position */ void onPanelStateChanged(View panel, PanelState previousState, PanelState newState); } /** * No-op stubs for {@link PanelSlideListener}. If you only want to implement a subset * of the listener methods you can extend this instead of implement the full interface. 
*/ public static class SimplePanelSlideListener implements PanelSlideListener { @Override public void onPanelSlide(View panel, float slideOffset) { } @Override public void onPanelStateChanged(View panel, PanelState previousState, PanelState newState) { } } public SlidingUpPanelLayout(Context context) { this(context, null); } public SlidingUpPanelLayout(Context context, AttributeSet attrs) { this(context, attrs, 0); } public SlidingUpPanelLayout(Context context, AttributeSet attrs, int defStyle) { super(context, attrs, defStyle); mCanvasSaveProxyFactory = new CanvasSaveProxyFactory(); if (isInEditMode()) { mShadowDrawable = null; mDragHelper = null; return; } Interpolator scrollerInterpolator = null; if (attrs != null) { TypedArray defAttrs = context.obtainStyledAttributes(attrs, DEFAULT_ATTRS); try { int gravity = defAttrs.getInt(0, Gravity.NO_GRAVITY); setGravity(gravity); } finally { defAttrs.recycle(); } TypedArray ta = context.obtainStyledAttributes(attrs, R.styleable.SlidingUpPanelLayout); try { mPanelHeight = ta.getDimensionPixelSize(R.styleable.SlidingUpPanelLayout_umanoPanelHeight, -1); mShadowHeight = ta.getDimensionPixelSize(R.styleable.SlidingUpPanelLayout_umanoShadowHeight, -1); mParallaxOffset = ta.getDimensionPixelSize(R.styleable.SlidingUpPanelLayout_umanoParallaxOffset, -1); mMinFlingVelocity = ta.getInt(R.styleable.SlidingUpPanelLayout_umanoFlingVelocity, DEFAULT_MIN_FLING_VELOCITY); mCoveredFadeColor = ta.getColor(R.styleable.SlidingUpPanelLayout_umanoFadeColor, DEFAULT_FADE_COLOR); mDragViewResId = ta.getResourceId(R.styleable.SlidingUpPanelLayout_umanoDragView, -1); mScrollableViewResId = ta.getResourceId(R.styleable.SlidingUpPanelLayout_umanoScrollableView, -1); mOverlayContent = ta.getBoolean(R.styleable.SlidingUpPanelLayout_umanoOverlay, DEFAULT_OVERLAY_FLAG); mClipPanel = ta.getBoolean(R.styleable.SlidingUpPanelLayout_umanoClipPanel, DEFAULT_CLIP_PANEL_FLAG); mAnchorPoint = ta.getFloat(R.styleable.SlidingUpPanelLayout_umanoAnchorPoint, 
DEFAULT_ANCHOR_POINT); mSlideState = PanelState.values()[ta.getInt(R.styleable.SlidingUpPanelLayout_umanoInitialState, DEFAULT_SLIDE_STATE.ordinal())]; int interpolatorResId = ta.getResourceId(R.styleable.SlidingUpPanelLayout_umanoScrollInterpolator, -1); if (interpolatorResId != -1) { scrollerInterpolator = AnimationUtils.loadInterpolator(context, interpolatorResId); } } finally { ta.recycle(); } } final float density = context.getResources().getDisplayMetrics().density; if (mPanelHeight == -1) { mPanelHeight = (int) (DEFAULT_PANEL_HEIGHT * density + 0.5f); } if (mShadowHeight == -1) { mShadowHeight = (int) (DEFAULT_SHADOW_HEIGHT * density + 0.5f); } if (mParallaxOffset == -1) { mParallaxOffset = (int) (DEFAULT_PARALLAX_OFFSET * density); } // If the shadow height is zero, don't show the shadow if (mShadowHeight > 0) { if (mIsSlidingUp) { mShadowDrawable = ContextCompat.getDrawable(context, R.drawable.above_shadow); } else { mShadowDrawable = ContextCompat.getDrawable(context, R.drawable.below_shadow); } } else { mShadowDrawable = null; } setWillNotDraw(false); mDragHelper = ViewDragHelper.create(this, 0.5f, scrollerInterpolator, new DragHelperCallback()); mDragHelper.setMinVelocity(mMinFlingVelocity * density); mIsTouchEnabled = true; } /** * Set the Drag View after the view is inflated */ @Override protected void onFinishInflate() { super.onFinishInflate(); if (mDragViewResId != -1) { setDragView(findViewById(mDragViewResId)); } if (mScrollableViewResId != -1) { setScrollableView(findViewById(mScrollableViewResId)); } } public void setGravity(int gravity) { if (gravity != Gravity.TOP && gravity != Gravity.BOTTOM) { throw new IllegalArgumentException("gravity must be set to either top or bottom"); } mIsSlidingUp = gravity == Gravity.BOTTOM; if (!mFirstLayout) { requestLayout(); } } /** * Set the color used to fade the pane covered by the sliding pane out when the pane * will become fully covered in the expanded state. 
* * @param color An ARGB-packed color value */ public void setCoveredFadeColor(int color) { mCoveredFadeColor = color; requestLayout(); } /** * @return The ARGB-packed color value used to fade the fixed pane */ public int getCoveredFadeColor() { return mCoveredFadeColor; } /** * Set sliding enabled flag * * @param enabled flag value */ public void setTouchEnabled(boolean enabled) { mIsTouchEnabled = enabled; } public boolean isTouchEnabled() { return mIsTouchEnabled && mSlideableView != null && mSlideState != PanelState.HIDDEN; } /** * Set the collapsed panel height in pixels * * @param val A height in pixels */ public void setPanelHeight(int val) { if (getPanelHeight() == val) { return; } mPanelHeight = val; boolean onCollapsedMode = getPanelState() == PanelState.COLLAPSED; if (!mFirstLayout) { if (!onCollapsedMode) { requestLayout(); return; } } if (onCollapsedMode && !smoothToBottom()) { // Only invalidating when animation was not done invalidate(); } } protected boolean smoothToBottom() { return smoothSlideTo(0, 0); } /** * @return The current shadow height */ public int getShadowHeight() { return mShadowHeight; } /** * Set the shadow height * * @param val A height in pixels */ public void setShadowHeight(int val) { mShadowHeight = val; if (!mFirstLayout) { invalidate(); } } /** * @return The current collapsed panel height */ public int getPanelHeight() { return mPanelHeight; } /** * @return The current parallax offset */ public int getCurrentParallaxOffset() { // Clamp slide offset at zero for parallax computation; int offset = (int) (mParallaxOffset * Math.max(mSlideOffset, 0)); return mIsSlidingUp ? 
-offset : offset; } /** * Set parallax offset for the panel * * @param val A height in pixels */ public void setParallaxOffset(int val) { mParallaxOffset = val; if (!mFirstLayout) { requestLayout(); } } /** * @return The current minimin fling velocity */ public int getMinFlingVelocity() { return mMinFlingVelocity; } /** * Sets the minimum fling velocity for the panel * * @param val the new value */ public void setMinFlingVelocity(int val) { mMinFlingVelocity = val; } public void addPanelSlideListener(PanelSlideListener listener) { synchronized (mPanelSlideListeners) { mPanelSlideListeners.add(listener); } } public void removePanelSlideListener(PanelSlideListener listener) { synchronized (mPanelSlideListeners) { mPanelSlideListeners.remove(listener); } } /** * Provides an on click for the portion of the main view that is dimmed. The listener is not * triggered if the panel is in a collapsed or a hidden position. If the on click listener is * not provided, the clicks on the dimmed area are passed through to the main layout. */ public void setFadeOnClickListener(View.OnClickListener listener) { mFadeOnClickListener = listener; } /** * Set the draggable view portion. Use to null, to allow the whole panel to be draggable * * @param dragView A view that will be used to drag the panel. */ public void setDragView(View dragView) { if (mDragView != null) { mDragView.setOnClickListener(null); } mDragView = dragView; if (mDragView != null) { mDragView.setClickable(true); mDragView.setFocusable(false); mDragView.setFocusableInTouchMode(false); mDragView.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { if (!isEnabled() || !isTouchEnabled()) return; if (mSlideState != PanelState.EXPANDED && mSlideState != PanelState.ANCHORED) { if (mAnchorPoint < DEFAULT_ANCHOR_POINT) { setPanelState(PanelState.ANCHORED); } else { setPanelState(PanelState.EXPANDED); } } else { setPanelState(PanelState.COLLAPSED); } } }); } } /** * Set the draggable view portion. 
Use to null, to allow the whole panel to be draggable * * @param dragViewResId The resource ID of the new drag view */ public void setDragView(int dragViewResId) { mDragViewResId = dragViewResId; setDragView(findViewById(dragViewResId)); } /** * Set the scrollable child of the sliding layout. If set, scrolling will be transfered between * the panel and the view when necessary * * @param scrollableView The scrollable view */ public void setScrollableView(View scrollableView) { mScrollableView = scrollableView; } public View getScrollableView() { return mScrollableView; } /** * Sets the current scrollable view helper. See ScrollableViewHelper description for details. */ public void setScrollableViewHelper(ScrollableViewHelper helper) { mScrollableViewHelper = helper; } /** * Set an anchor point where the panel can stop during sliding * * @param anchorPoint A value between 0 and 1, determining the position of the anchor point * starting from the top of the layout. */ public void setAnchorPoint(float anchorPoint) { if (anchorPoint > 0 && anchorPoint <= 1) { mAnchorPoint = anchorPoint; mFirstLayout = true; requestLayout(); } } /** * Set maximum slide offset to move sliding layout in expanded state * The value must be in range of [ 0, 1] * * @param offset max sliding offset */ public void setMaxSlideOffset(float offset) { if (offset <= DEFAULT_MAX_SLIDING_OFFSET) { mMaxSlideOffset = offset; } } /** * Gets the currently set anchor point * * @return the currently set anchor point */ public float getAnchorPoint() { return mAnchorPoint; } /** * Sets whether or not the panel overlays the content */ public void setOverlayed(boolean overlayed) { mOverlayContent = overlayed; } /** * Check if the panel is set as an overlay. 
*/ public boolean isOverlayed() { return mOverlayContent; } /** * Sets whether or not the main content is clipped to the top of the panel */ public void setClipPanel(boolean clip) { mClipPanel = clip; } /** * Check whether or not the main content is clipped to the top of the panel */ public boolean isClipPanel() { return mClipPanel; } void dispatchOnPanelSlide(View panel) { synchronized (mPanelSlideListeners) { for (PanelSlideListener l : mPanelSlideListeners) { l.onPanelSlide(panel, mSlideOffset); } } } void dispatchOnPanelStateChanged(View panel, PanelState previousState, PanelState newState) { synchronized (mPanelSlideListeners) { for (PanelSlideListener l : mPanelSlideListeners) { l.onPanelStateChanged(panel, previousState, newState); } } sendAccessibilityEvent(AccessibilityEvent.TYPE_WINDOW_STATE_CHANGED); } void updateObscuredViewVisibility() { if (getChildCount() == 0) { return; } final int leftBound = getPaddingLeft(); final int rightBound = getWidth() - getPaddingRight(); final int topBound = getPaddingTop(); final int bottomBound = getHeight() - getPaddingBottom(); final int left; final int right; final int top; final int bottom; if (mSlideableView != null && hasOpaqueBackground(mSlideableView)) { left = mSlideableView.getLeft(); right = mSlideableView.getRight(); top = mSlideableView.getTop(); bottom = mSlideableView.getBottom(); } else { left = right = top = bottom = 0; } View child = getChildAt(0); final int clampedChildLeft = Math.max(leftBound, child.getLeft()); final int clampedChildTop = Math.max(topBound, child.getTop()); final int clampedChildRight = Math.min(rightBound, child.getRight()); final int clampedChildBottom = Math.min(bottomBound, child.getBottom()); final int vis; if (clampedChildLeft >= left && clampedChildTop >= top && clampedChildRight <= right && clampedChildBottom <= bottom) { vis = INVISIBLE; } else { vis = VISIBLE; } child.setVisibility(vis); } void setAllChildrenVisible() { for (int i = 0, childCount = getChildCount(); i < 
childCount; i++) { final View child = getChildAt(i); if (child.getVisibility() == INVISIBLE) { child.setVisibility(VISIBLE); } } } private static boolean hasOpaqueBackground(View v) { final Drawable bg = v.getBackground(); return bg != null && bg.getOpacity() == PixelFormat.OPAQUE; } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); mFirstLayout = true; } @Override protected void onDetachedFromWindow() { super.onDetachedFromWindow(); mFirstLayout = true; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { final int widthMode = MeasureSpec.getMode(widthMeasureSpec); final int widthSize = MeasureSpec.getSize(widthMeasureSpec); final int heightMode = MeasureSpec.getMode(heightMeasureSpec); final int heightSize = MeasureSpec.getSize(heightMeasureSpec); if (widthMode != MeasureSpec.EXACTLY && widthMode != MeasureSpec.AT_MOST) { throw new IllegalStateException("Width must have an exact value or MATCH_PARENT"); } else if (heightMode != MeasureSpec.EXACTLY && heightMode != MeasureSpec.AT_MOST) { throw new IllegalStateException("Height must have an exact value or MATCH_PARENT"); } final int childCount = getChildCount(); if (childCount != 2) { throw new IllegalStateException("Sliding up panel layout must have exactly 2 children!"); } mMainView = getChildAt(0); mSlideableView = getChildAt(1); if (mDragView == null) { setDragView(mSlideableView); } // If the sliding panel is not visible, then put the whole view in the hidden state if (mSlideableView.getVisibility() != VISIBLE) { mSlideState = PanelState.HIDDEN; } int layoutHeight = heightSize - getPaddingTop() - getPaddingBottom(); int layoutWidth = widthSize - getPaddingLeft() - getPaddingRight(); // First pass. Measure based on child LayoutParams width/height. 
for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); // We always measure the sliding panel in order to know it's height (needed for show panel) if (child.getVisibility() == GONE && i == 0) { continue; } int height = layoutHeight; int width = layoutWidth; if (child == mMainView) { if (!mOverlayContent && mSlideState != PanelState.HIDDEN) { height -= mPanelHeight; } width -= lp.leftMargin + lp.rightMargin; } else if (child == mSlideableView) { // The slideable view should be aware of its top margin. // See https://github.com/umano/AndroidSlidingUpPanel/issues/412. height -= lp.topMargin; } int childWidthSpec; if (lp.width == LayoutParams.WRAP_CONTENT) { childWidthSpec = MeasureSpec.makeMeasureSpec(width, MeasureSpec.AT_MOST); } else if (lp.width == LayoutParams.MATCH_PARENT) { childWidthSpec = MeasureSpec.makeMeasureSpec(width, MeasureSpec.EXACTLY); } else { childWidthSpec = MeasureSpec.makeMeasureSpec(lp.width, MeasureSpec.EXACTLY); } int childHeightSpec; if (lp.height == LayoutParams.WRAP_CONTENT) { childHeightSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.AT_MOST); } else { // Modify the height based on the weight. 
if (lp.weight > 0 && lp.weight < 1) { height = (int) (height * lp.weight); } else if (lp.height != LayoutParams.MATCH_PARENT) { height = lp.height; } childHeightSpec = MeasureSpec.makeMeasureSpec(height, MeasureSpec.EXACTLY); } child.measure(childWidthSpec, childHeightSpec); if (child == mSlideableView) { mSlideRange = mSlideableView.getMeasuredHeight() - mPanelHeight; } } setMeasuredDimension(widthSize, heightSize); } @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { final int paddingLeft = getPaddingLeft(); final int paddingTop = getPaddingTop(); final int childCount = getChildCount(); if (mFirstLayout) { switch (mSlideState) { case EXPANDED: mSlideOffset = mMaxSlideOffset; break; case ANCHORED: mSlideOffset = mAnchorPoint; break; case HIDDEN: int newTop = computePanelTopPosition(0.0f) + (mIsSlidingUp ? +mPanelHeight : -mPanelHeight); mSlideOffset = computeSlideOffset(newTop); break; default: mSlideOffset = 0.f; break; } } for (int i = 0; i < childCount; i++) { final View child = getChildAt(i); final LayoutParams lp = (LayoutParams) child.getLayoutParams(); // Always layout the sliding view on the first layout if (child.getVisibility() == GONE && (i == 0 || mFirstLayout)) { continue; } final int childHeight = child.getMeasuredHeight(); int childTop = paddingTop; if (child == mSlideableView) { childTop = computePanelTopPosition(mSlideOffset); } if (!mIsSlidingUp) { if (child == mMainView && !mOverlayContent) { childTop = computePanelTopPosition(mSlideOffset) + mSlideableView.getMeasuredHeight(); } } final int childBottom = childTop + childHeight; final int childLeft = paddingLeft + lp.leftMargin; final int childRight = childLeft + child.getMeasuredWidth(); child.layout(childLeft, childTop, childRight, childBottom); } if (mFirstLayout) { updateObscuredViewVisibility(); } applyParallaxForCurrentSlideOffset(); mFirstLayout = false; } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, 
oldw, oldh); // Recalculate sliding panes and their details if (h != oldh) { mFirstLayout = true; } } @Override public boolean onInterceptTouchEvent(MotionEvent ev) { // If the scrollable view is handling touch, never intercept if (mIsScrollableViewHandlingTouch || !isTouchEnabled()) { mDragHelper.abort(); return false; } final int action = ev.getAction(); final float x = ev.getX(); final float y = ev.getY(); final float adx = Math.abs(x - mInitialMotionX); final float ady = Math.abs(y - mInitialMotionY); final int dragSlop = mDragHelper.getTouchSlop(); switch (action) { case MotionEvent.ACTION_DOWN: { mIsUnableToDrag = false; mInitialMotionX = x; mInitialMotionY = y; if (!isViewUnder(mDragView, (int) x, (int) y)) { mDragHelper.cancel(); mIsUnableToDrag = true; return false; } break; } case MotionEvent.ACTION_MOVE: { if (ady > dragSlop && adx > ady) { mDragHelper.cancel(); mIsUnableToDrag = true; return false; } break; } case MotionEvent.ACTION_CANCEL: case MotionEvent.ACTION_UP: // If the dragView is still dragging when we get here, we need to call processTouchEvent // so that the view is settled // Added to make scrollable views work (tokudu) if (mDragHelper.isDragging()) { mDragHelper.processTouchEvent(ev); return true; } // Check if this was a click on the faded part of the screen, and fire off the listener if there is one. 
if (ady <= dragSlop && adx <= dragSlop && mSlideOffset > 0 && !isViewUnder(mSlideableView, (int) mInitialMotionX, (int) mInitialMotionY) && mFadeOnClickListener != null) { playSoundEffect(android.view.SoundEffectConstants.CLICK); mFadeOnClickListener.onClick(this); return true; } break; } return mDragHelper.shouldInterceptTouchEvent(ev); } @Override public boolean onTouchEvent(MotionEvent ev) { if (!isEnabled() || !isTouchEnabled()) { return super.onTouchEvent(ev); } try { mDragHelper.processTouchEvent(ev); return true; } catch (Exception ex) { // Ignore the pointer out of range exception return false; } } @Override public boolean dispatchTouchEvent(MotionEvent ev) { final int action = ev.getAction(); if (!isEnabled() || !isTouchEnabled() || (mIsUnableToDrag && action != MotionEvent.ACTION_DOWN)) { mDragHelper.abort(); return super.dispatchTouchEvent(ev); } final float x = ev.getX(); final float y = ev.getY(); if (action == MotionEvent.ACTION_DOWN) { mIsScrollableViewHandlingTouch = false; mPrevMotionX = x; mPrevMotionY = y; } else if (action == MotionEvent.ACTION_MOVE) { float dx = x - mPrevMotionX; float dy = y - mPrevMotionY; mPrevMotionX = x; mPrevMotionY = y; if (Math.abs(dx) > Math.abs(dy)) { // Scrolling horizontally, so ignore return super.dispatchTouchEvent(ev); } // If the scroll view isn't under the touch, pass the // event along to the dragView. if (!isViewUnder(mScrollableView, (int) mInitialMotionX, (int) mInitialMotionY)) { return super.dispatchTouchEvent(ev); } // Which direction (up or down) is the drag moving? if (dy * (mIsSlidingUp ? 1 : -1) > 0) { // Collapsing // Is the child less than fully scrolled? // Then let the child handle it. if (mScrollableViewHelper.getScrollableViewScrollPosition(mScrollableView, mIsSlidingUp) > 0) { mIsScrollableViewHandlingTouch = true; return super.dispatchTouchEvent(ev); } // Was the child handling the touch previously? // Then we need to rejigger things so that the // drag panel gets a proper down event. 
if (mIsScrollableViewHandlingTouch) { // Send an 'UP' event to the child. MotionEvent up = MotionEvent.obtain(ev); up.setAction(MotionEvent.ACTION_CANCEL); super.dispatchTouchEvent(up); up.recycle(); // Send a 'DOWN' event to the panel. (We'll cheat // and hijack this one) ev.setAction(MotionEvent.ACTION_DOWN); } mIsScrollableViewHandlingTouch = false; return this.onTouchEvent(ev); } else if (dy * (mIsSlidingUp ? 1 : -1) < 0) { // Expanding // Is the panel less than fully expanded? // Then we'll handle the drag here. if (mSlideOffset < mMaxSlideOffset) { mIsScrollableViewHandlingTouch = false; return this.onTouchEvent(ev); } // Was the panel handling the touch previously? // Then we need to rejigger things so that the // child gets a proper down event. if (!mIsScrollableViewHandlingTouch && mDragHelper.isDragging()) { mDragHelper.cancel(); ev.setAction(MotionEvent.ACTION_DOWN); } mIsScrollableViewHandlingTouch = true; return super.dispatchTouchEvent(ev); } } else if (action == MotionEvent.ACTION_UP) { // If the scrollable view was handling the touch and we receive an up // we want to clear any previous dragging state so we don't intercept a touch stream accidentally if (mIsScrollableViewHandlingTouch) { mDragHelper.setDragState(ViewDragHelper.STATE_IDLE); } } // In all other cases, just let the default behavior take over. return super.dispatchTouchEvent(ev); } private boolean isViewUnder(View view, int x, int y) { if (view == null) return false; int[] viewLocation = new int[2]; view.getLocationOnScreen(viewLocation); int[] parentLocation = new int[2]; this.getLocationOnScreen(parentLocation); int screenX = parentLocation[0] + x; int screenY = parentLocation[1] + y; return screenX >= viewLocation[0] && screenX < viewLocation[0] + view.getWidth() && screenY >= viewLocation[1] && screenY < viewLocation[1] + view.getHeight(); } /* * Computes the top position of the panel based on the slide offset. 
*/ private int computePanelTopPosition(float slideOffset) { int slidingViewHeight = mSlideableView != null ? mSlideableView.getMeasuredHeight() : 0; int slidePixelOffset = (int) (slideOffset * mSlideRange); // Compute the top of the panel if its collapsed return mIsSlidingUp ? getMeasuredHeight() - getPaddingBottom() - mPanelHeight - slidePixelOffset : getPaddingTop() - slidingViewHeight + mPanelHeight + slidePixelOffset; } /* * Computes the slide offset based on the top position of the panel */ private float computeSlideOffset(int topPosition) { // Compute the panel top position if the panel is collapsed (offset 0) final int topBoundCollapsed = computePanelTopPosition(0); // Determine the new slide offset based on the collapsed top position and the new required // top position return (mIsSlidingUp ? (float) (topBoundCollapsed - topPosition) / mSlideRange : (float) (topPosition - topBoundCollapsed) / mSlideRange); } /** * Returns the current state of the panel as an enum. * * @return the current panel state */ public PanelState getPanelState() { return mSlideState; } /** * Change panel state to the given state with * * @param state - new panel state */ public void setPanelState(PanelState state) { // Abort any running animation, to allow state change if (mDragHelper.getViewDragState() == ViewDragHelper.STATE_SETTLING) { Log.d(TAG, "View is settling. 
Aborting animation."); mDragHelper.abort(); } if (state == null || state == PanelState.DRAGGING) { throw new IllegalArgumentException("Panel state cannot be null or DRAGGING."); } if (!isEnabled() || (!mFirstLayout && mSlideableView == null) || state == mSlideState || mSlideState == PanelState.DRAGGING) return; if (mFirstLayout) { setPanelStateInternal(state); } else { if (mSlideState == PanelState.HIDDEN) { mSlideableView.setVisibility(View.VISIBLE); requestLayout(); } switch (state) { case ANCHORED: smoothSlideTo(mAnchorPoint, 0); break; case COLLAPSED: smoothSlideTo(0, 0); break; case EXPANDED: smoothSlideTo(mMaxSlideOffset, 0); break; case HIDDEN: int newTop = computePanelTopPosition(0.0f) + (mIsSlidingUp ? +mPanelHeight : -mPanelHeight); smoothSlideTo(computeSlideOffset(newTop), 0); break; } } } private void setPanelStateInternal(PanelState state) { if (mSlideState == state) return; PanelState oldState = mSlideState; mSlideState = state; dispatchOnPanelStateChanged(this, oldState, state); } /** * Update the parallax based on the current slide offset. */ @SuppressLint("NewApi") private void applyParallaxForCurrentSlideOffset() { if (mParallaxOffset > 0) { int mainViewOffset = getCurrentParallaxOffset(); mMainView.setTranslationY(mainViewOffset); } } private void onPanelDragged(int newTop) { if (mSlideState != PanelState.DRAGGING) { mLastNotDraggingSlideState = mSlideState; } setPanelStateInternal(PanelState.DRAGGING); // Recompute the slide offset based on the new top position mSlideOffset = computeSlideOffset(newTop); applyParallaxForCurrentSlideOffset(); // Dispatch the slide event dispatchOnPanelSlide(mSlideableView); // If the slide offset is negative, and overlay is not on, we need to increase the // height of the main content LayoutParams lp = (LayoutParams) mMainView.getLayoutParams(); int defaultHeight = getHeight() - getPaddingBottom() - getPaddingTop() - (mSlideOffset < 0 ? 
0 : mPanelHeight); if (mSlideOffset <= 0 && !mOverlayContent) { // expand the main view lp.height = mIsSlidingUp ? (newTop - getPaddingBottom()) : (getHeight() - getPaddingBottom() - mSlideableView.getMeasuredHeight() - newTop); if (lp.height == defaultHeight) { lp.height = LayoutParams.MATCH_PARENT; } mMainView.requestLayout(); } else if (lp.height != LayoutParams.MATCH_PARENT && !mOverlayContent) { lp.height = LayoutParams.MATCH_PARENT; mMainView.requestLayout(); } } @Override protected boolean drawChild(Canvas canvas, View child, long drawingTime) { boolean result; if (mCanvasSaveProxy == null || !mCanvasSaveProxy.isFor(canvas)) { mCanvasSaveProxy = mCanvasSaveProxyFactory.create(canvas); } final int save = mCanvasSaveProxy.save(); if (mSlideableView != null && mSlideableView != child) { // if main view // Clip against the slider; no sense drawing what will immediately be covered, // Unless the panel is set to overlay content canvas.getClipBounds(mTmpRect); if (!mOverlayContent) { if (mIsSlidingUp) { mTmpRect.bottom = Math.min(mTmpRect.bottom, mSlideableView.getTop()); } else { mTmpRect.top = Math.max(mTmpRect.top, mSlideableView.getBottom()); } } if (mClipPanel) { canvas.clipRect(mTmpRect); } result = super.drawChild(canvas, child, drawingTime); if (mCoveredFadeColor != 0 && mSlideOffset > 0) { final int baseAlpha = (mCoveredFadeColor & 0xff000000) >>> 24; final int imag = (int) (baseAlpha * mSlideOffset); final int color = imag << 24 | (mCoveredFadeColor & 0xffffff); mCoveredFadePaint.setColor(color); canvas.drawRect(mTmpRect, mCoveredFadePaint); } } else { result = super.drawChild(canvas, child, drawingTime); } canvas.restoreToCount(save); return result; } /** * Smoothly animate mDraggingPane to the target X position within its range. * * @param slideOffset position to animate to * @param velocity initial velocity in case of fling, or 0. */ boolean smoothSlideTo(float slideOffset, int velocity) { if (!isEnabled() || mSlideableView == null) { // Nothing to do. 
return false; } int panelTop = computePanelTopPosition(slideOffset); if (mDragHelper.smoothSlideViewTo(mSlideableView, mSlideableView.getLeft(), panelTop)) { setAllChildrenVisible(); ViewCompat.postInvalidateOnAnimation(this); return true; } return false; } @Override public void computeScroll() { if (mDragHelper != null && mDragHelper.continueSettling(true)) { if (!isEnabled()) { mDragHelper.abort(); return; } ViewCompat.postInvalidateOnAnimation(this); } } @Override public void draw(Canvas c) { super.draw(c); // draw the shadow if (mShadowDrawable != null && mSlideableView != null) { final int right = mSlideableView.getRight(); final int top; final int bottom; if (mIsSlidingUp) { top = mSlideableView.getTop() - mShadowHeight; bottom = mSlideableView.getTop(); } else { top = mSlideableView.getBottom(); bottom = mSlideableView.getBottom() + mShadowHeight; } final int left = mSlideableView.getLeft(); mShadowDrawable.setBounds(left, top, right, bottom); mShadowDrawable.draw(c); } } /** * Tests scrollability within child views of v given a delta of dx. * * @param v View to test for horizontal scrollability * @param checkV Whether the view v passed should itself be checked for scrollability (true), * or just its children (false). * @param dx Delta scrolled in pixels * @param x X coordinate of the active touch point * @param y Y coordinate of the active touch point * @return true if child views of v can be scrolled by delta of dx. */ protected boolean canScroll(View v, boolean checkV, int dx, int x, int y) { if (v instanceof ViewGroup) { final ViewGroup group = (ViewGroup) v; final int scrollX = v.getScrollX(); final int scrollY = v.getScrollY(); final int count = group.getChildCount(); // Count backwards - let topmost views consume scroll distance first. 
for (int i = count - 1; i >= 0; i--) { final View child = group.getChildAt(i); if (x + scrollX >= child.getLeft() && x + scrollX < child.getRight() && y + scrollY >= child.getTop() && y + scrollY < child.getBottom() && canScroll(child, true, dx, x + scrollX - child.getLeft(), y + scrollY - child.getTop())) { return true; } } } return checkV && v.canScrollHorizontally(-dx); } @Override protected ViewGroup.LayoutParams generateDefaultLayoutParams() { return new LayoutParams(); } @Override protected ViewGroup.LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) { return p instanceof MarginLayoutParams ? new LayoutParams((MarginLayoutParams) p) : new LayoutParams(p); } @Override protected boolean checkLayoutParams(ViewGroup.LayoutParams p) { return p instanceof LayoutParams && super.checkLayoutParams(p); } @Override public ViewGroup.LayoutParams generateLayoutParams(AttributeSet attrs) { return new LayoutParams(getContext(), attrs); } @Override public Parcelable onSaveInstanceState() { Bundle bundle = new Bundle(); bundle.putParcelable("superState", super.onSaveInstanceState()); bundle.putSerializable(SLIDING_STATE, mSlideState != PanelState.DRAGGING ? mSlideState : mLastNotDraggingSlideState); return bundle; } @Override public void onRestoreInstanceState(Parcelable state) { if (state instanceof Bundle) { Bundle bundle = (Bundle) state; mSlideState = (PanelState) bundle.getSerializable(SLIDING_STATE); mSlideState = mSlideState == null ? 
DEFAULT_SLIDE_STATE : mSlideState; state = bundle.getParcelable("superState"); } super.onRestoreInstanceState(state); } private class DragHelperCallback extends ViewDragHelper.Callback { @Override public boolean tryCaptureView(View child, int pointerId) { return !mIsUnableToDrag && child == mSlideableView; } @Override public void onViewDragStateChanged(int state) { if (mDragHelper != null && mDragHelper.getViewDragState() == ViewDragHelper.STATE_IDLE) { mSlideOffset = computeSlideOffset(mSlideableView.getTop()); applyParallaxForCurrentSlideOffset(); if (mSlideOffset == 1) { updateObscuredViewVisibility(); setPanelStateInternal(PanelState.EXPANDED); } else if (mSlideOffset == 0) { setPanelStateInternal(PanelState.COLLAPSED); } else if (mSlideOffset < 0) { setPanelStateInternal(PanelState.HIDDEN); mSlideableView.setVisibility(View.INVISIBLE); } else { updateObscuredViewVisibility(); setPanelStateInternal(PanelState.ANCHORED); } } } @Override public void onViewCaptured(View capturedChild, int activePointerId) { setAllChildrenVisible(); } @Override public void onViewPositionChanged(View changedView, int left, int top, int dx, int dy) { onPanelDragged(top); invalidate(); } @Override public void onViewReleased(View releasedChild, float xvel, float yvel) { int target; // direction is always positive if we are sliding in the expanded direction float direction = mIsSlidingUp ? 
-yvel : yvel; if (direction > 0 && mSlideOffset <= mAnchorPoint) { // swipe up -> expand and stop at anchor point target = computePanelTopPosition(mAnchorPoint); } else if (direction > 0 && mSlideOffset > mAnchorPoint) { // swipe up past anchor -> expand target = computePanelTopPosition(mMaxSlideOffset); } else if (direction < 0 && mSlideOffset >= mAnchorPoint) { // swipe down -> collapse and stop at anchor point target = computePanelTopPosition(mAnchorPoint); } else if (direction < 0 && mSlideOffset < mAnchorPoint) { // swipe down past anchor -> collapse target = computePanelTopPosition(0.0f); } else if (mSlideOffset >= (1.f + mAnchorPoint) / 2) { // zero velocity, and far enough from anchor point => expand to the top target = computePanelTopPosition(mMaxSlideOffset); } else if (mSlideOffset >= mAnchorPoint / 2) { // zero velocity, and close enough to anchor point => go to anchor target = computePanelTopPosition(mAnchorPoint); } else { // settle at the bottom target = computePanelTopPosition(0.0f); } if (mDragHelper != null) { mDragHelper.settleCapturedViewAt(releasedChild.getLeft(), target); } invalidate(); } @Override public int getViewVerticalDragRange(View child) { return mSlideRange; } @Override public int clampViewPositionVertical(View child, int top, int dy) { final int collapsedTop = computePanelTopPosition(0.f); final int expandedTop = computePanelTopPosition(mMaxSlideOffset); if (mIsSlidingUp) { return Math.min(Math.max(top, expandedTop), collapsedTop); } else { return Math.min(Math.max(top, collapsedTop), expandedTop); } } } public static class LayoutParams extends ViewGroup.MarginLayoutParams { private static final int[] ATTRS = new int[]{ android.R.attr.layout_weight }; public float weight = 0; public LayoutParams() { super(MATCH_PARENT, MATCH_PARENT); } public LayoutParams(int width, int height) { super(width, height); } public LayoutParams(int width, int height, float weight) { super(width, height); this.weight = weight; } public 
LayoutParams(android.view.ViewGroup.LayoutParams source) { super(source); } public LayoutParams(MarginLayoutParams source) { super(source); } public LayoutParams(LayoutParams source) { super(source); } public LayoutParams(Context c, AttributeSet attrs) { super(c, attrs); final TypedArray ta = c.obtainStyledAttributes(attrs, ATTRS); try { this.weight = ta.getFloat(0, 0); } finally { ta.recycle(); } } } }
rename
library/src/main/java/com/sothree/slidinguppanel/SlidingUpPanelLayout.java
rename
<ide><path>ibrary/src/main/java/com/sothree/slidinguppanel/SlidingUpPanelLayout.java <ide> } <ide> <ide> private static boolean hasOpaqueBackground(View v) { <del> final Drawable bg = v.getBackground(); <del> return bg != null && bg.getOpacity() == PixelFormat.OPAQUE; <add> final Drawable background = v.getBackground(); <add> return background != null && background.getOpacity() == PixelFormat.OPAQUE; <ide> } <ide> <ide> @Override
Java
apache-2.0
15beae366ff0a689969c26686a6aacb8dc289fbe
0
quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus
package io.quarkus.bootstrap.runner; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.FileVisitResult; import java.nio.file.FileVisitor; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.jar.Attributes; import java.util.jar.JarFile; import java.util.jar.Manifest; import java.util.zip.ZipEntry; /** * Data that reads serialized Class Path info * * This format is subject to change, and gives no compatibility guarantees, it is only intended to be used * with the same version of Quarkus that created it. */ public class SerializedApplication { public static final String META_INF_VERSIONS = "META-INF/versions/"; // the files immediately (i.e. 
not recursively) under these paths should all be indexed private static final Set<String> FULLY_INDEXED_PATHS = new LinkedHashSet<>(Arrays.asList("", "META-INF/services")); private static final int MAGIC = 0XF0315432; private static final int VERSION = 2; private final RunnerClassLoader runnerClassLoader; private final String mainClass; public SerializedApplication(RunnerClassLoader runnerClassLoader, String mainClass) { this.runnerClassLoader = runnerClassLoader; this.mainClass = mainClass; } public RunnerClassLoader getRunnerClassLoader() { return runnerClassLoader; } public String getMainClass() { return mainClass; } public static void write(OutputStream outputStream, String mainClass, Path applicationRoot, List<Path> classPath, List<Path> parentFirst, List<String> nonExistentResources) throws IOException { try (DataOutputStream data = new DataOutputStream(outputStream)) { data.writeInt(MAGIC); data.writeInt(VERSION); data.writeUTF(mainClass); data.writeShort(classPath.size()); Map<String, List<Integer>> directlyIndexedResourcesToCPJarIndex = new HashMap<>(); for (int i = 0; i < classPath.size(); i++) { Path jar = classPath.get(i); String relativePath = applicationRoot.relativize(jar).toString().replace('\\', '/'); data.writeUTF(relativePath); Collection<String> resources = writeJar(data, jar); for (String resource : resources) { directlyIndexedResourcesToCPJarIndex.computeIfAbsent(resource, s -> new ArrayList<>()).add(i); } } Set<String> parentFirstPackages = new HashSet<>(); for (Path jar : parentFirst) { collectPackages(jar, parentFirstPackages); } data.writeShort(parentFirstPackages.size()); for (String p : parentFirstPackages) { data.writeUTF(p.replace('/', '.').replace('\\', '.')); } data.writeShort(nonExistentResources.size()); for (String nonExistentResource : nonExistentResources) { data.writeUTF(nonExistentResource); } data.writeShort(directlyIndexedResourcesToCPJarIndex.size()); for (Map.Entry<String, List<Integer>> entry : 
directlyIndexedResourcesToCPJarIndex.entrySet()) { data.writeUTF(entry.getKey()); data.writeShort(entry.getValue().size()); for (Integer index : entry.getValue()) { data.writeShort(index); } } data.flush(); } } public static SerializedApplication read(InputStream inputStream, Path appRoot) throws IOException { try (DataInputStream in = new DataInputStream(inputStream)) { if (in.readInt() != MAGIC) { throw new RuntimeException("Wrong magic number"); } if (in.readInt() != VERSION) { throw new RuntimeException("Wrong class path version"); } String mainClass = in.readUTF(); Map<String, ClassLoadingResource[]> resourceDirectoryMap = new HashMap<>(); Set<String> parentFirstPackages = new HashSet<>(); int numPaths = in.readUnsignedShort(); ClassLoadingResource[] allClassLoadingResources = new ClassLoadingResource[numPaths]; for (int pathCount = 0; pathCount < numPaths; pathCount++) { String path = in.readUTF(); boolean hasManifest = in.readBoolean(); ManifestInfo info = null; if (hasManifest) { info = new ManifestInfo(readNullableString(in), readNullableString(in), readNullableString(in), readNullableString(in), readNullableString(in), readNullableString(in)); } JarResource resource = new JarResource(info, appRoot.resolve(path)); allClassLoadingResources[pathCount] = resource; int numDirs = in.readUnsignedShort(); for (int i = 0; i < numDirs; ++i) { String dir = in.readUTF(); int j = dir.indexOf('/'); while (j >= 0) { addResourceDir(dir.substring(0, j), resource, resourceDirectoryMap); j = dir.indexOf('/', j + 1); } addResourceDir(dir, resource, resourceDirectoryMap); } } int packages = in.readUnsignedShort(); for (int i = 0; i < packages; ++i) { parentFirstPackages.add(in.readUTF()); } Set<String> nonExistentResources = new HashSet<>(); int nonExistentResourcesSize = in.readUnsignedShort(); for (int i = 0; i < nonExistentResourcesSize; i++) { nonExistentResources.add(in.readUTF()); } // this map is populated correctly because the JarResource entries are added to 
allClassLoadingResources // in the same order as the classpath was written during the writing of the index Map<String, ClassLoadingResource[]> directlyIndexedResourcesIndexMap = new HashMap<>(); int directlyIndexedSize = in.readUnsignedShort(); for (int i = 0; i < directlyIndexedSize; i++) { String resource = in.readUTF(); int indexesSize = in.readUnsignedShort(); ClassLoadingResource[] matchingResources = new ClassLoadingResource[indexesSize]; for (int j = 0; j < indexesSize; j++) { matchingResources[j] = allClassLoadingResources[in.readUnsignedShort()]; } directlyIndexedResourcesIndexMap.put(resource, matchingResources); } RunnerClassLoader runnerClassLoader = new RunnerClassLoader(ClassLoader.getSystemClassLoader(), resourceDirectoryMap, parentFirstPackages, nonExistentResources, FULLY_INDEXED_PATHS, directlyIndexedResourcesIndexMap); for (ClassLoadingResource classLoadingResource : allClassLoadingResources) { classLoadingResource.init(runnerClassLoader); } return new SerializedApplication(runnerClassLoader, mainClass); } } private static void addResourceDir(String dir, JarResource resource, Map<String, ClassLoadingResource[]> resourceDirectoryMap) { ClassLoadingResource[] existing = resourceDirectoryMap.get(dir); if (existing == null) { resourceDirectoryMap.put(dir, new ClassLoadingResource[] { resource }); } else { ClassLoadingResource[] newResources = new ClassLoadingResource[existing.length + 1]; System.arraycopy(existing, 0, newResources, 0, existing.length); newResources[existing.length] = resource; resourceDirectoryMap.put(dir, newResources); } } private static String readNullableString(DataInputStream in) throws IOException { if (in.readBoolean()) { return in.readUTF(); } return null; } /** * @return a List of all resources that exist in the paths that we desire to have fully indexed * (configured via {@code FULLY_INDEXED_PATHS}) */ private static List<String> writeJar(DataOutputStream out, Path jar) throws IOException { try (JarFile zip = new 
JarFile(jar.toFile())) { Manifest manifest = zip.getManifest(); if (manifest == null) { out.writeBoolean(false); } else { //write the manifest Attributes ma = manifest.getMainAttributes(); if (ma == null) { out.writeBoolean(false); } else { out.writeBoolean(true); writeNullableString(out, ma.getValue(Attributes.Name.SPECIFICATION_TITLE)); writeNullableString(out, ma.getValue(Attributes.Name.SPECIFICATION_VERSION)); writeNullableString(out, ma.getValue(Attributes.Name.SPECIFICATION_VENDOR)); writeNullableString(out, ma.getValue(Attributes.Name.IMPLEMENTATION_TITLE)); writeNullableString(out, ma.getValue(Attributes.Name.IMPLEMENTATION_VERSION)); writeNullableString(out, ma.getValue(Attributes.Name.IMPLEMENTATION_VENDOR)); } } Set<String> dirs = new HashSet<>(); Map<String, List<String>> fullyIndexedPaths = new HashMap<>(); Enumeration<? extends ZipEntry> entries = zip.entries(); boolean hasDefaultPackage = false; while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); if (!entry.getName().contains("/")) { hasDefaultPackage = true; if (!entry.getName().isEmpty() && FULLY_INDEXED_PATHS.contains("")) { fullyIndexedPaths.computeIfAbsent("", s -> new ArrayList<>(10)).add(entry.getName()); } } else if (!entry.isDirectory()) { //some jars don't have correct directory entries //so we look at the file paths instead //looking at you h2 final int index = entry.getName().lastIndexOf('/'); dirs.add(entry.getName().substring(0, index)); if (entry.getName().startsWith(META_INF_VERSIONS)) { //multi release jar //we add all packages here //they may no be relevant for some versions, but that is fine String part = entry.getName().substring(META_INF_VERSIONS.length()); int slash = part.indexOf("/"); if (slash != -1) { final int subIndex = part.lastIndexOf('/'); if (subIndex != slash) { dirs.add(part.substring(slash + 1, subIndex)); } } } for (String path : FULLY_INDEXED_PATHS) { if (path.isEmpty()) { continue; } if (entry.getName().startsWith(path)) { 
fullyIndexedPaths.computeIfAbsent(path, s -> new ArrayList<>(10)).add(entry.getName()); } } } } if (hasDefaultPackage) { dirs.add(""); } out.writeShort(dirs.size()); for (String i : dirs) { out.writeUTF(i); } List<String> result = new ArrayList<>(); for (List<String> values : fullyIndexedPaths.values()) { result.addAll(values); } return result; } } private static void collectPackages(Path jar, Set<String> dirs) throws IOException { if (Files.isDirectory(jar)) { //this can only really happen when testing quarkus itself //but is included for completeness Files.walkFileTree(jar, new FileVisitor<Path>() { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { dirs.add(jar.relativize(dir).toString()); return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException { return FileVisitResult.CONTINUE; } @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { return FileVisitResult.CONTINUE; } }); } else { try (JarFile zip = new JarFile(jar.toFile())) { Enumeration<? extends ZipEntry> entries = zip.entries(); while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); if (!entry.isDirectory()) { //some jars don't have correct directory entries //so we look at the file paths instead //looking at you h2 final int index = entry.getName().lastIndexOf('/'); if (index > 0) { dirs.add(entry.getName().substring(0, index)); } } } } } } private static void writeNullableString(DataOutputStream out, String string) throws IOException { if (string == null) { out.writeBoolean(false); } else { out.writeBoolean(true); out.writeUTF(string); } } }
independent-projects/bootstrap/runner/src/main/java/io/quarkus/bootstrap/runner/SerializedApplication.java
package io.quarkus.bootstrap.runner; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.file.FileVisitResult; import java.nio.file.FileVisitor; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.attribute.BasicFileAttributes; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.jar.Attributes; import java.util.jar.JarFile; import java.util.jar.Manifest; import java.util.zip.ZipEntry; /** * Data that reads serialized Class Path info * * This format is subject to change, and gives no compatibility guarantees, it is only intended to be used * with the same version of Quarkus that created it. */ public class SerializedApplication { public static final String META_INF_VERSIONS = "META-INF/versions/"; // the files immediately (i.e. 
not recursively) under these paths should all be indexed private static final Set<String> FULLY_INDEXED_PATHS = new LinkedHashSet<>(Arrays.asList("", "META-INF/services")); private static final int MAGIC = 0XF0315432; private static final int VERSION = 2; private final RunnerClassLoader runnerClassLoader; private final String mainClass; public SerializedApplication(RunnerClassLoader runnerClassLoader, String mainClass) { this.runnerClassLoader = runnerClassLoader; this.mainClass = mainClass; } public RunnerClassLoader getRunnerClassLoader() { return runnerClassLoader; } public String getMainClass() { return mainClass; } public static void write(OutputStream outputStream, String mainClass, Path applicationRoot, List<Path> classPath, List<Path> parentFirst, List<String> nonExistentResources) throws IOException { try (DataOutputStream data = new DataOutputStream(outputStream)) { data.writeInt(MAGIC); data.writeInt(VERSION); data.writeUTF(mainClass); data.writeShort(classPath.size()); Map<String, List<Integer>> directlyIndexedResourcesToCPJarIndex = new HashMap<>(); for (int i = 0; i < classPath.size(); i++) { Path jar = classPath.get(i); String relativePath = applicationRoot.relativize(jar).toString().replace('\\', '/'); data.writeUTF(relativePath); Collection<String> resources = writeJar(data, jar); for (String resource : resources) { directlyIndexedResourcesToCPJarIndex.computeIfAbsent(resource, s -> new ArrayList<>()).add(i); } } Set<String> parentFirstPackages = new HashSet<>(); for (Path jar : parentFirst) { collectPackages(jar, parentFirstPackages); } data.writeShort(parentFirstPackages.size()); for (String p : parentFirstPackages) { data.writeUTF(p.replace('/', '.').replace('\\', '.')); } data.writeShort(nonExistentResources.size()); for (String nonExistentResource : nonExistentResources) { data.writeUTF(nonExistentResource); } data.writeShort(directlyIndexedResourcesToCPJarIndex.size()); for (Map.Entry<String, List<Integer>> entry : 
directlyIndexedResourcesToCPJarIndex.entrySet()) { data.writeUTF(entry.getKey()); data.writeShort(entry.getValue().size()); for (Integer index : entry.getValue()) { data.writeShort(index); } } data.flush(); } } public static SerializedApplication read(InputStream inputStream, Path appRoot) throws IOException { try (DataInputStream in = new DataInputStream(inputStream)) { if (in.readInt() != MAGIC) { throw new RuntimeException("Wrong magic number"); } if (in.readInt() != VERSION) { throw new RuntimeException("Wrong class path version"); } String mainClass = in.readUTF(); Map<String, ClassLoadingResource[]> resourceDirectoryMap = new HashMap<>(); Set<String> parentFirstPackages = new HashSet<>(); int numPaths = in.readUnsignedShort(); ClassLoadingResource[] allClassLoadingResources = new ClassLoadingResource[numPaths]; for (int pathCount = 0; pathCount < numPaths; pathCount++) { String path = in.readUTF(); boolean hasManifest = in.readBoolean(); ManifestInfo info = null; if (hasManifest) { info = new ManifestInfo(readNullableString(in), readNullableString(in), readNullableString(in), readNullableString(in), readNullableString(in), readNullableString(in)); } JarResource resource = new JarResource(info, appRoot.resolve(path)); allClassLoadingResources[pathCount] = resource; int numDirs = in.readUnsignedShort(); for (int i = 0; i < numDirs; ++i) { String dir = in.readUTF(); ClassLoadingResource[] existing = resourceDirectoryMap.get(dir); if (existing == null) { resourceDirectoryMap.put(dir, new ClassLoadingResource[] { resource }); } else { ClassLoadingResource[] newResources = new ClassLoadingResource[existing.length + 1]; System.arraycopy(existing, 0, newResources, 0, existing.length); newResources[existing.length] = resource; resourceDirectoryMap.put(dir, newResources); } } } int packages = in.readUnsignedShort(); for (int i = 0; i < packages; ++i) { parentFirstPackages.add(in.readUTF()); } Set<String> nonExistentResources = new HashSet<>(); int 
nonExistentResourcesSize = in.readUnsignedShort(); for (int i = 0; i < nonExistentResourcesSize; i++) { nonExistentResources.add(in.readUTF()); } // this map is populated correctly because the JarResource entries are added to allClassLoadingResources // in the same order as the classpath was written during the writing of the index Map<String, ClassLoadingResource[]> directlyIndexedResourcesIndexMap = new HashMap<>(); int directlyIndexedSize = in.readUnsignedShort(); for (int i = 0; i < directlyIndexedSize; i++) { String resource = in.readUTF(); int indexesSize = in.readUnsignedShort(); ClassLoadingResource[] matchingResources = new ClassLoadingResource[indexesSize]; for (int j = 0; j < indexesSize; j++) { matchingResources[j] = allClassLoadingResources[in.readUnsignedShort()]; } directlyIndexedResourcesIndexMap.put(resource, matchingResources); } RunnerClassLoader runnerClassLoader = new RunnerClassLoader(ClassLoader.getSystemClassLoader(), resourceDirectoryMap, parentFirstPackages, nonExistentResources, FULLY_INDEXED_PATHS, directlyIndexedResourcesIndexMap); for (ClassLoadingResource classLoadingResource : allClassLoadingResources) { classLoadingResource.init(runnerClassLoader); } return new SerializedApplication(runnerClassLoader, mainClass); } } private static String readNullableString(DataInputStream in) throws IOException { if (in.readBoolean()) { return in.readUTF(); } return null; } /** * @return a List of all resources that exist in the paths that we desire to have fully indexed * (configured via {@code FULLY_INDEXED_PATHS}) */ private static List<String> writeJar(DataOutputStream out, Path jar) throws IOException { try (JarFile zip = new JarFile(jar.toFile())) { Manifest manifest = zip.getManifest(); if (manifest == null) { out.writeBoolean(false); } else { //write the manifest Attributes ma = manifest.getMainAttributes(); if (ma == null) { out.writeBoolean(false); } else { out.writeBoolean(true); writeNullableString(out, 
ma.getValue(Attributes.Name.SPECIFICATION_TITLE)); writeNullableString(out, ma.getValue(Attributes.Name.SPECIFICATION_VERSION)); writeNullableString(out, ma.getValue(Attributes.Name.SPECIFICATION_VENDOR)); writeNullableString(out, ma.getValue(Attributes.Name.IMPLEMENTATION_TITLE)); writeNullableString(out, ma.getValue(Attributes.Name.IMPLEMENTATION_VERSION)); writeNullableString(out, ma.getValue(Attributes.Name.IMPLEMENTATION_VENDOR)); } } Set<String> dirs = new HashSet<>(); Map<String, List<String>> fullyIndexedPaths = new HashMap<>(); Enumeration<? extends ZipEntry> entries = zip.entries(); boolean hasDefaultPackage = false; while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); if (!entry.getName().contains("/")) { hasDefaultPackage = true; if (!entry.getName().isEmpty() && FULLY_INDEXED_PATHS.contains("")) { fullyIndexedPaths.computeIfAbsent("", s -> new ArrayList<>(10)).add(entry.getName()); } } else if (!entry.isDirectory()) { //some jars don't have correct directory entries //so we look at the file paths instead //looking at you h2 final int index = entry.getName().lastIndexOf('/'); dirs.add(entry.getName().substring(0, index)); if (entry.getName().startsWith(META_INF_VERSIONS)) { //multi release jar //we add all packages here //they may no be relevant for some versions, but that is fine String part = entry.getName().substring(META_INF_VERSIONS.length()); int slash = part.indexOf("/"); if (slash != -1) { final int subIndex = part.lastIndexOf('/'); if (subIndex != slash) { dirs.add(part.substring(slash + 1, subIndex)); } } } for (String path : FULLY_INDEXED_PATHS) { if (path.isEmpty()) { continue; } if (entry.getName().startsWith(path)) { fullyIndexedPaths.computeIfAbsent(path, s -> new ArrayList<>(10)).add(entry.getName()); } } } } if (hasDefaultPackage) { dirs.add(""); } out.writeShort(dirs.size()); for (String i : dirs) { out.writeUTF(i); } List<String> result = new ArrayList<>(); for (List<String> values : fullyIndexedPaths.values()) 
{ result.addAll(values); } return result; } } private static void collectPackages(Path jar, Set<String> dirs) throws IOException { if (Files.isDirectory(jar)) { //this can only really happen when testing quarkus itself //but is included for completeness Files.walkFileTree(jar, new FileVisitor<Path>() { @Override public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException { dirs.add(jar.relativize(dir).toString()); return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { return FileVisitResult.CONTINUE; } @Override public FileVisitResult visitFileFailed(Path file, IOException exc) throws IOException { return FileVisitResult.CONTINUE; } @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { return FileVisitResult.CONTINUE; } }); } else { try (JarFile zip = new JarFile(jar.toFile())) { Enumeration<? extends ZipEntry> entries = zip.entries(); while (entries.hasMoreElements()) { ZipEntry entry = entries.nextElement(); if (!entry.isDirectory()) { //some jars don't have correct directory entries //so we look at the file paths instead //looking at you h2 final int index = entry.getName().lastIndexOf('/'); if (index > 0) { dirs.add(entry.getName().substring(0, index)); } } } } } } private static void writeNullableString(DataOutputStream out, String string) throws IOException { if (string == null) { out.writeBoolean(false); } else { out.writeBoolean(true); out.writeUTF(string); } } }
Register parent dirs when deserializing the app
independent-projects/bootstrap/runner/src/main/java/io/quarkus/bootstrap/runner/SerializedApplication.java
Register parent dirs when deserializing the app
<ide><path>ndependent-projects/bootstrap/runner/src/main/java/io/quarkus/bootstrap/runner/SerializedApplication.java <ide> int numDirs = in.readUnsignedShort(); <ide> for (int i = 0; i < numDirs; ++i) { <ide> String dir = in.readUTF(); <del> ClassLoadingResource[] existing = resourceDirectoryMap.get(dir); <del> if (existing == null) { <del> resourceDirectoryMap.put(dir, new ClassLoadingResource[] { resource }); <del> } else { <del> ClassLoadingResource[] newResources = new ClassLoadingResource[existing.length + 1]; <del> System.arraycopy(existing, 0, newResources, 0, existing.length); <del> newResources[existing.length] = resource; <del> resourceDirectoryMap.put(dir, newResources); <del> } <add> int j = dir.indexOf('/'); <add> while (j >= 0) { <add> addResourceDir(dir.substring(0, j), resource, resourceDirectoryMap); <add> j = dir.indexOf('/', j + 1); <add> } <add> addResourceDir(dir, resource, resourceDirectoryMap); <ide> } <ide> } <ide> int packages = in.readUnsignedShort(); <ide> classLoadingResource.init(runnerClassLoader); <ide> } <ide> return new SerializedApplication(runnerClassLoader, mainClass); <add> } <add> } <add> <add> private static void addResourceDir(String dir, JarResource resource, <add> Map<String, ClassLoadingResource[]> resourceDirectoryMap) { <add> ClassLoadingResource[] existing = resourceDirectoryMap.get(dir); <add> if (existing == null) { <add> resourceDirectoryMap.put(dir, new ClassLoadingResource[] { resource }); <add> } else { <add> ClassLoadingResource[] newResources = new ClassLoadingResource[existing.length + 1]; <add> System.arraycopy(existing, 0, newResources, 0, existing.length); <add> newResources[existing.length] = resource; <add> resourceDirectoryMap.put(dir, newResources); <ide> } <ide> } <ide>
JavaScript
agpl-3.0
4a9baba7082c593445c3bcad69fa6f2235da26af
0
hawkrives/gobbldygook,hawkrives/gobbldygook,hawkrives/gobbldygook
var _ = require('lodash') module.exports = { name: 'Hawken MacKay Rives', enrolled: 2012, graduation: 2016, studies: [ { id: 'ba', type: 'degree', abbr: 'B.A.', title: 'Bachelor of Arts', }, { id: 'bm', type: 'degree', abbr: 'B.M.', title: 'Bachelor of Music', }, { id: 'csci', type: 'major', abbr: 'CSCI', title: 'Computer Science', }, { id: 'asian', type: 'major', abbr: 'ASIAN', title: 'Asian Studies', }, { id: 'japan', type: 'concentration', abbr: 'JAPAN', title: 'Japan Studies', }, { id: 'math', type: 'concentration', abbr: 'MATH', title: 'Mathematics', }, ], schedules: [ { id: 1, title: "Schedule 1", sequence: 1, active: true, year: 2012, semester: 1, clbids: [123456, 123459, 123457, 123458] }, { id: 2, title: "Schedule 2", sequence: 2, year: 2012, semester: 1, clbids: [123456, 123459, 123460, 123458] }, ], overrides: [ { what: 'credits', with: 1 } ], // derived degrees: function() { return _.filter(this.studies, {kind: 'degree'}); }, majors: function() { return _.filter(this.studies, {kind: 'major'}); }, concentrations: function() { return _.filter(this.studies, {kind: 'concentration'}); }, clbids: function() { var activeSchedules = _.filter(this.schedules, 'active') var clbids = _.pluck(activeSchedules, 'clbids') return _.flatten(clbids) } }
mockups/demo_student.js
var _ = require('lodash') module.exports = { name: 'Hawken MacKay Rives', enrolled: 2012, graduation: 2016, studies: [ { id: 'ba', type: 'degree', abbr: 'B.A.', title: 'Bachelor of Arts', }, { id: 'bm', type: 'degree', abbr: 'B.M.', title: 'Bachelor of Music', }, { id: 'csci', type: 'major', abbr: 'CSCI', title: 'Computer Science', }, { id: 'asian', type: 'major', abbr: 'ASIAN', title: 'Asian Studies', }, { id: 'japan', type: 'concentration', abbr: 'JAPAN', title: 'Japan Studies', }, { id: 'math', type: 'concentration', abbr: 'MATH', title: 'Mathematics', }, ], schedules: [ { id: 1, title: "Schedule 1", sequence: 1, active: true, year: 2012, semester: 1, clbids: [123456, 123459, 123457, 123458] }, { id: 2, title: "Schedule 2", sequence: 2, year: 2012, semester: 1, clbids: [123456, 123459, 123460, 123458] }, ], overrides: [ { } ], // derived degrees: function() { return _.filter(this.studies, {kind: 'degree'}); }, majors: function() { return _.filter(this.studies, {kind: 'major'}); }, concentrations: function() { return _.filter(this.studies, {kind: 'concentration'}); }, clbids: function() { var activeSchedules = _.filter(this.schedules, 'active') var clbids = _.pluck(activeSchedules, 'clbids') return _.flatten(clbids) } }
Stub out an override
mockups/demo_student.js
Stub out an override
<ide><path>ockups/demo_student.js <ide> clbids: [123456, 123459, 123460, 123458] <ide> }, <ide> ], <add> <ide> overrides: [ <ide> { <del> <add> what: 'credits', <add> with: 1 <ide> } <ide> ], <ide>
Java
apache-2.0
64bcdad214ef0b6b8db966d5adbf5e866e56b5f5
0
sbespalov/strongbox,strongbox/strongbox,sbespalov/strongbox,sbespalov/strongbox,sbespalov/strongbox,strongbox/strongbox,strongbox/strongbox,strongbox/strongbox
package org.carlspring.strongbox.artifact.coordinates; import javax.persistence.Entity; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import java.util.Map; import org.carlspring.strongbox.util.PypiWheelArtifactCoordinatesUtils; import org.semver.Version; import org.apache.commons.lang3.StringUtils; /** * This class is an {@link ArtifactCoordinates} implementation for pypi artifacts * * Proper path for this coordinates is in the format of: * {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl. * Example: distribution-1.0-1-py27-none-any.whl * * @author alecg956 */ @Entity @SuppressWarnings("serial") @XmlRootElement(name = "PypiWheelArtifactCoordinates") @XmlAccessorType(XmlAccessType.NONE) @ArtifactCoordinatesLayout(name = PypiWheelArtifactCoordinates.LAYOUT_NAME, alias = PypiWheelArtifactCoordinates.LAYOUT_ALIAS) public class PypiWheelArtifactCoordinates extends AbstractArtifactCoordinates<PypiWheelArtifactCoordinates, Version> { public static final String LAYOUT_NAME = "PyPi"; public static final String LAYOUT_ALIAS = "pypi"; public static final String DISTRIBUTION = "distribution"; public static final String VERSION = "version"; public static final String BUILD = "build"; public static final String LANGUAGE_IMPLEMENTATION_VERSION = "languageImplementationVersion"; public static final String ABI = "abi"; public static final String PLATFORM = "platform"; /** * This method takes in all artifact coordinates of a PyPi Wheel filename, with build being * the empty string if it is not included in the filename * * @param distribution Uniquely identifying artifact coordinate (required) * @param version Packages current version (required) * @param build Build_tag parameter (optional) * @param languageImplementationVersion Language and Implementation version argument (required) * @param abi ABI tag parameter (required) * @param platform Platform 
tag parameter (required) */ public PypiWheelArtifactCoordinates(String distribution, String version, String build, String languageImplementationVersion, String abi, String platform) { // if any of the required arguments are empty, throw an error if (StringUtils.isBlank(distribution) || StringUtils.isBlank(version) || StringUtils.isBlank(languageImplementationVersion) || StringUtils.isBlank(abi) || StringUtils.isBlank(platform)) { throw new IllegalArgumentException("The distribution, version, languageImplementationVersion, abi, and platform fields are mandatory."); } if (!StringUtils.isBlank(build) && !Character.isDigit(build.charAt(0))) { throw new IllegalArgumentException("Illegal build tag!"); } setId(distribution); setVersion(version); setBuild(build); setLanguageImplementationVersion(languageImplementationVersion); setAbi(abi); setPlatform(platform); } /** * @param path The filename of the PyPi Wheel package * @return Returns a PyPiWheelArtifactCoordinates object with all included coordinates set */ public static PypiWheelArtifactCoordinates parse(String path) { return PypiWheelArtifactCoordinatesUtils.parse(path); } /** * @return Returns distribution coordinate value (serves as the unique ID) */ @Override public String getId() { return getCoordinate(DISTRIBUTION); } /** * @param id DISTRIBUTION coordinate will take this value */ @Override public void setId(String id) { setCoordinate(DISTRIBUTION, id); } /** * @return Returns the VERSION coordinate value */ @Override public String getVersion() { return getCoordinate(VERSION); } /** * @param version VERSION coordinate takes this value */ @Override public void setVersion(String version) { setCoordinate(VERSION, version); } /** * @return Returns the BUILD coordinate value */ @ArtifactLayoutCoordinate public String getBuild() { return getCoordinate(BUILD); } /** * @param build BUILD coordinate will take this value */ public void setBuild(String build) { setCoordinate(BUILD, build); } /** * @return Returns the 
LANGUAGE_IMPLEMENTATION_VERSION coordinate value */ @ArtifactLayoutCoordinate public String getLanguageImplementationVersion() { return getCoordinate(LANGUAGE_IMPLEMENTATION_VERSION); } /** * @param lang LANGUAGE_IMPLEMENTATION_VERSION takes this value */ public void setLanguageImplementationVersion(String lang) { setCoordinate(LANGUAGE_IMPLEMENTATION_VERSION, lang); } /** * @return Returns the ABI coordinate value */ @ArtifactLayoutCoordinate public String getAbi() { return getCoordinate(ABI); } /** * @param abi ABI coordinate takes this value */ public void setAbi(String abi) { setCoordinate(ABI, abi); } /** * @return Returns the PLATFORM coordinate value */ @ArtifactLayoutCoordinate public String getPlatform() { return getCoordinate(PLATFORM); } /** * @param platform PLATFORM coordinate takes this value */ public void setPlatform(String platform) { setCoordinate(PLATFORM, platform); } /** * @return Returns the reconstructed path from the stored coordinate values */ @Override public String toPath() { // if optional BUILD coordinate is empty, don't include it in the reconstruction if (StringUtils.isBlank(getBuild())) { return String.format("%s-%s-%s-%s-%s", getId(), getVersion(), getLanguageImplementationVersion(), getAbi(), getPlatform()) + ".whl"; } return String.format("%s-%s-%s-%s-%s-%s", getId(), getVersion(), getBuild(), getLanguageImplementationVersion(), getAbi(), getPlatform()) + ".whl"; } /** * @return Returns the native version of the package */ @Override public Version getNativeVersion() { String versionLocal = getVersion(); if (versionLocal == null) { return null; } try { return Version.parse(versionLocal); } catch (IllegalArgumentException e) { return null; } } /** * @return Returns a map data structure of the coordinates without the VERSION coordinate */ @Override public Map<String, String> dropVersion() { Map<String, String> result = getCoordinates(); result.remove(VERSION); return result; } }
strongbox-storage/strongbox-storage-layout-providers/strongbox-storage-pypi-layout-provider/src/main/java/org/carlspring/strongbox/artifact/coordinates/PypiWheelArtifactCoordinates.java
package org.carlspring.strongbox.artifact.coordinates; import javax.persistence.Entity; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlRootElement; import java.util.Map; import org.carlspring.strongbox.util.PypiWheelArtifactCoordinatesUtils; import org.semver.Version; import org.apache.commons.lang3.StringUtils; /** * This class is an {@link ArtifactCoordinates} implementation for pypi artifacts * * Proper path for this coordinates is in the format of: * {distribution}-{version}(-{build tag})?-{python tag}-{abi tag}-{platform tag}.whl. * Example: distribution-1.0-1-py27-none-any.whl * * @author alecg956 */ @Entity @SuppressWarnings("serial") @XmlRootElement(name = "PypiWheelArtifactCoordinates") @XmlAccessorType(XmlAccessType.NONE) @ArtifactCoordinatesLayout(name = PypiWheelArtifactCoordinates.LAYOUT_NAME, alias = PypiWheelArtifactCoordinates.LAYOUT_ALIAS) public class PypiWheelArtifactCoordinates extends AbstractArtifactCoordinates<PypiWheelArtifactCoordinates, Version> { public static final String LAYOUT_NAME = "PyPi"; public static final String LAYOUT_ALIAS = "pypi"; public static final String DISTRIBUTION = "distribution"; public static final String VERSION = "version"; public static final String BUILD_TAG = "build_tag"; public static final String LANGUAGE_IMPLEMENTATION_VERSION_TAG = "languageImplementationVersion_tag"; public static final String ABI_TAG = "abi_tag"; public static final String PLATFORM_TAG = "platform_tag"; /** * This method takes in all artifact coordinates of a PyPi Wheel filename, with build being * the empty string if it is not included in the filename * * @param distribution Uniquely identifying artifact coordinate (required) * @param version Packages current version (required) * @param build Build_tag parameter (optional) * @param languageImplementationVersion Language and Implementation version argument (required) * @param abi ABI tag parameter 
(required) * @param platform Platform tag parameter (required) */ public PypiWheelArtifactCoordinates(String distribution, String version, String build, String languageImplementationVersion, String abi, String platform) { // if any of the required arguments are empty, throw an error if (StringUtils.isBlank(distribution) || StringUtils.isBlank(version) || StringUtils.isBlank(languageImplementationVersion) || StringUtils.isBlank(abi) || StringUtils.isBlank(platform)) { throw new IllegalArgumentException("The distribution, version, language_implementation_version_tag, abi_tag, and platform_tag are mandatory fields."); } if (!StringUtils.isBlank(build) && !Character.isDigit(build.charAt(0))) { throw new IllegalArgumentException("Illegal build tag!"); } setId(distribution); setVersion(version); setBuild(build); setLanguageImplementationVersion(languageImplementationVersion); setAbi(abi); setPlatform(platform); } /** * @param path The filename of the PyPi Wheel package * @return Returns a PyPiWheelArtifactCoordinates object with all included coordinates set */ public static PypiWheelArtifactCoordinates parse(String path) { return PypiWheelArtifactCoordinatesUtils.parse(path); } /** * @return Returns distribution coordinate value (serves as the unique ID) */ @Override public String getId() { return getCoordinate(DISTRIBUTION); } /** * @param id DISTRIBUTION coordinate will take this value */ @Override public void setId(String id) { setCoordinate(DISTRIBUTION, id); } /** * @return Returns the VERSION coordinate value */ @Override public String getVersion() { return getCoordinate(VERSION); } /** * @param version VERSION coordinate takes this value */ @Override public void setVersion(String version) { setCoordinate(VERSION, version); } /** * @return Returns the BUILD_TAG coordinate value */ @ArtifactLayoutCoordinate public String getBuild() { return getCoordinate(BUILD_TAG); } /** * @param build BUILD_TAG coordinate will take this value */ public void setBuild(String build) 
{ setCoordinate(BUILD_TAG, build); } /** * @return Returns the LANGUAGE_IMPLEMENTATION_VERSION_TAG coordinate value */ @ArtifactLayoutCoordinate public String getLanguageImplementationVersion() { return getCoordinate(LANGUAGE_IMPLEMENTATION_VERSION_TAG); } /** * @param lang LANGUAGE_IMPLEMENTATION_VERSION_TAG takes this value */ public void setLanguageImplementationVersion(String lang) { setCoordinate(LANGUAGE_IMPLEMENTATION_VERSION_TAG, lang); } /** * @return Returns the ABI_TAG coordinate value */ @ArtifactLayoutCoordinate public String getAbi() { return getCoordinate(ABI_TAG); } /** * @param abi ABI_TAG coordinate takes this value */ public void setAbi(String abi) { setCoordinate(ABI_TAG, abi); } /** * @return Returns the PLATFORM_TAG coordinate value */ @ArtifactLayoutCoordinate public String getPlatform() { return getCoordinate(PLATFORM_TAG); } /** * @param platform PLATFORM_TAG coordinate takes this value */ public void setPlatform(String platform) { setCoordinate(PLATFORM_TAG, platform); } /** * @return Returns the reconstructed path from the stored coordinate values */ @Override public String toPath() { // if optional BUILD_TAG coordinate is empty, don't include it in the reconstruction if (StringUtils.isBlank(getBuild())) { return String.format("%s-%s-%s-%s-%s", getId(), getVersion(), getLanguageImplementationVersion(), getAbi(), getPlatform()) + ".whl"; } return String.format("%s-%s-%s-%s-%s-%s", getId(), getVersion(), getBuild(), getLanguageImplementationVersion(), getAbi(), getPlatform()) + ".whl"; } /** * @return Returns the native version of the package */ @Override public Version getNativeVersion() { String versionLocal = getVersion(); if (versionLocal == null) { return null; } try { return Version.parse(versionLocal); } catch (IllegalArgumentException e) { return null; } } /** * @return Returns a map data structure of the coordinates without the VERSION coordinate */ @Override public Map<String, String> dropVersion() { Map<String, String> result = 
getCoordinates(); result.remove(VERSION); return result; } }
Issue 809: Removed TAG from the static variables in PypiWheelArtifactCoordinates.java
strongbox-storage/strongbox-storage-layout-providers/strongbox-storage-pypi-layout-provider/src/main/java/org/carlspring/strongbox/artifact/coordinates/PypiWheelArtifactCoordinates.java
Issue 809: Removed TAG from the static variables in PypiWheelArtifactCoordinates.java
<ide><path>trongbox-storage/strongbox-storage-layout-providers/strongbox-storage-pypi-layout-provider/src/main/java/org/carlspring/strongbox/artifact/coordinates/PypiWheelArtifactCoordinates.java <ide> <ide> public static final String VERSION = "version"; <ide> <del> public static final String BUILD_TAG = "build_tag"; <del> <del> public static final String LANGUAGE_IMPLEMENTATION_VERSION_TAG = "languageImplementationVersion_tag"; <del> <del> public static final String ABI_TAG = "abi_tag"; <del> <del> public static final String PLATFORM_TAG = "platform_tag"; <add> public static final String BUILD = "build"; <add> <add> public static final String LANGUAGE_IMPLEMENTATION_VERSION = "languageImplementationVersion"; <add> <add> public static final String ABI = "abi"; <add> <add> public static final String PLATFORM = "platform"; <ide> <ide> /** <ide> * This method takes in all artifact coordinates of a PyPi Wheel filename, with build being <ide> // if any of the required arguments are empty, throw an error <ide> if (StringUtils.isBlank(distribution) || StringUtils.isBlank(version) || StringUtils.isBlank(languageImplementationVersion) || StringUtils.isBlank(abi) || StringUtils.isBlank(platform)) <ide> { <del> throw new IllegalArgumentException("The distribution, version, language_implementation_version_tag, abi_tag, and platform_tag are mandatory fields."); <add> throw new IllegalArgumentException("The distribution, version, languageImplementationVersion, abi, and platform fields are mandatory."); <ide> } <ide> <ide> if (!StringUtils.isBlank(build) && !Character.isDigit(build.charAt(0))) <ide> } <ide> <ide> /** <del> * @return Returns the BUILD_TAG coordinate value <add> * @return Returns the BUILD coordinate value <ide> */ <ide> @ArtifactLayoutCoordinate <ide> public String getBuild() <ide> { <del> return getCoordinate(BUILD_TAG); <del> } <del> <del> /** <del> * @param build BUILD_TAG coordinate will take this value <add> return getCoordinate(BUILD); <add> } <add> <add> 
/** <add> * @param build BUILD coordinate will take this value <ide> */ <ide> public void setBuild(String build) <ide> { <del> setCoordinate(BUILD_TAG, build); <del> } <del> <del> /** <del> * @return Returns the LANGUAGE_IMPLEMENTATION_VERSION_TAG coordinate value <add> setCoordinate(BUILD, build); <add> } <add> <add> /** <add> * @return Returns the LANGUAGE_IMPLEMENTATION_VERSION coordinate value <ide> */ <ide> @ArtifactLayoutCoordinate <ide> public String getLanguageImplementationVersion() <ide> { <del> return getCoordinate(LANGUAGE_IMPLEMENTATION_VERSION_TAG); <del> } <del> <del> /** <del> * @param lang LANGUAGE_IMPLEMENTATION_VERSION_TAG takes this value <add> return getCoordinate(LANGUAGE_IMPLEMENTATION_VERSION); <add> } <add> <add> /** <add> * @param lang LANGUAGE_IMPLEMENTATION_VERSION takes this value <ide> */ <ide> public void setLanguageImplementationVersion(String lang) <ide> { <del> setCoordinate(LANGUAGE_IMPLEMENTATION_VERSION_TAG, lang); <del> } <del> <del> /** <del> * @return Returns the ABI_TAG coordinate value <add> setCoordinate(LANGUAGE_IMPLEMENTATION_VERSION, lang); <add> } <add> <add> /** <add> * @return Returns the ABI coordinate value <ide> */ <ide> @ArtifactLayoutCoordinate <ide> public String getAbi() <ide> { <del> return getCoordinate(ABI_TAG); <del> } <del> <del> /** <del> * @param abi ABI_TAG coordinate takes this value <add> return getCoordinate(ABI); <add> } <add> <add> /** <add> * @param abi ABI coordinate takes this value <ide> */ <ide> public void setAbi(String abi) <ide> { <del> setCoordinate(ABI_TAG, abi); <del> } <del> <del> /** <del> * @return Returns the PLATFORM_TAG coordinate value <add> setCoordinate(ABI, abi); <add> } <add> <add> /** <add> * @return Returns the PLATFORM coordinate value <ide> */ <ide> @ArtifactLayoutCoordinate <ide> public String getPlatform() <ide> { <del> return getCoordinate(PLATFORM_TAG); <del> } <del> <del> /** <del> * @param platform PLATFORM_TAG coordinate takes this value <add> return 
getCoordinate(PLATFORM); <add> } <add> <add> /** <add> * @param platform PLATFORM coordinate takes this value <ide> */ <ide> public void setPlatform(String platform) <ide> { <del> setCoordinate(PLATFORM_TAG, platform); <add> setCoordinate(PLATFORM, platform); <ide> } <ide> <ide> /** <ide> @Override <ide> public String toPath() <ide> { <del> // if optional BUILD_TAG coordinate is empty, don't include it in the reconstruction <add> // if optional BUILD coordinate is empty, don't include it in the reconstruction <ide> if (StringUtils.isBlank(getBuild())) <ide> { <ide> return String.format("%s-%s-%s-%s-%s", getId(), getVersion(), getLanguageImplementationVersion(), getAbi(), getPlatform()) + ".whl";
Java
apache-2.0
e353ca5ea8d8f9fc6214df8cfa09b6e97a2226eb
0
evanchooly/morphia,evanchooly/morphia
package com.google.code.morphia.query; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.bson.types.CodeWScope; import com.google.code.morphia.Datastore; import com.google.code.morphia.DatastoreImpl; import com.google.code.morphia.Key; import com.google.code.morphia.annotations.Entity; import com.google.code.morphia.logging.Logr; import com.google.code.morphia.logging.MorphiaLoggerFactory; import com.google.code.morphia.mapping.MappedClass; import com.google.code.morphia.mapping.Mapper; import com.google.code.morphia.mapping.cache.EntityCache; import com.mongodb.BasicDBObject; import com.mongodb.BasicDBObjectBuilder; import com.mongodb.Bytes; import com.mongodb.DBCollection; import com.mongodb.DBCursor; import com.mongodb.DBObject; /** * <p>Implementation of Query</p> * * @author Scott Hernandez * * @param <T> The type we will be querying for, and returning. */ public class QueryImpl<T> extends CriteriaContainerImpl implements Query<T>, Criteria { private static final Logr log = MorphiaLoggerFactory.get(QueryImpl.class); private EntityCache cache; private boolean validateName = true; private boolean validateType = true; private String[] fields = null; private Boolean includeFields = null; private DBObject sort = null; private DatastoreImpl ds = null; private DBCollection dbColl = null; private int offset = 0; private int limit = -1; private int batchSize = 0; private String indexHint; private Class<T> clazz = null; private DBObject baseQuery = null; private boolean snapshotted = false; private boolean slaveOk = false; private boolean noTimeout = false; public QueryImpl(Class<T> clazz, DBCollection coll, Datastore ds) { super(CriteriaJoin.AND); this.query = this; this.clazz = clazz; this.ds = ((DatastoreImpl)ds); this.dbColl = coll; this.cache = this.ds.getMapper().createEntityCache(); MappedClass mc = this.ds.getMapper().getMappedClass(clazz); Entity entAn = mc == null ? 
null : mc.getEntityAnnotation(); if (entAn != null) this.slaveOk = this.ds.getMapper().getMappedClass(clazz).getEntityAnnotation().slaveOk(); } public QueryImpl(Class<T> clazz, DBCollection coll, Datastore ds, int offset, int limit) { this(clazz, coll, ds); this.offset = offset; this.limit = limit; } public QueryImpl(Class<T> clazz, DBCollection coll, DatastoreImpl ds, DBObject baseQuery) { this(clazz, coll, ds); this.baseQuery = baseQuery; } @Override public QueryImpl<T> clone(){ QueryImpl<T> n = new QueryImpl<T>(clazz, dbColl, ds); n.attachedTo = attachedTo; n.baseQuery = baseQuery; n.batchSize = batchSize; n.cache = cache; n.fields = fields; n.includeFields = includeFields; n.indexHint = indexHint; n.limit = limit; n.noTimeout = noTimeout; n.offset = offset; n.query = n; n.slaveOk = slaveOk; n.snapshotted = snapshotted; n.sort = sort; n.validateName = validateName; n.validateType = validateType; return n; } public DBCollection getCollection() { return dbColl; } public void setQueryObject(DBObject query) { this.baseQuery = query; } public int getOffset() { return offset; } public int getLimit() { return limit; } public DBObject getQueryObject() { DBObject obj = new BasicDBObject(); if (this.baseQuery != null) { obj.putAll(this.baseQuery); } this.addTo(obj); return obj; } public DatastoreImpl getDatastore() { return ds; } public DBObject getFieldsObject() { if (fields == null || fields.length == 0) return null; Map<String, Boolean> fieldsFilter = new HashMap<String, Boolean>(); for(String field : this.fields) fieldsFilter.put(field, (includeFields)); return new BasicDBObject(fieldsFilter); } public DBObject getSortObject() { return (sort == null) ? 
null : sort; } public boolean isValidatingNames() { return validateName; } public boolean isValidatingTypes() { return validateType; } public long countAll() { DBObject query = getQueryObject(); if (log.isTraceEnabled()) log.trace("Executing count(" + dbColl.getName() + ") for query: " + query); return dbColl.getCount(query); } public DBCursor prepareCursor() { DBObject query = getQueryObject(); DBObject fields = getFieldsObject(); if (log.isTraceEnabled()) log.trace("Running query(" + dbColl.getName() + ") : " + query + ", fields:" + fields + ",off:" + offset + ",limit:" + limit); DBCursor cursor = dbColl.find(query, fields); if (offset > 0) cursor.skip(offset); if (limit > 0) cursor.limit(limit); if (batchSize > 0) cursor.batchSize(batchSize); if (snapshotted) cursor.snapshot(); if (sort != null) cursor.sort(sort); if (indexHint != null) cursor.hint(indexHint); if (slaveOk) { int opts = dbColl.getOptions(); cursor.addOption(opts |= Bytes.QUERYOPTION_SLAVEOK); } if (noTimeout) { int opts = dbColl.getOptions(); cursor.addOption(opts |= Bytes.QUERYOPTION_NOTIMEOUT); } //Check for bad options. 
if (snapshotted && (sort!=null || indexHint!=null)) log.warning("Snapshotted query should not have hint/sort."); return cursor; } public Iterable<T> fetch() { DBCursor cursor = prepareCursor(); if (log.isTraceEnabled()) log.trace("Getting cursor(" + dbColl.getName() + ") for query:" + cursor.getQuery()); return new MorphiaIterator<T,T>(cursor, ds.getMapper(), clazz, dbColl.getName(), cache); } public Iterable<Key<T>> fetchKeys() { String[] oldFields = fields; Boolean oldInclude = includeFields; fields = new String[] {Mapper.ID_KEY}; includeFields = true; DBCursor cursor = prepareCursor(); if (log.isTraceEnabled()) log.trace("Getting cursor(" + dbColl.getName() + ") for query:" + cursor.getQuery()); fields = oldFields; includeFields = oldInclude; return new MorphiaKeyIterator<T>(cursor, ds.getMapper(), clazz, dbColl.getName()); } @SuppressWarnings("unchecked") public List<T> asList() { List<T> results = new ArrayList<T>(); MorphiaIterator<T,T> iter = (MorphiaIterator<T,T>) fetch().iterator(); for(T ent : iter) results.add(ent); if (log.isTraceEnabled()) log.trace(String.format("\nasList: %s \t %d entities, iterator time: driver %n ms, mapper %n ms \n cache: %s \n for $s \n ", dbColl.getName(), results.size(), iter.getDriverTime(), iter.getMapperTime(), cache.stats().toString(), getQueryObject())); return results; } public List<Key<T>> asKeyList() { List<Key<T>> results = new ArrayList<Key<T>>(); for(Key<T> key : fetchKeys()) results.add(key); return results; } public Iterable<T> fetchEmptyEntities() { String[] oldFields = fields; Boolean oldInclude = includeFields; fields = new String[] {Mapper.ID_KEY}; includeFields = true; Iterable<T> res = fetch(); fields = oldFields; includeFields = oldInclude; return res; } /** * Converts the textual operator (">", "<=", etc) into a FilterOperator. * Forgiving about the syntax; != and <> are NOT_EQUAL, = and == are EQUAL. 
*/ protected FilterOperator translate(String operator) { operator = operator.trim(); if (operator.equals("=") || operator.equals("==")) return FilterOperator.EQUAL; else if (operator.equals(">")) return FilterOperator.GREATER_THAN; else if (operator.equals(">=")) return FilterOperator.GREATER_THAN_OR_EQUAL; else if (operator.equals("<")) return FilterOperator.LESS_THAN; else if (operator.equals("<=")) return FilterOperator.LESS_THAN_OR_EQUAL; else if (operator.equals("!=") || operator.equals("<>")) return FilterOperator.NOT_EQUAL; else if (operator.toLowerCase().equals("in")) return FilterOperator.IN; else if (operator.toLowerCase().equals("nin")) return FilterOperator.NOT_IN; else if (operator.toLowerCase().equals("all")) return FilterOperator.ALL; else if (operator.toLowerCase().equals("exists")) return FilterOperator.EXISTS; else if (operator.toLowerCase().equals("elem")) return FilterOperator.ELEMENT_MATCH; else if (operator.toLowerCase().equals("size")) return FilterOperator.SIZE; else if (operator.toLowerCase().equals("within")) return FilterOperator.WITHIN; else if (operator.toLowerCase().equals("near")) return FilterOperator.NEAR; else throw new IllegalArgumentException("Unknown operator '" + operator + "'"); } public Query<T> filter(String condition, Object value) { String[] parts = condition.trim().split(" "); if (parts.length < 1 || parts.length > 6) throw new IllegalArgumentException("'" + condition + "' is not a legal filter condition"); String prop = parts[0].trim(); FilterOperator op = (parts.length == 2) ? 
this.translate(parts[1]) : FilterOperator.EQUAL; this.add(new FieldCriteria(this, prop, op, value, this.validateName, this.validateType)); return this; } public Query<T> where(CodeWScope js) { this.add(new WhereCriteria(js)); return this; } public Query<T> where(String js) { this.add(new WhereCriteria(js)); return this; } public Query<T> enableValidation(){ validateName = validateType = true; return this; } public Query<T> disableValidation(){ validateName = validateType = false; return this; } QueryImpl<T> validateNames() {validateName = true; return this; } QueryImpl<T> disableTypeValidation() {validateType = false; return this; } public T get() { int oldLimit = limit; limit = 1; Iterator<T> it = fetch().iterator(); limit = oldLimit; return (it.hasNext()) ? it.next() : null ; } public Key<T> getKey() { int oldLimit = limit; limit = 1; Iterator<Key<T>> it = fetchKeys().iterator(); limit = oldLimit; return (it.hasNext()) ? it.next() : null; } public Query<T> limit(int value) { this.limit = value; return this; } public Query<T> batchSize(int value) { this.batchSize = value; return this; } public int getBatchSize() { return batchSize; } public Query<T> skip(int value) { this.offset = value; return this; } public Query<T> offset(int value) { this.offset = value; return this; } public Query<T> order(String condition) { if (snapshotted) throw new QueryException("order cannot be used on a snapshotted query."); //TODO: validate names and translate from java names. 
sort = parseSortString(condition); return this; } public static BasicDBObject parseSortString(String str) { BasicDBObjectBuilder ret = BasicDBObjectBuilder.start(); String[] parts = str.split(","); for (String s : parts) { s = s.trim(); int dir = 1; if (s.startsWith("-")) { dir = -1; s = s.substring(1).trim(); } ret = ret.add(s, dir); } return (BasicDBObject) ret.get(); } public Iterator<T> iterator() { return fetch().iterator(); } public Class<T> getEntityClass() { return this.clazz; } public String toString() { return this.getQueryObject().toString(); } public FieldEnd<? extends Query<T>> field(String name) { return this.field(name, this.validateName); } private FieldEnd<? extends Query<T>> field(String field, boolean validate) { return new FieldEndImpl<QueryImpl<T>>(this, field, this, validate); } public FieldEnd<? extends CriteriaContainerImpl> criteria(String field) { return this.criteria(field, this.validateName); } private FieldEnd<? extends CriteriaContainerImpl> criteria(String field, boolean validate) { CriteriaContainerImpl container = new CriteriaContainerImpl(this, CriteriaJoin.AND); this.add(container); return new FieldEndImpl<CriteriaContainerImpl>(this, field, container, validate); } //TODO: test this. public Query<T> hintIndex(String idxName) { indexHint = idxName; return this; } public Query<T> retrievedFields(boolean include, String...fields){ if (includeFields != null && include != includeFields) throw new IllegalStateException("You cannot mix include and excluded fields together!"); this.includeFields = include; this.fields = fields; return this; } /** Enabled snapshotted mode where duplicate results * (which may be updated during the lifetime of the cursor) * will not be returned. Not compatible with order/sort and hint. **/ public Query<T> enableSnapshotMode() { snapshotted = true; return this; } /** Disable snapshotted mode (default mode). This will be faster * but changes made during the cursor may cause duplicates. 
**/ public Query<T> disableSnapshotMode() { snapshotted = false; return this; } public Query<T> queryNonPrimary() { slaveOk = true; return this; } public Query<T> queryPrimaryOnly() { slaveOk = false; return this; } /** Disables cursor timeout on server. */ public Query<T> disableTimeout() { noTimeout = false; return this; } /** Enables cursor timeout on server. */ public Query<T> enableTimeout(){ noTimeout = true; return this; } }
morphia/src/main/java/com/google/code/morphia/query/QueryImpl.java
package com.google.code.morphia.query; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.bson.types.CodeWScope; import com.google.code.morphia.Datastore; import com.google.code.morphia.DatastoreImpl; import com.google.code.morphia.Key; import com.google.code.morphia.annotations.Entity; import com.google.code.morphia.logging.Logr; import com.google.code.morphia.logging.MorphiaLoggerFactory; import com.google.code.morphia.mapping.MappedClass; import com.google.code.morphia.mapping.Mapper; import com.google.code.morphia.mapping.cache.EntityCache; import com.mongodb.BasicDBObject; import com.mongodb.BasicDBObjectBuilder; import com.mongodb.Bytes; import com.mongodb.DBCollection; import com.mongodb.DBCursor; import com.mongodb.DBObject; /** * <p>Implementation of Query</p> * * @author Scott Hernandez * * @param <T> The type we will be querying for, and returning. */ public class QueryImpl<T> extends CriteriaContainerImpl implements Query<T>, Criteria { private static final Logr log = MorphiaLoggerFactory.get(QueryImpl.class); private EntityCache cache; private boolean validateName = true; private boolean validateType = true; private String[] fields = null; private Boolean includeFields = null; private DBObject sort = null; private DatastoreImpl ds = null; private DBCollection dbColl = null; private int offset = 0; private int limit = -1; private int batchSize = 0; private String indexHint; private Class<T> clazz = null; private DBObject baseQuery = null; private boolean snapshotted = false; private boolean slaveOk = false; private boolean noTimeout = false; public QueryImpl(Class<T> clazz, DBCollection coll, Datastore ds) { super(CriteriaJoin.AND); this.query = this; this.clazz = clazz; this.ds = ((DatastoreImpl)ds); this.dbColl = coll; this.cache = this.ds.getMapper().createEntityCache(); MappedClass mc = this.ds.getMapper().getMappedClass(clazz); Entity entAn = mc == null ? 
null : mc.getEntityAnnotation(); if (entAn != null) this.slaveOk = this.ds.getMapper().getMappedClass(clazz).getEntityAnnotation().slaveOk(); } public QueryImpl(Class<T> clazz, DBCollection coll, Datastore ds, int offset, int limit) { this(clazz, coll, ds); this.offset = offset; this.limit = limit; } public QueryImpl(Class<T> clazz, DBCollection coll, DatastoreImpl ds, DBObject baseQuery) { this(clazz, coll, ds); this.baseQuery = baseQuery; } @Override public QueryImpl<T> clone(){ QueryImpl<T> n = new QueryImpl<T>(clazz, dbColl, ds); n.attachedTo = attachedTo; n.baseQuery = baseQuery; n.batchSize = batchSize; n.cache = cache; n.fields = fields; n.includeFields = includeFields; n.indexHint = indexHint; n.limit = limit; n.noTimeout = noTimeout; n.offset = offset; n.query = n; n.slaveOk = slaveOk; n.snapshotted = snapshotted; n.sort = sort; n.validateName = validateName; n.validateType = validateType; return n; } public DBCollection getCollection() { return dbColl; } public void setQueryObject(DBObject query) { this.baseQuery = query; } public int getOffset() { return offset; } public int getLimit() { return limit; } public DBObject getQueryObject() { DBObject obj = new BasicDBObject(); if (this.baseQuery != null) { obj.putAll(this.baseQuery); } this.addTo(obj); return obj; } public DatastoreImpl getDatastore() { return ds; } public DBObject getFieldsObject() { if (fields == null || fields.length == 0) return null; Map<String, Boolean> fieldsFilter = new HashMap<String, Boolean>(); for(String field : this.fields) fieldsFilter.put(field, (includeFields)); return new BasicDBObject(fieldsFilter); } public DBObject getSortObject() { return (sort == null) ? 
null : sort; } public boolean isValidatingNames() { return validateName; } public boolean isValidatingTypes() { return validateType; } public long countAll() { DBObject query = getQueryObject(); if (log.isTraceEnabled()) log.trace("Executing count(" + dbColl.getName() + ") for query: " + query); return dbColl.getCount(query); } public DBCursor prepareCursor() { DBObject query = getQueryObject(); DBObject fields = getFieldsObject(); if (log.isTraceEnabled()) log.trace("Running query(" + dbColl.getName() + ") : " + query + ", fields:" + fields + ",off:" + offset + ",limit:" + limit); DBCursor cursor = dbColl.find(query, fields); if (offset > 0) cursor.skip(offset); if (limit > 0) cursor.limit(limit); if (batchSize > 0) cursor.batchSize(batchSize); if (snapshotted) cursor.snapshot(); if (sort != null) cursor.sort(sort); if (indexHint != null) cursor.hint(indexHint); if (slaveOk) { int opts = dbColl.getOptions(); cursor.addOption(opts |= Bytes.QUERYOPTION_SLAVEOK); } if (noTimeout) { int opts = dbColl.getOptions(); cursor.addOption(opts |= Bytes.QUERYOPTION_NOTIMEOUT); } //Check for bad options. 
if (snapshotted && (sort!=null || indexHint!=null)) log.warning("Snapshotted query should not have hint/sort."); return cursor; } public Iterable<T> fetch() { DBCursor cursor = prepareCursor(); if (log.isTraceEnabled()) log.trace("Getting cursor(" + dbColl.getName() + ") for query:" + cursor.getQuery()); return new MorphiaIterator<T,T>(cursor, ds.getMapper(), clazz, dbColl.getName(), cache); } public Iterable<Key<T>> fetchKeys() { String[] oldFields = fields; Boolean oldInclude = includeFields; fields = new String[] {Mapper.ID_KEY}; includeFields = true; DBCursor cursor = prepareCursor(); if (log.isTraceEnabled()) log.trace("Getting cursor(" + dbColl.getName() + ") for query:" + cursor.getQuery()); fields = oldFields; includeFields = oldInclude; return new MorphiaKeyIterator<T>(cursor, ds.getMapper(), clazz, dbColl.getName()); } @SuppressWarnings("unchecked") public List<T> asList() { List<T> results = new ArrayList<T>(); MorphiaIterator<T,T> iter = (MorphiaIterator<T,T>) fetch().iterator(); for(T ent : iter) results.add(ent); if (log.isTraceEnabled()) log.trace(String.format("\nasList: %s \t %d entities, iterator time: driver %n ms, mapper %n ms \n cache: %s \n for $s \n ", dbColl.getName(), results.size(), iter.getDriverTime(), iter.getMapperTime(), cache.stats().toString(), getQueryObject())); return results; } public List<Key<T>> asKeyList() { List<Key<T>> results = new ArrayList<Key<T>>(); for(Key<T> key : fetchKeys()) results.add(key); return results; } public Iterable<T> fetchEmptyEntities() { String[] oldFields = fields; Boolean oldInclude = includeFields; fields = new String[] {Mapper.ID_KEY}; includeFields = true; Iterable<T> res = fetch(); fields = oldFields; includeFields = oldInclude; return res; } /** * Converts the textual operator (">", "<=", etc) into a FilterOperator. * Forgiving about the syntax; != and <> are NOT_EQUAL, = and == are EQUAL. 
*/ protected FilterOperator translate(String operator) { operator = operator.trim(); if (operator.equals("=") || operator.equals("==")) return FilterOperator.EQUAL; else if (operator.equals(">")) return FilterOperator.GREATER_THAN; else if (operator.equals(">=")) return FilterOperator.GREATER_THAN_OR_EQUAL; else if (operator.equals("<")) return FilterOperator.LESS_THAN; else if (operator.equals("<=")) return FilterOperator.LESS_THAN_OR_EQUAL; else if (operator.equals("!=") || operator.equals("<>")) return FilterOperator.NOT_EQUAL; else if (operator.toLowerCase().equals("in")) return FilterOperator.IN; else if (operator.toLowerCase().equals("nin")) return FilterOperator.NOT_IN; else if (operator.toLowerCase().equals("all")) return FilterOperator.ALL; else if (operator.toLowerCase().equals("exists")) return FilterOperator.EXISTS; else if (operator.toLowerCase().equals("elem")) return FilterOperator.ELEMENT_MATCH; else if (operator.toLowerCase().equals("size")) return FilterOperator.SIZE; else if (operator.toLowerCase().equals("within")) return FilterOperator.WITHIN; else if (operator.toLowerCase().equals("near")) return FilterOperator.NEAR; else throw new IllegalArgumentException("Unknown operator '" + operator + "'"); } public Query<T> filter(String condition, Object value) { String[] parts = condition.trim().split(" "); if (parts.length < 1 || parts.length > 6) throw new IllegalArgumentException("'" + condition + "' is not a legal filter condition"); String prop = parts[0].trim(); FilterOperator op = (parts.length == 2) ? 
this.translate(parts[1]) : FilterOperator.EQUAL; this.add(new FieldCriteria(this, prop, op, value, this.validateName, this.validateType)); return this; } public Query<T> where(CodeWScope js) { this.add(new WhereCriteria(js)); return this; } public Query<T> where(String js) { this.add(new WhereCriteria(js)); return this; } public Query<T> enableValidation(){ validateName = validateType = true; return this; } public Query<T> disableValidation(){ validateName = validateType = false; return this; } QueryImpl<T> validateNames() {validateName = true; return this; } QueryImpl<T> disableTypeValidation() {validateType = false; return this; } public T get() { int oldLimit = limit; limit = 1; Iterator<T> it = fetch().iterator(); limit = oldLimit; return (it.hasNext()) ? it.next() : null ; } public Key<T> getKey() { int oldLimit = limit; limit = 1; Iterator<Key<T>> it = fetchKeys().iterator(); limit = oldLimit; return (it.hasNext()) ? it.next() : null; } public Query<T> limit(int value) { this.limit = value; return this; } public Query<T> batchSize(int value) { this.batchSize = value; return this; } public int getBatchSize() { return batchSize; } public Query<T> skip(int value) { this.offset = value; return this; } public Query<T> offset(int value) { this.offset = value; return this; } public Query<T> order(String condition) { if (snapshotted) throw new QueryException("order cannot be used on a snapshotted query."); sort = parseSortString(condition); return this; } public static BasicDBObject parseSortString(String str) { BasicDBObjectBuilder ret = BasicDBObjectBuilder.start(); String[] parts = str.split(","); for (String s : parts) { s = s.trim(); int dir = 1; if (s.startsWith("-")) { dir = -1; s = s.substring(1).trim(); } ret = ret.add(s, dir); } return (BasicDBObject) ret.get(); } public Iterator<T> iterator() { return fetch().iterator(); } public Class<T> getEntityClass() { return this.clazz; } public String toString() { return this.getQueryObject().toString(); } public 
FieldEnd<? extends Query<T>> field(String name) { return this.field(name, this.validateName); } private FieldEnd<? extends Query<T>> field(String field, boolean validate) { return new FieldEndImpl<QueryImpl<T>>(this, field, this, validate); } public FieldEnd<? extends CriteriaContainerImpl> criteria(String field) { return this.criteria(field, this.validateName); } private FieldEnd<? extends CriteriaContainerImpl> criteria(String field, boolean validate) { CriteriaContainerImpl container = new CriteriaContainerImpl(this, CriteriaJoin.AND); this.add(container); return new FieldEndImpl<CriteriaContainerImpl>(this, field, container, validate); } //TODO: test this. public Query<T> hintIndex(String idxName) { indexHint = idxName; return this; } public Query<T> retrievedFields(boolean include, String...fields){ if (includeFields != null && include != includeFields) throw new IllegalStateException("You cannot mix include and excluded fields together!"); this.includeFields = include; this.fields = fields; return this; } /** Enabled snapshotted mode where duplicate results * (which may be updated during the lifetime of the cursor) * will not be returned. Not compatible with order/sort and hint. **/ public Query<T> enableSnapshotMode() { snapshotted = true; return this; } /** Disable snapshotted mode (default mode). This will be faster * but changes made during the cursor may cause duplicates. **/ public Query<T> disableSnapshotMode() { snapshotted = false; return this; } public Query<T> queryNonPrimary() { slaveOk = true; return this; } public Query<T> queryPrimaryOnly() { slaveOk = false; return this; } /** Disables cursor timeout on server. */ public Query<T> disableTimeout() { noTimeout = false; return this; } /** Enables cursor timeout on server. */ public Query<T> enableTimeout(){ noTimeout = true; return this; } }
added todo
morphia/src/main/java/com/google/code/morphia/query/QueryImpl.java
added todo
<ide><path>orphia/src/main/java/com/google/code/morphia/query/QueryImpl.java <ide> if (snapshotted) <ide> throw new QueryException("order cannot be used on a snapshotted query."); <ide> <add> //TODO: validate names and translate from java names. <ide> sort = parseSortString(condition); <ide> <ide> return this;
Java
apache-2.0
933c28f8b947b1ea89c79f6d2d693b340a026bcc
0
mythguided/hydra,mythguided/hydra,mythguided/hydra,mythguided/hydra
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.addthis.hydra.task.source; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import com.addthis.basis.util.Files; import com.addthis.basis.util.Parameter; import com.addthis.basis.util.Strings; import com.addthis.bundle.channel.DataChannelError; import com.addthis.bundle.core.Bundle; import com.addthis.bundle.core.BundleFactory; import com.addthis.bundle.core.BundleField; import com.addthis.bundle.core.list.ListBundle; import com.addthis.bundle.core.list.ListBundleFormat; import com.addthis.bundle.value.ValueFactory; import com.addthis.bundle.value.ValueString; import com.addthis.codec.Codec; import com.addthis.hydra.common.plugins.PluginReader; import com.addthis.hydra.data.filter.value.StringFilter; import com.addthis.hydra.store.db.DBKey; import com.addthis.hydra.store.db.PageDB; import com.addthis.hydra.task.run.TaskRunConfig; import com.addthis.hydra.task.source.bundleizer.ChannelBundleizer; import com.addthis.hydra.task.stream.PersistentStreamFileSource; import 
com.addthis.hydra.task.stream.StreamFile; import com.addthis.hydra.task.stream.StreamFileSource; import com.addthis.hydra.task.stream.StreamSourceFiltered; import com.addthis.hydra.task.stream.StreamSourceHashed; import com.google.common.base.Objects; import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.yammer.metrics.Metrics; import com.yammer.metrics.core.Counter; import com.yammer.metrics.core.Histogram; import com.yammer.metrics.core.Timer; import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Abstract implementation of TaskDataSource * <p/> * There are many common features that streaming data source needs to deal with * and this class performs those tasks so that subclasses can leverage the common * functionality. * <p/> * Stream sources are normally consume data from remote services * that run on servers with unbalanced loads and operations against those remote * services are subject to network latency and bandwidth constraints. The goal * of this class is to work with all available sources concurrently, pre-opening * packets from those sources and queueing them as the packets become available. * This allows clients of this class to consume a steady stream of packets from * any available host and prevents blocking waits while waiting for slower hosts * to make data available. * <p/> * This class consumes bytes that need to be turned into objects consumers understand. * Configuration inputs instruct the class on what type of objects to turn the bytes * into on receipt. The {@link BundleizerFactory} is responsible for performing this * conversion. * <p/> * This class maintains persistent state tracking which source files have been * consumed. This enables the class to intelligently ignore upstream files that * have already been completely consumed. The data is maintained in a KV data * store using the source file name as the key. 
*/ public abstract class AbstractStreamFileDataSource extends TaskDataSource implements BundleFactory { private static final Logger log = LoggerFactory.getLogger(AbstractStreamFileDataSource.class); // note that some parameters have 'mesh' in the name. Leaving intact for backwards compatibility private static final int POLL_INTERVAL = Parameter.intValue("dataSourceMeshy2.pollInterval", 1000); private static final int POLL_COUNTDOWN = Parameter.intValue("dataSourceMeshy2.pollCountdown", 1800); // 30 minutes private static final int DEFAULT_PREOPEN = Parameter.intValue("dataSourceMeshy2.preopen", 1); private static final int DEFAULT_SKIP_SOURCE_EXIT = Parameter.intValue("dataSourceMeshy2.skipSourceExit", 0); private static final int MARK_PAGES = Parameter.intValue("source.meshy.mark.pages", 1000); private static final int MARK_PAGE_SIZE = Parameter.intValue("source.meshy.mark.pageSize", 20); private static final int DEFAULT_WORKERS = Parameter.intValue("dataSourceMeshy2.workers", 2); private static final int DEFAULT_BUFFER = Parameter.intValue("dataSourceMeshy2.buffer", 128); private static final int DEFAULT_COUNTDOWN_LATCH_TIMEOUT = Parameter.intValue("dataSourceMeshy2.timeout.sec", 30); /** * A StringFilter that processes file names as a string bundle field. If the filter returns * null (false), then the file is skipped. Otherwise, it uses whatever string the filter * returns. Anything that isn't a StringFilter throws a runtime error. */ @Codec.Set(codable = true) private StringFilter filter; /** * Specifies conversion to bundles. */ @Codec.Set(codable = true) private BundleizerFactory format = new ChannelBundleizer(); /** * Path to the mark directory. */ @Codec.Set(codable = true) private String markDir = "marks"; /** * Ignore the mark directory */ @Codec.Set(codable = true) private boolean ignoreMarkDir; /** * Enable metrics visible only from jmx */ @Codec.Set(codable = true) private boolean jmxMetrics; /** * Number of shards in the input source. 
*/ @Codec.Set(codable = true) protected Integer shardTotal; /** * If specified then process only the shards specified in this array. */ @Codec.Set(codable = true) protected Integer shards[]; /** * If true then generate a hash of the filename input rather than use the {{mod}} field. Default is false. */ @Codec.Set(codable = true) protected boolean hash; /** * If true then allow all of the Hydra nodes to process all the data when * the hash field is false and the filename does not have {{mode}}. Default is false. */ @Codec.Set(codable = true) protected boolean processAllData; /** * If non-null, then inject the filename into the bundle field using this field name. Default is null. */ @Codec.Set(codable = true) private String injectSourceName; /** * Number of bundles to attempt to pull from a file before returning it to the * circular file queue. Difficult to understand without looking at the source code, * but if you feel the need for a lot of worker threads or are desperate for * potential performance gains you might try increasing this. 25 is a good place * to start, I think. The default is 1. */ @Codec.Set(codable = true) private int multiBundleReads = 1; /** * Number of bundles to fetch prior to starting the worker threads. * Default is either "dataSourceMeshy2.preopen" configuration value or 2. */ @Codec.Set(codable = true) private int preOpen = DEFAULT_PREOPEN; /** * Trigger an error when the number of skipped sources is greater than this value. * Default is either "dataSourceMeshy2.skipSourceExit" configuration value or 0. */ @Codec.Set(codable = true) private int skipSourceExit = DEFAULT_SKIP_SOURCE_EXIT; /** * Maximum size of the queue that stores bundles prior to their processing. * Default is either "dataSourceMeshy2.buffer" configuration value or 128. */ @Codec.Set(codable = true) private int buffer = DEFAULT_BUFFER; /** * Number of worker threads that request data from the meshy source. 
* Default is either "dataSourceMeshy2.workers" configuration value or 2. */ @Codec.Set(codable = true) private int workers = DEFAULT_WORKERS; /** * Set to enable marks compatibility mode with older source types. eg. 'mesh' for mesh1 and * 'stream' for stream2. 'stream2' is also fine. Do not set to anything unless doing an in-place * conversion of an existing job with an older source type. It won't be damaging for new jobs, but * it would be nice to eventually be able to drop support entirely. If cloning a job with this set, * then please remove it from the clone (before running). * <p/> * In more detail: Any non-null value will use legacy marks and anything beginning with * 'stream' will trim the starting '/' in a mesh path. */ @Codec.Set(codable = true) private String legacyMode; private final ListBundleFormat bundleFormat = new ListBundleFormat(); private final Bundle termBundle = new ListBundle(bundleFormat); private final SourceWorker sourceWorker = new SourceWorker(); private final ExecutorService workerThreadPool = Executors.newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat("streamSourceWorker-%d").build()); /* metrics */ private Histogram queueSizeHisto = Metrics.newHistogram(getClass(), "queueSizeHisto"); private Histogram fileSizeHisto = Metrics.newHistogram(getClass(), "fileSizeHisto"); private Timer readTimer = Metrics.newTimer(getClass(), "readTimer", TimeUnit.MILLISECONDS, TimeUnit.SECONDS); private final Counter openNew = Metrics.newCounter(getClass(), "openNew"); private final Counter openIndex = Metrics.newCounter(getClass(), "openIndex"); private final Counter openSkip = Metrics.newCounter(getClass(), "openSkip"); private final Counter skipping = Metrics.newCounter(getClass(), "skipping"); private final Counter reading = Metrics.newCounter(getClass(), "reading"); private final Counter opening = Metrics.newCounter(getClass(), "opening"); // Concurrency provisions private CountDownLatch runningThreadCountDownLatch = null; private final 
Counter globalBundleSkip = Metrics.newCounter(getClass(), "globalBundleSkip"); private final AtomicInteger consecutiveFileSkip = new AtomicInteger(); private final ThreadLocal<Integer> localBundleSkip = new ThreadLocal<Integer>() { @Override protected Integer initialValue() { return 0; } }; // State control private final LinkedBlockingQueue<Wrap> preOpened = new LinkedBlockingQueue<>(); protected final AtomicBoolean done = new AtomicBoolean(false); protected final AtomicBoolean exiting = new AtomicBoolean(false); private final AtomicBoolean shutdown = new AtomicBoolean(false); private CountDownLatch initialized = new CountDownLatch(1); private boolean localInitialized = false; private BlockingQueue<Bundle> queue; private PageDB<SimpleMark> markDB; private BundleField injectSourceField; private File markDirFile; private int magicMarksNumber = 42; private boolean useSimpleMarks = false; private boolean useLegacyStreamPath = false; private StreamFileSource source; private final Object nextSourceLock = new Object(); public AbstractStreamFileDataSource() { Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { exiting.set(true); shutdown(); } }); } public File getMarkDirFile() { return markDirFile; } public void setSource(StreamFileSource source) { AbstractStreamFileDataSource.this.source = source; } protected abstract PersistentStreamFileSource getSource(); protected void pushTermBundle() { if (queue != null) { boolean setDone = done.compareAndSet(false, true); boolean pushedTerm = queue.offer(termBundle); log.info("initiating shutdown(). 
setDone={} done={} preOpened={} exiting={} pushedTerm={} queue={} moreData={}", setDone, done.get(), preOpened.size(), exiting.get(), pushedTerm, queue.size(), hadMoreData()); } } protected void closePreOpenedQueue() { for (Wrap wrap : preOpened) { try { wrap.close(); } catch (IOException e) { log.warn("", e); } } } protected void closeMarkDB() { if (markDB != null) { markDB.close(); } else { log.warn("markdb was null, and was not closed"); } } @Override protected void open(TaskRunConfig config) { if (legacyMode != null) { magicMarksNumber = 0; useSimpleMarks = true; if (legacyMode.startsWith("stream")) { log.info("Using legacy mode for 'stream2' marks"); useLegacyStreamPath = true; } else { log.info("Using legacy mode for 'mesh' marks"); } } try { if (ignoreMarkDir) { File md = new File(markDir); if (md.exists()) { FileUtils.deleteDirectory(md); log.warn("Deleted file : {}", md); } } markDirFile = Files.initDirectory(markDir); if (useSimpleMarks) { markDB = new PageDB<>(markDirFile, SimpleMark.class, MARK_PAGE_SIZE, MARK_PAGES); } else { markDB = new PageDB<SimpleMark>(markDirFile, Mark.class, MARK_PAGE_SIZE, MARK_PAGES); } } catch (Exception e) { throw new RuntimeException(e); } if (injectSourceName != null) { injectSourceField = bundleFormat.getField(injectSourceName); } if (shardTotal == null || shardTotal == 0) { shardTotal = config.nodeCount; } if (shards == null) { shards = config.calcShardList(shardTotal); } source = getSource(); PersistentStreamFileSource persistentStreamFileSource = null; if (source != null) { persistentStreamFileSource = (PersistentStreamFileSource) source; } if (!processAllData && !hash && !(persistentStreamFileSource != null && persistentStreamFileSource.hasMod())) { log.error("possible source misconfiguration. lacks both 'hash' and '{{mod}}'. 
fix or set processAllData:true"); throw new RuntimeException("Possible Source Misconfiguration"); } try { if (persistentStreamFileSource != null) { persistentStreamFileSource.init(getMarkDirFile(), shards); } if (filter != null) { setSource(new StreamSourceFiltered(source, filter)); } if (hash) { setSource(new StreamSourceHashed(source, shards, shardTotal, useLegacyStreamPath)); } log.info("buffering[capacity={};workers={};preopen={};marks={};maxSkip={};shards={}]", buffer, workers, preOpen, markDir, skipSourceExit, Strings.join(shards, ",")); } catch (Exception e) { throw new RuntimeException(e); } queue = new ArrayBlockingQueue<>(buffer); if (workers == 0) { log.error("Either we failed to find any meshy sources or workers was set to 0. Shutting down."); shutdown(); } runningThreadCountDownLatch = new CountDownLatch(workers); int workerId = workers; while (workerId-- > 0) { workerThreadPool.execute(sourceWorker); } } protected void shutdownBody() { // shutdown adds termBundle to queue pushTermBundle(); if (runningThreadCountDownLatch != null) { boolean success = false; log.info("Waiting up to {} seconds for outstanding threads to complete.", DEFAULT_COUNTDOWN_LATCH_TIMEOUT); try { success = runningThreadCountDownLatch.await(DEFAULT_COUNTDOWN_LATCH_TIMEOUT, TimeUnit.SECONDS); } catch (InterruptedException e) { log.warn("", e); } if (success) { log.info("All threads have finished."); } else { log.info("All threads did NOT finish."); } } log.debug("closing wrappers"); closePreOpenedQueue(); log.debug("shutting down mesh"); //we may overwrite the local source variable and in doing so throw away the Persistance flag PersistentStreamFileSource baseSource = getSource(); if (baseSource != null) { try { baseSource.shutdown(); } catch (IOException e) { log.warn("", e); } } else { log.warn("getSource() returned null and no source was shutdown"); } closeMarkDB(); log.info(fileStatsToString("shutdown complete")); } protected void shutdown() { if (!shutdown.getAndSet(true)) { 
/** * The body of the shutdown method has been moved into its own * method for testing purposes. Tests which must wait until the * shutdown has completed will override the shutdownBody() method * and place synchronization constructs at the end of the overridden * method. */ shutdownBody(); } } protected class Wrap { final DBKey dbKey; final StreamFile stream; final ValueString sourceName; InputStream input; Bundleizer bundleizer; boolean closed; SimpleMark mark; Wrap(StreamFile stream) throws IOException { fileSizeHisto.update(stream.length()); this.stream = stream; String keyString = stream.getPath(); if (useLegacyStreamPath && keyString.charAt(0) == '/') { keyString = keyString.substring(1); } this.dbKey = new DBKey(magicMarksNumber, keyString); this.sourceName = ValueFactory.create(stream.getPath()); mark = markDB.get(dbKey); String stateValue = Mark.calcValue(stream); if (mark == null) { if (useSimpleMarks) { mark = new SimpleMark().set(stateValue, 0); } else { mark = new Mark().set(stateValue, 0); } log.debug("mark.open {} / {}", mark, stream); opening.inc(); input = stream.getInputStream(); //blocks waiting for network opening.dec(); bundleizer = format.createBundleizer(input, AbstractStreamFileDataSource.this); openNew.inc(); } else { if (mark.getValue().equals(stateValue) && mark.isEnd()) { log.debug("mark.skip {} / {}", mark, stream); openSkip.inc(); if (skipSourceExit > 0) { consecutiveFileSkip.incrementAndGet(); } closed = true; return; } else { openIndex.inc(); if (skipSourceExit > 0) { consecutiveFileSkip.set(0); } } opening.inc(); input = stream.getInputStream(); //blocks waiting for network opening.dec(); bundleizer = format.createBundleizer(input, AbstractStreamFileDataSource.this); long read = mark.getIndex(); int bundlesSkipped = 0; skipping.inc(); while (read > 0) { if (++bundlesSkipped % 100 == 0) { int totalSkip = localBundleSkip.get() + bundlesSkipped; if ((totalSkip / 100) % 250 == 0) { 
log.info(Objects.toStringHelper(Thread.currentThread().getName() + " bundle skip log") .add("thread-skip", totalSkip) .add("file-skip", bundlesSkipped) .add("file-to-skip", read) .add("global-skip-estimate", bundlesSkipped + globalBundleSkip.count()) .toString()); localBundleSkip.set(totalSkip); globalBundleSkip.inc(bundlesSkipped); bundlesSkipped = 0; } } try { bundleizer.next(); read--; } catch (Exception e) { log.warn("", e); } } skipping.dec(); localBundleSkip.set(localBundleSkip.get() + bundlesSkipped); globalBundleSkip.inc(bundlesSkipped); log.debug("mark.indx {} / {}", mark, stream); } reading.inc(); } void close() throws IOException { if (!closed) { input.close(); mark.update(stream); markDB.put(dbKey, mark); log.debug("mark.save {}:{} / {}", dbKey, mark, stream); closed = true; reading.dec(); } } Bundle next() throws IOException { if (closed) { log.debug("next {} / {} CLOSED returns null", mark, stream); return null; } Bundle next = null; try { next = bundleizer.next(); log.debug("next {} / {} = {}", mark, stream, next); if (next == null) { mark.setEnd(true); close(); } else { mark.setIndex(mark.getIndex() + 1); if (injectSourceField != null) { next.setValue(injectSourceField, sourceName); } } } catch (Exception ex) { log.info("error {} / {}", mark, stream, ex); mark.setError(mark.getError() + 1); close(); } return next; } } private Wrap nextWrappedSource() throws IOException { StreamFile stream; synchronized (nextSourceLock) { stream = source.nextSource(); } if (stream == null) { return null; } return new Wrap(stream); } private boolean multiFill(Wrap wrap, int fillCount) throws Exception { for (int i = 0; i < fillCount; i++) { Bundle next = wrap.next(); if (next == null) //is source exhausted? 
{ return false; } // looks like we can drop a bundle on done == true while (!queue.offer(next, 1, TimeUnit.SECONDS) && !done.get()) { } if (jmxMetrics) { queueSizeHisto.update(queue.size()); } // may get called multiple times but only first call matters if (!localInitialized) { initialized.countDown(); localInitialized = true; } } return true; } private void fill(int threadID) { Wrap wrap = null; try { while (!done.get() && !exiting.get()) { wrap = preOpened.poll(); //take if immediately available if (wrap == null) { return; //exits worker thread } if (!multiFill(wrap, multiBundleReads)) { wrap = nextWrappedSource(); } if (wrap != null) //May be null from nextWrappedSource -> decreases size of preOpened { preOpened.put(wrap); } wrap = null; } log.debug("[{}] read", threadID); } catch (Exception ex) { log.warn("", ex); } finally { if (wrap != null) { try { wrap.close(); } catch (Exception ex) { log.warn("", ex); } } } } @Override public Bundle next() throws DataChannelError { if (skipSourceExit > 0 && consecutiveFileSkip.get() >= skipSourceExit) { throw new DataChannelError("skipped too many sources: " + skipSourceExit + ". please check your job config."); } try { int countdown = POLL_COUNTDOWN; while ((localInitialized || waitForInitialized()) && POLL_COUNTDOWN == 0 || countdown-- > 0) { long startTime = jmxMetrics ? System.currentTimeMillis() : 0; Bundle next = queue.poll(POLL_INTERVAL, TimeUnit.MILLISECONDS); if (jmxMetrics) { readTimer.update(System.currentTimeMillis() - startTime, TimeUnit.MILLISECONDS); } if (next == termBundle) { // re-add TERM bundle in case there are more consumers if (!queue.offer(next)) { log.info("next offer TERM fail. queue={}", queue.size()); } return null; } else if (next != null) { return next; } if (done.get()) { return null; } if (POLL_COUNTDOWN > 0) { log.info("next polled null, retrying {} more times. 
done={} exiting={}", countdown, done.get(), exiting.get()); } log.info(fileStatsToString("null poll ")); } if (countdown < 0) { log.info("exit with no data during poll countdown"); } return null; } catch (Exception ex) { throw new DataChannelError(ex); } } private String fileStatsToString(String reason) { return Objects.toStringHelper(reason) .add("reading", reading.count()) .add("opening", opening.count()) .add("unseen", openNew.count()) .add("continued", openIndex.count()) .add("skipping", skipping.count()) .add("skipped", openSkip.count()) .add("bundles-skipped", globalBundleSkip.count()) .add("median-size", fileSizeHisto.getSnapshot().getMedian()) .toString(); } @Override public Bundle peek() throws DataChannelError { if (localInitialized || waitForInitialized()) { try { Bundle peek = queue.peek(); return peek == termBundle ? null : peek; } catch (Exception ex) { throw new DataChannelError(ex); } } return null; } private boolean waitForInitialized() { try { while (!localInitialized && !initialized.await(3, TimeUnit.SECONDS) && !done.get()) { log.info(fileStatsToString("waiting for initialization")); } log.info(fileStatsToString("initialized")); localInitialized = true; return true; } catch (InterruptedException ignored) { log.info("interrupted while waiting for initialization to be true"); return false; } } @Override public void close() { if (log.isDebugEnabled() || done.get()) { log.info("close() called. done={} queue={}", done, queue.size()); } done.set(true); } @Override public Bundle createBundle() { return new ListBundle(bundleFormat); } /** * chops inputstreams into bundles */ public interface Bundleizer { public Bundle next() throws Exception; } /** * Specifies the conversion into bundles (this is specific to mesh2). 
* <p>The following factories are available: * <ul> * <li>{@link ChannelBundleizer channel}</li> * <li>{@link com.addthis.hydra.task.source.bundleizer.ColumnBundleizer column}</li> * <li>{@link com.addthis.hydra.task.source.bundleizer.KVBundleizer kv}</li> * </ul> * * @user-reference */ @Codec.Set(classMapFactory = BundleizerFactory.CMAP.class) public abstract static class BundleizerFactory implements Codec.Codable { private static Codec.ClassMap cmap = new Codec.ClassMap() { @Override public String getClassField() { return "type"; } }; public static class CMAP implements Codec.ClassMapFactory { public Codec.ClassMap getClassMap() { return cmap; } } static { PluginReader.registerPlugin("-stream-bundleizer.classmap", cmap, BundleizerFactory.class); } @SuppressWarnings("unused") public static void registerBundleizer(String name, Class<? extends BundleizerFactory> clazz) { cmap.add(name, clazz); } public abstract Bundleizer createBundleizer(InputStream input, BundleFactory factory); } private class SourceWorker implements Runnable { private final AtomicInteger generateThreadIDs = new AtomicInteger(0); @Override public void run() { int threadID = generateThreadIDs.incrementAndGet(); log.debug("worker {} starting", threadID); try { // preopen a number of sources int preOpenSize = Math.max(1, preOpen / workers); for (int i = 0; i < preOpenSize; i++) { Wrap preOpenedWrap = nextWrappedSource(); log.debug("pre-open {}", preOpenedWrap); if (preOpenedWrap == null) { break; } preOpened.put(preOpenedWrap); } //fill already has a while loop that checks done fill(threadID); } catch (Exception e) { log.warn("Exception while running data source meshy worker thread.", e); } finally { log.debug("worker {} exiting done={}", threadID, done); runningThreadCountDownLatch.countDown(); /** * This expression can evaluate to true more than once. * It is OK because the shutdown() method has a guard * to ensure it is invoked at most once. 
*/ if (runningThreadCountDownLatch.getCount() == 0) { log.info("No more workers are running. One or more threads will attempt to call shutdown."); shutdown(); } } } } }
hydra-task/src/main/java/com/addthis/hydra/task/source/AbstractStreamFileDataSource.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.addthis.hydra.task.source; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.concurrent.ArrayBlockingQueue; import java.util.concurrent.BlockingQueue; import java.util.concurrent.CountDownLatch; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import com.addthis.basis.util.Files; import com.addthis.basis.util.Parameter; import com.addthis.basis.util.Strings; import com.addthis.bundle.channel.DataChannelError; import com.addthis.bundle.core.Bundle; import com.addthis.bundle.core.BundleFactory; import com.addthis.bundle.core.BundleField; import com.addthis.bundle.core.list.ListBundle; import com.addthis.bundle.core.list.ListBundleFormat; import com.addthis.bundle.value.ValueFactory; import com.addthis.bundle.value.ValueString; import com.addthis.codec.Codec; import com.addthis.hydra.common.plugins.PluginReader; import com.addthis.hydra.data.filter.value.StringFilter; import com.addthis.hydra.store.db.DBKey; import com.addthis.hydra.store.db.PageDB; import com.addthis.hydra.task.run.TaskRunConfig; import com.addthis.hydra.task.source.bundleizer.ChannelBundleizer; import com.addthis.hydra.task.stream.PersistentStreamFileSource; import com.addthis.hydra.task.stream.StreamFile; import com.addthis.hydra.task.stream.StreamFileSource; import 
com.addthis.hydra.task.stream.StreamSourceFiltered;
import com.addthis.hydra.task.stream.StreamSourceHashed;

import com.google.common.base.Objects;

import com.yammer.metrics.Metrics;
import com.yammer.metrics.core.Counter;
import com.yammer.metrics.core.Histogram;
import com.yammer.metrics.core.Timer;

import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Abstract implementation of TaskDataSource
 * <p/>
 * There are many common features that streaming data sources need to deal with
 * and this class performs those tasks so that subclasses can leverage the common
 * functionality.
 * <p/>
 * Stream sources normally consume data from remote services
 * that run on servers with unbalanced loads, and operations against those remote
 * services are subject to network latency and bandwidth constraints. The goal
 * of this class is to work with all available sources concurrently, pre-opening
 * packets from those sources and queueing them as the packets become available.
 * This allows clients of this class to consume a steady stream of packets from
 * any available host and prevents blocking while waiting for slower hosts
 * to make data available.
 * <p/>
 * This class consumes bytes that need to be turned into objects consumers understand.
 * Configuration inputs instruct the class on what type of objects to turn the bytes
 * into on receipt. The {@link BundleizerFactory} is responsible for performing this
 * conversion.
 * <p/>
 * This class maintains persistent state tracking which source files have been
 * consumed. This enables the class to intelligently ignore upstream files that
 * have already been completely consumed. The data is maintained in a KV data
 * store using the source file name as the key.
*/ public abstract class AbstractStreamFileDataSource extends TaskDataSource implements BundleFactory, Runnable { private static final Logger log = LoggerFactory.getLogger(AbstractStreamFileDataSource.class); // note that some parameters have 'mesh' in the name. Leaving intact for backwards compatibility private static final int POLL_INTERVAL = Parameter.intValue("dataSourceMeshy2.pollInterval", 1000); private static final int POLL_COUNTDOWN = Parameter.intValue("dataSourceMeshy2.pollCountdown", 1800); // 30 minutes private static final int DEFAULT_PREOPEN = Parameter.intValue("dataSourceMeshy2.preopen", 1); private static final int DEFAULT_SKIP_SOURCE_EXIT = Parameter.intValue("dataSourceMeshy2.skipSourceExit", 0); private static final int MARK_PAGES = Parameter.intValue("source.meshy.mark.pages", 1000); private static final int MARK_PAGE_SIZE = Parameter.intValue("source.meshy.mark.pageSize", 20); private static final int DEFAULT_WORKERS = Parameter.intValue("dataSourceMeshy2.workers", 2); private static final int DEFAULT_BUFFER = Parameter.intValue("dataSourceMeshy2.buffer", 128); private static final int DEFAULT_COUNTDOWN_LATCH_TIMEOUT = Parameter.intValue("dataSourceMeshy2.timeout.sec", 30); /** * A StringFilter that processes file names as a string bundle field. If the filter returns * null (false), then the file is skipped. Otherwise, it uses whatever string the filter * returns. Anything that isn't a StringFilter throws a runtime error. */ @Codec.Set(codable = true) private StringFilter filter; /** * Specifies conversion to bundles. */ @Codec.Set(codable = true) private BundleizerFactory format = new ChannelBundleizer(); /** * Path to the mark directory. */ @Codec.Set(codable = true) private String markDir = "marks"; /** * Ignore the mark directory */ @Codec.Set(codable = true) private boolean ignoreMarkDir; /** * Enable metrics visible only from jmx */ @Codec.Set(codable = true) private boolean jmxMetrics; /** * Number of shards in the input source. 
*/ @Codec.Set(codable = true) protected Integer shardTotal; /** * If specified then process only the shards specified in this array. */ @Codec.Set(codable = true) protected Integer shards[]; /** * If true then generate a hash of the filename input rather than use the {{mod}} field. Default is false. */ @Codec.Set(codable = true) protected boolean hash; /** * If true then allow all of the Hydra nodes to process all the data when * the hash field is false and the filename does not have {{mode}}. Default is false. */ @Codec.Set(codable = true) protected boolean processAllData; /** * If non-null, then inject the filename into the bundle field using this field name. Default is null. */ @Codec.Set(codable = true) private String injectSourceName; /** * Number of bundles to attempt to pull from a file before returning it to the * circular file queue. Difficult to understand without looking at the source code, * but if you feel the need for a lot of worker threads or are desperate for * potential performance gains you might try increasing this. 25 is a good place * to start, I think. The default is 1. */ @Codec.Set(codable = true) private int multiBundleReads = 1; /** * Number of bundles to fetch prior to starting the worker threads. * Default is either "dataSourceMeshy2.preopen" configuration value or 2. */ @Codec.Set(codable = true) private int preOpen = DEFAULT_PREOPEN; /** * Trigger an error when the number of skipped sources is greater than this value. * Default is either "dataSourceMeshy2.skipSourceExit" configuration value or 0. */ @Codec.Set(codable = true) private int skipSourceExit = DEFAULT_SKIP_SOURCE_EXIT; /** * Maximum size of the queue that stores bundles prior to their processing. * Default is either "dataSourceMeshy2.buffer" configuration value or 128. */ @Codec.Set(codable = true) private int buffer = DEFAULT_BUFFER; /** * Number of worker threads that request data from the meshy source. 
* Default is either "dataSourceMeshy2.workers" configuration value or 2. */ @Codec.Set(codable = true) private int workers = DEFAULT_WORKERS; /** * Set to enable marks compatibility mode with older source types. eg. 'mesh' for mesh1 and * 'stream' for stream2. 'stream2' is also fine. Do not set to anything unless doing an in-place * conversion of an existing job with an older source type. It won't be damaging for new jobs, but * it would be nice to eventually be able to drop support entirely. If cloning a job with this set, * then please remove it from the clone (before running). * <p/> * In more detail: Any non-null value will use legacy marks and anything beginning with * 'stream' will trim the starting '/' in a mesh path. */ @Codec.Set(codable = true) private String legacyMode; private final ListBundleFormat bundleFormat = new ListBundleFormat(); private final Bundle termBundle = new ListBundle(bundleFormat); /* metrics */ private Histogram queueSizeHisto = Metrics.newHistogram(getClass(), "queueSizeHisto"); private Histogram fileSizeHisto = Metrics.newHistogram(getClass(), "fileSizeHisto"); private Timer readTimer = Metrics.newTimer(getClass(), "readTimer", TimeUnit.MILLISECONDS, TimeUnit.SECONDS); private final Counter openNew = Metrics.newCounter(getClass(), "openNew"); private final Counter openIndex = Metrics.newCounter(getClass(), "openIndex"); private final Counter openSkip = Metrics.newCounter(getClass(), "openSkip"); private final Counter skipping = Metrics.newCounter(getClass(), "skipping"); private final Counter reading = Metrics.newCounter(getClass(), "reading"); private final Counter opening = Metrics.newCounter(getClass(), "opening"); // Concurrency provisions private final AtomicInteger generateThreadIDs = new AtomicInteger(0); private CountDownLatch runningThreadCountDownLatch = null; private final Counter globalBundleSkip = Metrics.newCounter(getClass(), "globalBundleSkip"); private final AtomicInteger consecutiveFileSkip = new AtomicInteger(); 
private final ThreadLocal<Integer> localBundleSkip = new ThreadLocal<Integer>() { @Override protected Integer initialValue() { return 0; } }; // State control private final LinkedBlockingQueue<Wrap> preOpened = new LinkedBlockingQueue<>(); protected final AtomicBoolean done = new AtomicBoolean(false); protected final AtomicBoolean exiting = new AtomicBoolean(false); private final AtomicBoolean shutdown = new AtomicBoolean(false); private CountDownLatch initialized = new CountDownLatch(1); private boolean localInitialized = false; private BlockingQueue<Bundle> queue; private PageDB<SimpleMark> markDB; private BundleField injectSourceField; private File markDirFile; private int magicMarksNumber = 42; private boolean useSimpleMarks = false; private boolean useLegacyStreamPath = false; private StreamFileSource source; private final Object nextSourceLock = new Object(); public AbstractStreamFileDataSource() { Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { exiting.set(true); shutdown(); } }); } public File getMarkDirFile() { return markDirFile; } public void setSource(StreamFileSource source) { this.source = source; } protected abstract PersistentStreamFileSource getSource(); protected void pushTermBundle() { if (queue != null) { boolean setDone = done.compareAndSet(false, true); boolean pushedTerm = queue.offer(termBundle); log.info("initiating shutdown(). 
setDone={} done={} preOpened={} exiting={} pushedTerm={} queue={} moreData={}", setDone, done.get(), preOpened.size(), exiting.get(), pushedTerm, queue.size(), hadMoreData()); } } protected void closePreOpenedQueue() { for (Wrap wrap : preOpened) { try { wrap.close(); } catch (IOException e) { log.warn("", e); } } } protected void closeMarkDB() { if (markDB != null) { markDB.close(); } else { log.warn("markdb was null, and was not closed"); } } @Override protected void open(TaskRunConfig config) { if (legacyMode != null) { magicMarksNumber = 0; useSimpleMarks = true; if (legacyMode.startsWith("stream")) { log.info("Using legacy mode for 'stream2' marks"); useLegacyStreamPath = true; } else { log.info("Using legacy mode for 'mesh' marks"); } } try { if (ignoreMarkDir) { File md = new File(markDir); if (md.exists()) { FileUtils.deleteDirectory(md); log.warn("Deleted file : {}", md); } } markDirFile = Files.initDirectory(markDir); if (useSimpleMarks) { markDB = new PageDB<>(markDirFile, SimpleMark.class, MARK_PAGE_SIZE, MARK_PAGES); } else { markDB = new PageDB<SimpleMark>(markDirFile, Mark.class, MARK_PAGE_SIZE, MARK_PAGES); } } catch (Exception e) { throw new RuntimeException(e); } if (injectSourceName != null) { injectSourceField = bundleFormat.getField(injectSourceName); } if (shardTotal == null || shardTotal == 0) { shardTotal = config.nodeCount; } if (shards == null) { shards = config.calcShardList(shardTotal); } source = getSource(); PersistentStreamFileSource persistentStreamFileSource = null; if (source != null) { persistentStreamFileSource = (PersistentStreamFileSource) source; } if (!processAllData && !hash && !(persistentStreamFileSource != null && persistentStreamFileSource.hasMod())) { log.error("possible source misconfiguration. lacks both 'hash' and '{{mod}}'. 
fix or set processAllData:true"); throw new RuntimeException("Possible Source Misconfiguration"); } try { if (persistentStreamFileSource != null) { persistentStreamFileSource.init(getMarkDirFile(), shards); } if (filter != null) { setSource(new StreamSourceFiltered(source, filter)); } if (hash) { setSource(new StreamSourceHashed(source, shards, shardTotal, useLegacyStreamPath)); } log.info("buffering[capacity={};workers={};preopen={};marks={};maxSkip={};shards={}]", buffer, workers, preOpen, markDir, skipSourceExit, Strings.join(shards, ",")); } catch (Exception e) { throw new RuntimeException(e); } queue = new ArrayBlockingQueue<>(buffer); if (workers == 0) { log.error("Either we failed to find any meshy sources or workers was set to 0. Shutting down."); shutdown(); } runningThreadCountDownLatch = new CountDownLatch(workers); int workerId = workers; while (workerId-- > 0) { new Thread(this, "DataSourceMeshyWorker-" + workerId).start(); } } protected void shutdownBody() { // shutdown adds termBundle to queue pushTermBundle(); if (runningThreadCountDownLatch != null) { boolean success = false; log.info("Waiting up to {} seconds for outstanding threads to complete.", DEFAULT_COUNTDOWN_LATCH_TIMEOUT); try { success = runningThreadCountDownLatch.await(DEFAULT_COUNTDOWN_LATCH_TIMEOUT, TimeUnit.SECONDS); } catch (InterruptedException e) { log.warn("", e); } if (success) { log.info("All threads have finished."); } else { log.info("All threads did NOT finish."); } } log.debug("closing wrappers"); closePreOpenedQueue(); log.debug("shutting down mesh"); //we may overwrite the local source variable and in doing so throw away the Persistance flag PersistentStreamFileSource baseSource = getSource(); if (baseSource != null) { try { baseSource.shutdown(); } catch (IOException e) { log.warn("", e); } } else { log.warn("getSource() returned null and no source was shutdown"); } closeMarkDB(); log.info(fileStatsToString("shutdown complete")); } protected void shutdown() { if 
(!shutdown.getAndSet(true)) { /** * The body of the shutdown method has been moved into its own * method for testing purposes. Tests which must wait until the * shutdown has completed will override the shutdownBody() method * and place synchronization constructs at the end of the overridden * method. */ shutdownBody(); } } @Override public void run() { int threadID = generateThreadIDs.incrementAndGet(); log.debug("worker {} starting", threadID); try { // preopen a number of sources int preOpenSize = Math.max(1, preOpen / workers); for (int i = 0; i < preOpenSize; i++) { Wrap preOpenedWrap = nextWrappedSource(); log.debug("pre-open {}", preOpenedWrap); if (preOpenedWrap == null) { break; } preOpened.put(preOpenedWrap); } //fill already has a while loop that checks done fill(threadID); } catch (Exception e) { log.warn("Exception while running data source meshy worker thread.", e); } finally { log.debug("worker {} exiting done={}", threadID, done); runningThreadCountDownLatch.countDown(); /** * This expression can evaluate to true more than once. * It is OK because the shutdown() method has a guard * to ensure it is invoked at most once. */ if (runningThreadCountDownLatch.getCount() == 0) { log.info("No more workers are running. 
One or more threads will attempt to call shutdown."); shutdown(); } } } protected class Wrap { final DBKey dbKey; final StreamFile stream; final ValueString sourceName; InputStream input; Bundleizer bundleizer; boolean closed; SimpleMark mark; Wrap(StreamFile stream) throws IOException { fileSizeHisto.update(stream.length()); this.stream = stream; String keyString = stream.getPath(); if (useLegacyStreamPath && keyString.charAt(0) == '/') { keyString = keyString.substring(1); } this.dbKey = new DBKey(magicMarksNumber, keyString); this.sourceName = ValueFactory.create(stream.getPath()); mark = markDB.get(dbKey); String stateValue = Mark.calcValue(stream); if (mark == null) { if (useSimpleMarks) { mark = new SimpleMark().set(stateValue, 0); } else { mark = new Mark().set(stateValue, 0); } log.debug("mark.open {} / {}", mark, stream); opening.inc(); input = stream.getInputStream(); //blocks waiting for network opening.dec(); bundleizer = format.createBundleizer(input, AbstractStreamFileDataSource.this); openNew.inc(); } else { if (mark.getValue().equals(stateValue) && mark.isEnd()) { log.debug("mark.skip {} / {}", mark, stream); openSkip.inc(); if (skipSourceExit > 0) { consecutiveFileSkip.incrementAndGet(); } closed = true; return; } else { openIndex.inc(); if (skipSourceExit > 0) { consecutiveFileSkip.set(0); } } opening.inc(); input = stream.getInputStream(); //blocks waiting for network opening.dec(); bundleizer = format.createBundleizer(input, AbstractStreamFileDataSource.this); long read = mark.getIndex(); int bundlesSkipped = 0; skipping.inc(); while (read > 0) { if (++bundlesSkipped % 100 == 0) { int totalSkip = localBundleSkip.get() + bundlesSkipped; if ((totalSkip / 100) % 250 == 0) { log.info(Objects.toStringHelper(Thread.currentThread().getName() + " bundle skip log") .add("thread-skip", totalSkip) .add("file-skip", bundlesSkipped) .add("file-to-skip", read) .add("global-skip-estimate", bundlesSkipped + globalBundleSkip.count()) .toString()); 
localBundleSkip.set(totalSkip); globalBundleSkip.inc(bundlesSkipped); bundlesSkipped = 0; } } try { bundleizer.next(); read--; } catch (Exception e) { log.warn("", e); } } skipping.dec(); localBundleSkip.set(localBundleSkip.get() + bundlesSkipped); globalBundleSkip.inc(bundlesSkipped); log.debug("mark.indx {} / {}", mark, stream); } reading.inc(); } void close() throws IOException { if (!closed) { input.close(); mark.update(stream); markDB.put(dbKey, mark); log.debug("mark.save {}:{} / {}", dbKey, mark, stream); closed = true; reading.dec(); } } Bundle next() throws IOException { if (closed) { log.debug("next {} / {} CLOSED returns null", mark, stream); return null; } Bundle next = null; try { next = bundleizer.next(); log.debug("next {} / {} = {}", mark, stream, next); if (next == null) { mark.setEnd(true); close(); } else { mark.setIndex(mark.getIndex() + 1); if (injectSourceField != null) { next.setValue(injectSourceField, sourceName); } } } catch (Exception ex) { log.info("error {} / {}", mark, stream, ex); mark.setError(mark.getError() + 1); close(); } return next; } } private Wrap nextWrappedSource() throws IOException { StreamFile stream; synchronized (nextSourceLock) { stream = source.nextSource(); } if (stream == null) { return null; } return new Wrap(stream); } private boolean multiFill(Wrap wrap, int fillCount) throws Exception { for (int i = 0; i < fillCount; i++) { Bundle next = wrap.next(); if (next == null) //is source exhausted? 
{ return false; } // looks like we can drop a bundle on done == true while (!queue.offer(next, 1, TimeUnit.SECONDS) && !done.get()) { } if (jmxMetrics) { queueSizeHisto.update(queue.size()); } // may get called multiple times but only first call matters if (!localInitialized) { initialized.countDown(); localInitialized = true; } } return true; } private void fill(int threadID) { Wrap wrap = null; try { while (!done.get() && !exiting.get()) { wrap = preOpened.poll(); //take if immediately available if (wrap == null) { return; //exits worker thread } if (!multiFill(wrap, multiBundleReads)) { wrap = nextWrappedSource(); } if (wrap != null) //May be null from nextWrappedSource -> decreases size of preOpened { preOpened.put(wrap); } wrap = null; } log.debug("[{}] read", threadID); } catch (Exception ex) { log.warn("", ex); } finally { if (wrap != null) { try { wrap.close(); } catch (Exception ex) { log.warn("", ex); } } } } @Override public Bundle next() throws DataChannelError { if (skipSourceExit > 0 && consecutiveFileSkip.get() >= skipSourceExit) { throw new DataChannelError("skipped too many sources: " + skipSourceExit + ". please check your job config."); } try { int countdown = POLL_COUNTDOWN; while ((localInitialized || waitForInitialized()) && POLL_COUNTDOWN == 0 || countdown-- > 0) { long startTime = jmxMetrics ? System.currentTimeMillis() : 0; Bundle next = queue.poll(POLL_INTERVAL, TimeUnit.MILLISECONDS); if (jmxMetrics) { readTimer.update(System.currentTimeMillis() - startTime, TimeUnit.MILLISECONDS); } if (next == termBundle) { // re-add TERM bundle in case there are more consumers if (!queue.offer(next)) { log.info("next offer TERM fail. queue={}", queue.size()); } return null; } else if (next != null) { return next; } if (done.get()) { return null; } if (POLL_COUNTDOWN > 0) { log.info("next polled null, retrying {} more times. 
done={} exiting={}", countdown, done.get(), exiting.get()); } log.info(fileStatsToString("null poll ")); } if (countdown < 0) { log.info("exit with no data during poll countdown"); } return null; } catch (Exception ex) { throw new DataChannelError(ex); } } private String fileStatsToString(String reason) { return Objects.toStringHelper(reason) .add("reading", reading.count()) .add("opening", opening.count()) .add("unseen", openNew.count()) .add("continued", openIndex.count()) .add("skipping", skipping.count()) .add("skipped", openSkip.count()) .add("bundles-skipped", globalBundleSkip.count()) .add("median-size", fileSizeHisto.getSnapshot().getMedian()) .toString(); } @Override public Bundle peek() throws DataChannelError { if (localInitialized || waitForInitialized()) { try { Bundle peek = queue.peek(); return peek == termBundle ? null : peek; } catch (Exception ex) { throw new DataChannelError(ex); } } return null; } private boolean waitForInitialized() { try { while (!localInitialized && !initialized.await(3, TimeUnit.SECONDS) && !done.get()) { log.info(fileStatsToString("waiting for initialization")); } log.info(fileStatsToString("initialized")); localInitialized = true; return true; } catch (InterruptedException ignored) { log.info("interrupted while waiting for initialization to be true"); return false; } } @Override public void close() { if (log.isDebugEnabled() || done.get()) { log.info("close() called. done={} queue={}", done, queue.size()); } done.set(true); } @Override public Bundle createBundle() { return new ListBundle(bundleFormat); } /** * chops inputstreams into bundles */ public interface Bundleizer { public Bundle next() throws Exception; } /** * Specifies the conversion into bundles (this is specific to mesh2). 
* <p>The following factories are available: * <ul> * <li>{@link ChannelBundleizer channel}</li> * <li>{@link com.addthis.hydra.task.source.bundleizer.ColumnBundleizer column}</li> * <li>{@link com.addthis.hydra.task.source.bundleizer.KVBundleizer kv}</li> * </ul> * * @user-reference */ @Codec.Set(classMapFactory = BundleizerFactory.CMAP.class) public abstract static class BundleizerFactory implements Codec.Codable { private static Codec.ClassMap cmap = new Codec.ClassMap() { @Override public String getClassField() { return "type"; } }; public static class CMAP implements Codec.ClassMapFactory { public Codec.ClassMap getClassMap() { return cmap; } } static { PluginReader.registerPlugin("-stream-bundleizer.classmap", cmap, BundleizerFactory.class); } @SuppressWarnings("unused") public static void registerBundleizer(String name, Class<? extends BundleizerFactory> clazz) { cmap.add(name, clazz); } public abstract Bundleizer createBundleizer(InputStream input, BundleFactory factory); } }
imperfect thread refactor in asfds better than nothing and tired of it sitting in my git status
hydra-task/src/main/java/com/addthis/hydra/task/source/AbstractStreamFileDataSource.java
imperfect thread refactor in asfds better than nothing and tired of it sitting in my git status
<ide><path>ydra-task/src/main/java/com/addthis/hydra/task/source/AbstractStreamFileDataSource.java <ide> import java.util.concurrent.ArrayBlockingQueue; <ide> import java.util.concurrent.BlockingQueue; <ide> import java.util.concurrent.CountDownLatch; <add>import java.util.concurrent.ExecutorService; <add>import java.util.concurrent.Executors; <ide> import java.util.concurrent.LinkedBlockingQueue; <ide> import java.util.concurrent.TimeUnit; <ide> import java.util.concurrent.atomic.AtomicBoolean; <ide> import com.addthis.hydra.task.stream.StreamSourceHashed; <ide> <ide> import com.google.common.base.Objects; <add>import com.google.common.util.concurrent.ThreadFactoryBuilder; <ide> <ide> import com.yammer.metrics.Metrics; <ide> import com.yammer.metrics.core.Counter; <ide> * have already been completely consumed. The data is maintained in a KV data <ide> * store using the source file name as the key. <ide> */ <del>public abstract class AbstractStreamFileDataSource extends TaskDataSource implements BundleFactory, Runnable { <add>public abstract class AbstractStreamFileDataSource extends TaskDataSource implements BundleFactory { <ide> <ide> private static final Logger log = LoggerFactory.getLogger(AbstractStreamFileDataSource.class); <ide> <ide> <ide> private final ListBundleFormat bundleFormat = new ListBundleFormat(); <ide> private final Bundle termBundle = new ListBundle(bundleFormat); <add> private final SourceWorker sourceWorker = new SourceWorker(); <add> private final ExecutorService workerThreadPool = <add> Executors.newCachedThreadPool(new ThreadFactoryBuilder().setNameFormat("streamSourceWorker-%d").build()); <ide> <ide> /* metrics */ <ide> private Histogram queueSizeHisto = Metrics.newHistogram(getClass(), "queueSizeHisto"); <ide> private final Counter opening = Metrics.newCounter(getClass(), "opening"); <ide> <ide> // Concurrency provisions <del> private final AtomicInteger generateThreadIDs = new AtomicInteger(0); <ide> private CountDownLatch 
runningThreadCountDownLatch = null; <ide> private final Counter globalBundleSkip = Metrics.newCounter(getClass(), "globalBundleSkip"); <ide> private final AtomicInteger consecutiveFileSkip = new AtomicInteger(); <ide> } <ide> <ide> public void setSource(StreamFileSource source) { <del> this.source = source; <add> AbstractStreamFileDataSource.this.source = source; <ide> } <ide> <ide> protected abstract PersistentStreamFileSource getSource(); <ide> runningThreadCountDownLatch = new CountDownLatch(workers); <ide> int workerId = workers; <ide> while (workerId-- > 0) { <del> new Thread(this, "DataSourceMeshyWorker-" + workerId).start(); <add> workerThreadPool.execute(sourceWorker); <ide> } <ide> } <ide> <ide> * method. <ide> */ <ide> shutdownBody(); <del> } <del> } <del> <del> @Override <del> public void run() { <del> int threadID = generateThreadIDs.incrementAndGet(); <del> log.debug("worker {} starting", threadID); <del> try { <del> // preopen a number of sources <del> int preOpenSize = Math.max(1, preOpen / workers); <del> for (int i = 0; i < preOpenSize; i++) { <del> Wrap preOpenedWrap = nextWrappedSource(); <del> log.debug("pre-open {}", preOpenedWrap); <del> if (preOpenedWrap == null) { <del> break; <del> } <del> preOpened.put(preOpenedWrap); <del> } <del> <del> //fill already has a while loop that checks done <del> fill(threadID); <del> } catch (Exception e) { <del> log.warn("Exception while running data source meshy worker thread.", e); <del> } finally { <del> log.debug("worker {} exiting done={}", threadID, done); <del> runningThreadCountDownLatch.countDown(); <del> <del> /** <del> * This expression can evaluate to true more than once. <del> * It is OK because the shutdown() method has a guard <del> * to ensure it is invoked at most once. <del> */ <del> if (runningThreadCountDownLatch.getCount() == 0) { <del> log.info("No more workers are running. 
One or more threads will attempt to call shutdown."); <del> shutdown(); <del> } <ide> } <ide> } <ide> <ide> public abstract Bundleizer createBundleizer(InputStream input, BundleFactory factory); <ide> } <ide> <add> private class SourceWorker implements Runnable { <add> <add> private final AtomicInteger generateThreadIDs = new AtomicInteger(0); <add> <add> @Override <add> public void run() { <add> int threadID = generateThreadIDs.incrementAndGet(); <add> log.debug("worker {} starting", threadID); <add> try { <add> // preopen a number of sources <add> int preOpenSize = Math.max(1, preOpen / workers); <add> for (int i = 0; i < preOpenSize; i++) { <add> Wrap preOpenedWrap = nextWrappedSource(); <add> log.debug("pre-open {}", preOpenedWrap); <add> if (preOpenedWrap == null) { <add> break; <add> } <add> preOpened.put(preOpenedWrap); <add> } <add> <add> //fill already has a while loop that checks done <add> fill(threadID); <add> } catch (Exception e) { <add> log.warn("Exception while running data source meshy worker thread.", e); <add> } finally { <add> log.debug("worker {} exiting done={}", threadID, done); <add> runningThreadCountDownLatch.countDown(); <add> <add> /** <add> * This expression can evaluate to true more than once. <add> * It is OK because the shutdown() method has a guard <add> * to ensure it is invoked at most once. <add> */ <add> if (runningThreadCountDownLatch.getCount() == 0) { <add> log.info("No more workers are running. One or more threads will attempt to call shutdown."); <add> shutdown(); <add> } <add> } <add> } <add> } <ide> }
Java
apache-2.0
b5e23452413e3f8f9d9449c116cbd554c6e257fe
0
masterproject-reimbursement/reimbursement-server,masterproject-reimbursement/reimbursement-server
package ch.uzh.csg.reimbursement.configuration; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.http.HttpMethod; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.authentication.configurers.GlobalAuthenticationConfigurerAdapter; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.web.csrf.CsrfFilter; import org.springframework.security.web.csrf.CsrfTokenRepository; import org.springframework.security.web.csrf.HttpSessionCsrfTokenRepository; import org.springframework.web.multipart.commons.CommonsMultipartResolver; import ch.uzh.csg.reimbursement.security.CsrfHeaderFilter; import ch.uzh.csg.reimbursement.security.FormLoginFailureHandler; import ch.uzh.csg.reimbursement.security.FormLoginSuccessHandler; import ch.uzh.csg.reimbursement.security.HttpAuthenticationEntryPoint; import ch.uzh.csg.reimbursement.security.HttpLogoutSuccessHandler; @Configuration @EnableWebSecurity @ComponentScan({ "ch.uzh.csg.reimbursement.security" }) public class WebSecurityConfiguration extends WebSecurityConfigurerAdapter { @Autowired private FormLoginSuccessHandler authSuccessHandler; @Autowired private HttpAuthenticationEntryPoint authenticationEntryPoint; @Autowired private FormLoginFailureHandler authFailureHandler; @Autowired private HttpLogoutSuccessHandler logoutSuccessHandler; /* JSON - Object mapper for use in the 
authHandlers */ @Bean public MappingJackson2HttpMessageConverter mappingJackson2HttpMessageConverter() { return new MappingJackson2HttpMessageConverter(); } /* Enables File Upload through REST */ @Bean public CommonsMultipartResolver filterMultipartResolver() { CommonsMultipartResolver resolver = new CommonsMultipartResolver(); resolver.setMaxUploadSize(20000000); return resolver; } /* Token Repo for use with CsrfHeaderFilter */ private CsrfTokenRepository csrfTokenRepository() { HttpSessionCsrfTokenRepository repository = new HttpSessionCsrfTokenRepository(); repository.setHeaderName("X-XSRF-TOKEN"); return repository; } @Override protected void configure(HttpSecurity http) throws Exception { http.csrf() .csrfTokenRepository(csrfTokenRepository()) .and() .addFilterAfter(new CsrfHeaderFilter(), CsrfFilter.class); http.exceptionHandling() .authenticationEntryPoint(authenticationEntryPoint) .and().authorizeRequests() // allow front-end folders located in src/main/webapp/static .antMatchers("/static/**").permitAll() // allow CORS's options call on logout .antMatchers(HttpMethod.OPTIONS,"/api/logout").permitAll() // allow specific rest resources .antMatchers("/api/mobile/**").permitAll() .antMatchers("/api/expense/**").permitAll() //TODO Chrigi remove if not used anymore - also remove the csrfToken page from frontend .antMatchers("/testingpublic/**").permitAll() .antMatchers("/api-docs/**", "/swagger-ui/**").permitAll() // block everything else .anyRequest().fullyAuthenticated() .and() .formLogin().permitAll() .loginProcessingUrl("/api/login") .successHandler(authSuccessHandler) .failureHandler(authFailureHandler) .and() .logout().permitAll() .logoutUrl("/api/logout") .logoutSuccessHandler(logoutSuccessHandler) .and() .sessionManagement().maximumSessions(1); } @Configuration protected static class AuthenticationConfiguration extends GlobalAuthenticationConfigurerAdapter { @Override public void init(AuthenticationManagerBuilder auth) throws Exception { // TODO Chrigi 
remove if not used anymore // Howto link: https://github.com/spring-projects/spring-security-javaconfig/blob/master/spring-security-javaconfig/src/test/groovy/org/springframework/security/config/annotation/authentication/ldap/NamespaceLdapAuthenticationProviderTestsConfigs.java // auth. // ldapAuthentication() // // .userDnPattern only used for direct binding to the user -> userSearchFilter for searching // .userDnPatterns("uid={0}") // .contextSource() // .url("ldap://ldap.forumsys.com:389/dc=example,dc=com"); auth .ldapAuthentication() .userSearchFilter("uid={0}") .groupSearchBase("ou=Groups") .contextSource() .ldif("classpath:test-server.ldif") .root("dc=ifi,dc=uzh,dc=ch"); } } }
src/main/java/ch/uzh/csg/reimbursement/configuration/WebSecurityConfiguration.java
package ch.uzh.csg.reimbursement.configuration; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.Configuration; import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; import org.springframework.security.config.annotation.authentication.configurers.GlobalAuthenticationConfigurerAdapter; import org.springframework.security.config.annotation.web.builders.HttpSecurity; import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; import org.springframework.security.web.csrf.CsrfFilter; import org.springframework.security.web.csrf.CsrfTokenRepository; import org.springframework.security.web.csrf.HttpSessionCsrfTokenRepository; import org.springframework.web.multipart.commons.CommonsMultipartResolver; import ch.uzh.csg.reimbursement.security.CsrfHeaderFilter; import ch.uzh.csg.reimbursement.security.FormLoginFailureHandler; import ch.uzh.csg.reimbursement.security.FormLoginSuccessHandler; import ch.uzh.csg.reimbursement.security.HttpAuthenticationEntryPoint; import ch.uzh.csg.reimbursement.security.HttpLogoutSuccessHandler; @Configuration @EnableWebSecurity @ComponentScan({ "ch.uzh.csg.reimbursement.security" }) public class WebSecurityConfiguration extends WebSecurityConfigurerAdapter { @Autowired private FormLoginSuccessHandler authSuccessHandler; @Autowired private HttpAuthenticationEntryPoint authenticationEntryPoint; @Autowired private FormLoginFailureHandler authFailureHandler; @Autowired private HttpLogoutSuccessHandler logoutSuccessHandler; /* JSON - Object mapper for use in the authHandlers */ @Bean public 
MappingJackson2HttpMessageConverter mappingJackson2HttpMessageConverter() { return new MappingJackson2HttpMessageConverter(); } /* Enables File Upload through REST */ @Bean public CommonsMultipartResolver filterMultipartResolver() { CommonsMultipartResolver resolver = new CommonsMultipartResolver(); resolver.setMaxUploadSize(20000000); return resolver; } /* Token Repo for use with CsrfHeaderFilter */ private CsrfTokenRepository csrfTokenRepository() { HttpSessionCsrfTokenRepository repository = new HttpSessionCsrfTokenRepository(); repository.setHeaderName("X-XSRF-TOKEN"); return repository; } @Override protected void configure(HttpSecurity http) throws Exception { http.csrf() // .disable(); .csrfTokenRepository(csrfTokenRepository()) .and() .addFilterAfter(new CsrfHeaderFilter(), CsrfFilter.class); http.exceptionHandling() .authenticationEntryPoint(authenticationEntryPoint) .and().authorizeRequests() // allow front-end folders located in src/main/webapp/static .antMatchers("/static/**").permitAll() // allow specific rest resources .antMatchers("/api/user/**").permitAll() .antMatchers("/api/expense/**").permitAll() //TODO Chrigi remove if not used anymore - also remove the csrfToken page from frontend .antMatchers("/testingpublic/**").permitAll() .antMatchers("/api-docs/**", "/swagger-ui/**").permitAll() // block everything else .anyRequest().fullyAuthenticated() .and() .formLogin().permitAll() .loginProcessingUrl("/api/login") .successHandler(authSuccessHandler) .failureHandler(authFailureHandler) .and() .logout().permitAll() .logoutUrl("/api/logout") .logoutSuccessHandler(logoutSuccessHandler) .and() .sessionManagement().maximumSessions(1); } @Configuration protected static class AuthenticationConfiguration extends GlobalAuthenticationConfigurerAdapter { @Override public void init(AuthenticationManagerBuilder auth) throws Exception { // TODO Chrigi remove if not used anymore // Howto link: 
https://github.com/spring-projects/spring-security-javaconfig/blob/master/spring-security-javaconfig/src/test/groovy/org/springframework/security/config/annotation/authentication/ldap/NamespaceLdapAuthenticationProviderTestsConfigs.java // auth. // ldapAuthentication() // // .userDnPattern only used for direct binding to the user -> userSearchFilter for searching // .userDnPatterns("uid={0}") // .contextSource() // .url("ldap://ldap.forumsys.com:389/dc=example,dc=com"); auth .ldapAuthentication() .userSearchFilter("uid={0}") .groupSearchBase("ou=Groups") .contextSource() .ldif("classpath:test-server.ldif") .root("dc=ifi,dc=uzh,dc=ch"); } } }
Added http.antMatcher to allow OPTIONS from POST in CORS scenarios
src/main/java/ch/uzh/csg/reimbursement/configuration/WebSecurityConfiguration.java
Added http.antMatcher to allow OPTIONS from POST in CORS scenarios
<ide><path>rc/main/java/ch/uzh/csg/reimbursement/configuration/WebSecurityConfiguration.java <ide> import org.springframework.context.annotation.Bean; <ide> import org.springframework.context.annotation.ComponentScan; <ide> import org.springframework.context.annotation.Configuration; <add>import org.springframework.http.HttpMethod; <ide> import org.springframework.http.converter.json.MappingJackson2HttpMessageConverter; <ide> import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; <ide> import org.springframework.security.config.annotation.authentication.configurers.GlobalAuthenticationConfigurerAdapter; <ide> @Override <ide> protected void configure(HttpSecurity http) throws Exception { <ide> http.csrf() <del> // .disable(); <ide> .csrfTokenRepository(csrfTokenRepository()) <ide> .and() <ide> .addFilterAfter(new CsrfHeaderFilter(), CsrfFilter.class); <ide> .and().authorizeRequests() <ide> // allow front-end folders located in src/main/webapp/static <ide> .antMatchers("/static/**").permitAll() <add> // allow CORS's options call on logout <add> .antMatchers(HttpMethod.OPTIONS,"/api/logout").permitAll() <ide> // allow specific rest resources <del> .antMatchers("/api/user/**").permitAll() <add> .antMatchers("/api/mobile/**").permitAll() <ide> .antMatchers("/api/expense/**").permitAll() <ide> //TODO Chrigi remove if not used anymore - also remove the csrfToken page from frontend <ide> .antMatchers("/testingpublic/**").permitAll()
Java
apache-2.0
ebcc37bd06386ff652c6794630a9494a11b5a2a0
0
sunjincheng121/flink,StephanEwen/incubator-flink,tillrohrmann/flink,darionyaphet/flink,clarkyzl/flink,GJL/flink,shaoxuan-wang/flink,tillrohrmann/flink,aljoscha/flink,zhangminglei/flink,tzulitai/flink,shaoxuan-wang/flink,twalthr/flink,kaibozhou/flink,apache/flink,hequn8128/flink,shaoxuan-wang/flink,tony810430/flink,lincoln-lil/flink,rmetzger/flink,bowenli86/flink,lincoln-lil/flink,jinglining/flink,jinglining/flink,aljoscha/flink,tillrohrmann/flink,tillrohrmann/flink,mylog00/flink,yew1eb/flink,mylog00/flink,tony810430/flink,wwjiang007/flink,lincoln-lil/flink,mylog00/flink,rmetzger/flink,mbode/flink,zentol/flink,fhueske/flink,lincoln-lil/flink,StephanEwen/incubator-flink,godfreyhe/flink,kl0u/flink,aljoscha/flink,xccui/flink,zentol/flink,mbode/flink,StephanEwen/incubator-flink,rmetzger/flink,shaoxuan-wang/flink,kl0u/flink,tillrohrmann/flink,ueshin/apache-flink,darionyaphet/flink,bowenli86/flink,zhangminglei/flink,bowenli86/flink,twalthr/flink,twalthr/flink,jinglining/flink,tzulitai/flink,gyfora/flink,greghogan/flink,hequn8128/flink,apache/flink,greghogan/flink,kaibozhou/flink,zhangminglei/flink,zjureel/flink,zhangminglei/flink,godfreyhe/flink,wwjiang007/flink,zentol/flink,GJL/flink,wwjiang007/flink,tony810430/flink,mbode/flink,wwjiang007/flink,xccui/flink,GJL/flink,apache/flink,mylog00/flink,tillrohrmann/flink,ueshin/apache-flink,kaibozhou/flink,godfreyhe/flink,apache/flink,hequn8128/flink,gyfora/flink,tzulitai/flink,ueshin/apache-flink,fhueske/flink,darionyaphet/flink,kl0u/flink,yew1eb/flink,StephanEwen/incubator-flink,jinglining/flink,wwjiang007/flink,kaibozhou/flink,StephanEwen/incubator-flink,sunjincheng121/flink,bowenli86/flink,godfreyhe/flink,clarkyzl/flink,aljoscha/flink,kl0u/flink,gyfora/flink,hequn8128/flink,kl0u/flink,godfreyhe/flink,rmetzger/flink,kaibozhou/flink,zjureel/flink,zjureel/flink,greghogan/flink,tzulitai/flink,tzulitai/flink,mylog00/flink,GJL/flink,zentol/flink,zentol/flink,twalthr/flink,lincoln-lil/flink,zjureel/flink,lincoln-lil/flink,wwjiang007/
flink,GJL/flink,greghogan/flink,jinglining/flink,mbode/flink,kaibozhou/flink,xccui/flink,lincoln-lil/flink,darionyaphet/flink,aljoscha/flink,aljoscha/flink,clarkyzl/flink,godfreyhe/flink,zjureel/flink,gyfora/flink,clarkyzl/flink,tzulitai/flink,GJL/flink,fhueske/flink,xccui/flink,apache/flink,darionyaphet/flink,rmetzger/flink,zentol/flink,clarkyzl/flink,shaoxuan-wang/flink,yew1eb/flink,xccui/flink,gyfora/flink,mbode/flink,jinglining/flink,rmetzger/flink,fhueske/flink,shaoxuan-wang/flink,apache/flink,bowenli86/flink,zjureel/flink,tony810430/flink,xccui/flink,yew1eb/flink,twalthr/flink,yew1eb/flink,ueshin/apache-flink,fhueske/flink,gyfora/flink,tony810430/flink,zentol/flink,bowenli86/flink,sunjincheng121/flink,godfreyhe/flink,tillrohrmann/flink,tony810430/flink,fhueske/flink,zjureel/flink,wwjiang007/flink,xccui/flink,apache/flink,sunjincheng121/flink,hequn8128/flink,greghogan/flink,zhangminglei/flink,ueshin/apache-flink,rmetzger/flink,StephanEwen/incubator-flink,greghogan/flink,hequn8128/flink,kl0u/flink,gyfora/flink,tony810430/flink,twalthr/flink,sunjincheng121/flink,twalthr/flink,sunjincheng121/flink
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.yarn.cli; import org.apache.flink.client.cli.AbstractCustomCommandLine; import org.apache.flink.client.cli.CliArgsException; import org.apache.flink.client.cli.CliFrontend; import org.apache.flink.client.cli.CliFrontendParser; import org.apache.flink.client.deployment.ClusterSpecification; import org.apache.flink.client.program.ClusterClient; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.CoreOptions; import org.apache.flink.configuration.GlobalConfiguration; import org.apache.flink.configuration.JobManagerOptions; import org.apache.flink.configuration.TaskManagerOptions; import org.apache.flink.runtime.clusterframework.ApplicationStatus; import org.apache.flink.runtime.clusterframework.messages.GetClusterStatusResponse; import org.apache.flink.runtime.concurrent.ScheduledExecutorServiceAdapter; import org.apache.flink.runtime.security.SecurityConfiguration; import org.apache.flink.runtime.security.SecurityUtils; import org.apache.flink.runtime.util.LeaderConnectionInfo; import org.apache.flink.util.ExecutorUtils; import org.apache.flink.util.FlinkException; import 
org.apache.flink.util.Preconditions; import org.apache.flink.yarn.AbstractYarnClusterDescriptor; import org.apache.flink.yarn.Flip6YarnClusterDescriptor; import org.apache.flink.yarn.YarnClusterDescriptor; import org.apache.flink.yarn.configuration.YarnConfigOptions; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.fs.Path; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ConverterUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import static org.apache.flink.configuration.HighAvailabilityOptions.HA_CLUSTER_ID; /** * Class handling the command line interface to the YARN session. 
*/
public class FlinkYarnSessionCli extends AbstractCustomCommandLine<ApplicationId> {
	private static final Logger LOG = LoggerFactory.getLogger(FlinkYarnSessionCli.class);

	//------------------------------------ Constants -------------------------

	public static final String CONFIG_FILE_LOGBACK_NAME = "logback.xml";
	public static final String CONFIG_FILE_LOG4J_NAME = "log4j.properties";

	// Interval at which the interactive CLI polls the YARN application status (see runInteractiveCli/repStep).
	private static final long CLIENT_POLLING_INTERVAL_MS = 3000L;

	/** The id for the CommandLine interface. */
	private static final String ID = "yarn-cluster";

	// YARN-session related constants
	private static final String YARN_PROPERTIES_FILE = ".yarn-properties-";
	private static final String YARN_APPLICATION_ID_KEY = "applicationID";
	private static final String YARN_PROPERTIES_PARALLELISM = "parallelism";
	private static final String YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING = "dynamicPropertiesString";

	private static final String YARN_DYNAMIC_PROPERTIES_SEPARATOR = "@@"; // this has to be a regex for String.split()

	private static final String YARN_SESSION_HELP = "Available commands:\n" +
		"help - show these commands\n" +
		"stop - stop the YARN session";

	//------------------------------------ Command Line argument options -------------------------
	// the prefix transformation is used by the CliFrontend static constructor.
	private final Option query;
	// --- or ---
	private final Option applicationId;
	// --- or ---
	private final Option queue;
	private final Option shipPath;
	private final Option flinkJar;
	private final Option jmMemory;
	private final Option tmMemory;
	private final Option container;
	private final Option slots;
	private final Option detached;
	private final Option zookeeperNamespace;
	private final Option help;

	/**
	 * @deprecated Streaming mode has been deprecated without replacement. Set the
	 * {@link TaskManagerOptions#MANAGED_MEMORY_PRE_ALLOCATE} configuration
	 * key to true to get the previous batch mode behaviour.
	 */
	@Deprecated
	private final Option streaming;
	private final Option name;

	// Aggregates all options above; used for usage printing and to detect Yarn-specific options.
	private final Options allOptions;

	/**
	 * Dynamic properties allow the user to specify additional configuration values with -D, such as
	 * <tt> -Dfs.overwrite-files=true -Dtaskmanager.network.memory.min=536346624</tt>.
	 */
	private final Option dynamicproperties;

	// Whether the interactive command loop may read from stdin (disabled e.g. for tests).
	private final boolean acceptInteractiveInput;

	private final String configurationDirectory;

	// Contents of the ".yarn-properties-<user>" file, if one was found at construction time (empty otherwise).
	private final Properties yarnPropertiesFile;

	// Application id parsed from the Yarn properties file, or null if no file was found.
	private final ApplicationId yarnApplicationIdFromYarnProperties;

	private final String yarnPropertiesFileLocation;

	// True when CoreOptions.MODE equals the FLIP-6 mode; selects the cluster descriptor implementation.
	private final boolean flip6;

	private final YarnConfiguration yarnConfiguration;

	//------------------------------------ Internal fields -------------------------
	// Set when the user requested detached mode; controls whether run() enters the interactive loop.
	private boolean detachedMode = false;

	/**
	 * Creates a new Yarn session CLI that accepts interactive input.
	 *
	 * @param configuration Flink configuration
	 * @param configurationDirectory directory containing the Flink configuration
	 * @param shortPrefix prefix prepended to short option names
	 * @param longPrefix prefix prepended to long option names
	 * @throws FlinkException if an existing Yarn properties file cannot be read or parsed
	 */
	public FlinkYarnSessionCli(
			Configuration configuration,
			String configurationDirectory,
			String shortPrefix,
			String longPrefix) throws FlinkException {
		this(configuration, configurationDirectory, shortPrefix, longPrefix, true);
	}

	/**
	 * Creates a new Yarn session CLI.
	 *
	 * @param configuration Flink configuration
	 * @param configurationDirectory directory containing the Flink configuration
	 * @param shortPrefix prefix prepended to short option names
	 * @param longPrefix prefix prepended to long option names
	 * @param acceptInteractiveInput whether the interactive command loop may read console input
	 * @throws FlinkException if an existing Yarn properties file cannot be read or parsed
	 */
	public FlinkYarnSessionCli(
			Configuration configuration,
			String configurationDirectory,
			String shortPrefix,
			String longPrefix,
			boolean acceptInteractiveInput) throws FlinkException {
		super(configuration);
		this.configurationDirectory = Preconditions.checkNotNull(configurationDirectory);
		this.acceptInteractiveInput = acceptInteractiveInput;

		this.flip6 = configuration.getString(CoreOptions.MODE).equalsIgnoreCase(CoreOptions.FLIP6_MODE);

		// Create the command line options

		query = new Option(shortPrefix + "q", longPrefix + "query", false, "Display available YARN resources (memory, cores)");
		applicationId = new Option(shortPrefix + "id", longPrefix + "applicationId", true, "Attach to running YARN session");
		queue = new Option(shortPrefix + "qu", longPrefix + "queue", true, "Specify YARN queue.");
		shipPath = new Option(shortPrefix + "t", longPrefix + "ship", true, "Ship files in the specified directory (t for transfer)");
		flinkJar = new Option(shortPrefix + "j", longPrefix +
			"jar", true, "Path to Flink jar file");
		jmMemory = new Option(shortPrefix + "jm", longPrefix + "jobManagerMemory", true, "Memory for JobManager Container [in MB]");
		tmMemory = new Option(shortPrefix + "tm", longPrefix + "taskManagerMemory", true, "Memory per TaskManager Container [in MB]");
		container = new Option(shortPrefix + "n", longPrefix + "container", true, "Number of YARN container to allocate (=Number of Task Managers)");
		slots = new Option(shortPrefix + "s", longPrefix + "slots", true, "Number of slots per TaskManager");
		dynamicproperties = Option.builder(shortPrefix + "D")
			.argName("property=value")
			.numberOfArgs(2)
			.valueSeparator()
			.desc("use value for given property")
			.build();
		detached = new Option(shortPrefix + "d", longPrefix + "detached", false, "Start detached");
		streaming = new Option(shortPrefix + "st", longPrefix + "streaming", false, "Start Flink in streaming mode");
		name = new Option(shortPrefix + "nm", longPrefix + "name", true, "Set a custom name for the application on YARN");
		zookeeperNamespace = new Option(shortPrefix + "z", longPrefix + "zookeeperNamespace", true, "Namespace to create the Zookeeper sub-paths for high availability mode");
		help = new Option(shortPrefix + "h", longPrefix + "help", false, "Help for the Yarn session CLI.");

		allOptions = new Options();
		allOptions.addOption(flinkJar);
		allOptions.addOption(jmMemory);
		allOptions.addOption(tmMemory);
		allOptions.addOption(container);
		allOptions.addOption(queue);
		allOptions.addOption(query);
		allOptions.addOption(shipPath);
		allOptions.addOption(slots);
		allOptions.addOption(dynamicproperties);
		allOptions.addOption(detached);
		allOptions.addOption(streaming);
		allOptions.addOption(name);
		allOptions.addOption(applicationId);
		allOptions.addOption(zookeeperNamespace);
		allOptions.addOption(help);

		// try loading a potential yarn properties file
		this.yarnPropertiesFileLocation = configuration.getString(YarnConfigOptions.PROPERTIES_FILE_LOCATION);
		final File yarnPropertiesLocation =
			getYarnPropertiesLocation(yarnPropertiesFileLocation);

		yarnPropertiesFile = new Properties();

		if (yarnPropertiesLocation.exists()) {
			LOG.info("Found Yarn properties file under {}.", yarnPropertiesLocation.getAbsolutePath());

			try (InputStream is = new FileInputStream(yarnPropertiesLocation)) {
				yarnPropertiesFile.load(is);
			} catch (IOException ioe) {
				throw new FlinkException("Could not read the Yarn properties file " +
					yarnPropertiesLocation + ". Please delete the file at " +
					yarnPropertiesLocation.getAbsolutePath() + '.', ioe);
			}

			final String yarnApplicationIdString = yarnPropertiesFile.getProperty(YARN_APPLICATION_ID_KEY);

			if (yarnApplicationIdString == null) {
				throw new FlinkException("Yarn properties file found but doesn't contain a " +
					"Yarn application id. Please delete the file at " +
					yarnPropertiesLocation.getAbsolutePath());
			}

			try {
				// try converting id to ApplicationId
				yarnApplicationIdFromYarnProperties = ConverterUtils.toApplicationId(yarnApplicationIdString);
			} catch (Exception e) {
				throw new FlinkException("YARN properties contains an invalid entry for " +
					"application id: " + yarnApplicationIdString + ". Please delete the file at " +
					yarnPropertiesLocation.getAbsolutePath(), e);
			}
		} else {
			// No properties file: there is no previous session to resume from.
			yarnApplicationIdFromYarnProperties = null;
		}

		this.yarnConfiguration = new YarnConfiguration();
	}

	/**
	 * Builds a cluster descriptor from the parsed command line: resolves the flink-dist jar,
	 * ship files, queue, encoded dynamic properties, detached mode, application name and
	 * ZooKeeper namespace.
	 *
	 * @param defaultApplicationName name used when the user did not pass one; may be null
	 */
	private AbstractYarnClusterDescriptor createDescriptor(
			Configuration configuration,
			YarnConfiguration yarnConfiguration,
			String configurationDirectory,
			String defaultApplicationName,
			CommandLine cmd) {

		AbstractYarnClusterDescriptor yarnClusterDescriptor = getClusterDescriptor(
			configuration,
			yarnConfiguration,
			configurationDirectory);

		// Jar Path
		final Path localJarPath;
		if (cmd.hasOption(flinkJar.getOpt())) {
			String userPath = cmd.getOptionValue(flinkJar.getOpt());
			if (!userPath.startsWith("file://")) {
				userPath = "file://" + userPath;
			}
			localJarPath = new Path(userPath);
		} else {
			LOG.info("No path for the flink jar passed. 
Using the location of " + yarnClusterDescriptor.getClass() + " to locate the jar");
			// Fall back to the code-source location of the descriptor class to find flink-dist.
			String encodedJarPath = yarnClusterDescriptor.getClass().getProtectionDomain().getCodeSource().getLocation().getPath();
			final String decodedPath;

			try {
				// we have to decode the url encoded parts of the path
				decodedPath = URLDecoder.decode(encodedJarPath, Charset.defaultCharset().name());
			} catch (UnsupportedEncodingException e) {
				throw new RuntimeException("Couldn't decode the encoded Flink dist jar path: " + encodedJarPath +
					" Please supply a path manually via the -" + flinkJar.getOpt() + " option.");
			}

			// check whether it's actually a jar file --> when testing we execute this class without a flink-dist jar
			if (decodedPath.endsWith(".jar")) {
				localJarPath = new Path(new File(decodedPath).toURI());
			} else {
				localJarPath = null;
			}
		}

		if (localJarPath != null) {
			yarnClusterDescriptor.setLocalJarPath(localJarPath);
		}

		List<File> shipFiles = new ArrayList<>();

		// path to directory to ship
		if (cmd.hasOption(shipPath.getOpt())) {
			String shipPath = cmd.getOptionValue(this.shipPath.getOpt());
			File shipDir = new File(shipPath);
			if (shipDir.isDirectory()) {
				shipFiles.add(shipDir);
			} else {
				LOG.warn("Ship directory is not a directory. 
Ignoring it.");
			}
		}

		yarnClusterDescriptor.addShipFiles(shipFiles);

		// queue
		if (cmd.hasOption(queue.getOpt())) {
			yarnClusterDescriptor.setQueue(cmd.getOptionValue(queue.getOpt()));
		}

		// Encode every -D key=value pair into a single separator-joined string for the descriptor.
		final Properties properties = cmd.getOptionProperties(dynamicproperties.getOpt());

		String[] dynamicProperties = properties.stringPropertyNames().stream()
			.flatMap(
				(String key) -> {
					final String value = properties.getProperty(key);

					if (value != null) {
						return Stream.of(key + dynamicproperties.getValueSeparator() + value);
					} else {
						return Stream.empty();
					}
				})
			.toArray(String[]::new);

		String dynamicPropertiesEncoded = StringUtils.join(dynamicProperties, YARN_DYNAMIC_PROPERTIES_SEPARATOR);

		yarnClusterDescriptor.setDynamicPropertiesEncoded(dynamicPropertiesEncoded);

		// Detached mode can be requested via the Yarn-specific flag or the generic CLI flag.
		if (cmd.hasOption(detached.getOpt()) || cmd.hasOption(CliFrontendParser.DETACHED_OPTION.getOpt())) {
			this.detachedMode = true;
			yarnClusterDescriptor.setDetachedMode(true);
		}

		if (cmd.hasOption(name.getOpt())) {
			yarnClusterDescriptor.setName(cmd.getOptionValue(name.getOpt()));
		} else {
			// set the default application name, if none is specified
			if (defaultApplicationName != null) {
				yarnClusterDescriptor.setName(defaultApplicationName);
			}
		}

		if (cmd.hasOption(zookeeperNamespace.getOpt())) {
			String zookeeperNamespaceValue = cmd.getOptionValue(this.zookeeperNamespace.getOpt());
			yarnClusterDescriptor.setZookeeperNamespace(zookeeperNamespaceValue);
		}

		return yarnClusterDescriptor;
	}

	/**
	 * Derives the cluster size (JobManager/TaskManager memory, container count, slots) from the
	 * effective configuration and the command line. In non-FLIP-6 mode the container count option
	 * is mandatory.
	 */
	private ClusterSpecification createClusterSpecification(Configuration configuration, CommandLine cmd) {
		if (!flip6 && !cmd.hasOption(container.getOpt())) { // number of containers is required option!
LOG.error("Missing required argument {}", container.getOpt());
			printUsage();
			throw new IllegalArgumentException("Missing required argument " + container.getOpt());
		}

		// TODO: The number of task manager should be deprecated soon
		final int numberTaskManagers;

		if (cmd.hasOption(container.getOpt())) {
			numberTaskManagers = Integer.valueOf(cmd.getOptionValue(container.getOpt()));
		} else {
			numberTaskManagers = 1;
		}

		// JobManager Memory
		final int jobManagerMemoryMB = configuration.getInteger(JobManagerOptions.JOB_MANAGER_HEAP_MEMORY);

		// Task Managers memory
		final int taskManagerMemoryMB = configuration.getInteger(TaskManagerOptions.TASK_MANAGER_HEAP_MEMORY);

		int slotsPerTaskManager = configuration.getInteger(TaskManagerOptions.NUM_TASK_SLOTS);

		return new ClusterSpecification.ClusterSpecificationBuilder()
			.setMasterMemoryMB(jobManagerMemoryMB)
			.setTaskManagerMemoryMB(taskManagerMemoryMB)
			.setNumberTaskManagers(numberTaskManagers)
			.setSlotsPerTaskManager(slotsPerTaskManager)
			.createClusterSpecification();
	}

	/** Prints the required and optional options of this CLI to stdout. */
	private void printUsage() {
		System.out.println("Usage:");
		HelpFormatter formatter = new HelpFormatter();
		formatter.setWidth(200);
		formatter.setLeftPadding(5);

		formatter.setSyntaxPrefix(" Required");
		Options req = new Options();
		req.addOption(container);
		formatter.printHelp(" ", req);

		formatter.setSyntaxPrefix(" Optional");
		Options options = new Options();
		addGeneralOptions(options);
		addRunOptions(options);
		formatter.printHelp(" ", options);
	}

	/**
	 * Active when the target is "yarn-cluster", an application id was given, or a Yarn
	 * properties file with an application id is usable for this command line.
	 */
	@Override
	public boolean isActive(CommandLine commandLine) {
		String jobManagerOption = commandLine.getOptionValue(addressOption.getOpt(), null);
		boolean yarnJobManager = ID.equals(jobManagerOption);
		boolean yarnAppId = commandLine.hasOption(applicationId.getOpt());
		return yarnJobManager || yarnAppId || (isYarnPropertiesFileMode(commandLine) && yarnApplicationIdFromYarnProperties != null);
	}

	@Override
	public String getId() {
		return ID;
	}

	@Override
	public void addRunOptions(Options baseOptions) {
		super.addRunOptions(baseOptions);

		for (Object option : allOptions.getOptions()) {
			baseOptions.addOption((Option) option);
		}
	}

	@Override
	public void addGeneralOptions(Options baseOptions) {
		super.addGeneralOptions(baseOptions);
		baseOptions.addOption(applicationId);
	}

	@Override
	public AbstractYarnClusterDescriptor createClusterDescriptor(CommandLine commandLine) throws FlinkException {
		final Configuration effectiveConfiguration = applyCommandLineOptionsToConfiguration(commandLine);

		return createDescriptor(
			effectiveConfiguration,
			yarnConfiguration,
			configurationDirectory,
			null,
			commandLine);
	}

	/**
	 * Returns the application id from the command line, else from the Yarn properties file,
	 * else null.
	 */
	@Override
	@Nullable
	public ApplicationId getClusterId(CommandLine commandLine) {
		if (commandLine.hasOption(applicationId.getOpt())) {
			return ConverterUtils.toApplicationId(commandLine.getOptionValue(applicationId.getOpt()));
		} else if (isYarnPropertiesFileMode(commandLine)) {
			return yarnApplicationIdFromYarnProperties;
		} else {
			return null;
		}
	}

	@Override
	public ClusterSpecification getClusterSpecification(CommandLine commandLine) throws FlinkException {
		final Configuration effectiveConfiguration = applyCommandLineOptionsToConfiguration(commandLine);

		return createClusterSpecification(effectiveConfiguration, commandLine);
	}

	/**
	 * Overlays command-line values (ZooKeeper namespace, JM/TM memory, slots) onto a copy of the
	 * configuration, and merges the Yarn properties file when running in properties-file mode.
	 */
	@Override
	protected Configuration applyCommandLineOptionsToConfiguration(CommandLine commandLine) throws FlinkException {
		// we ignore the addressOption because it can only contain "yarn-cluster"
		final Configuration effectiveConfiguration = new Configuration(configuration);

		if (commandLine.hasOption(zookeeperNamespaceOption.getOpt())) {
			String zkNamespace = commandLine.getOptionValue(zookeeperNamespaceOption.getOpt());
			effectiveConfiguration.setString(HA_CLUSTER_ID, zkNamespace);
		}

		final ApplicationId applicationId = getClusterId(commandLine);

		if (applicationId != null) {
			// When attaching to an existing application, default the HA cluster id to the app id.
			final String zooKeeperNamespace;
			if (commandLine.hasOption(zookeeperNamespace.getOpt())){
				zooKeeperNamespace = commandLine.getOptionValue(zookeeperNamespace.getOpt());
			} else {
				zooKeeperNamespace =
					effectiveConfiguration.getString(HA_CLUSTER_ID, applicationId.toString());
			}

			effectiveConfiguration.setString(HA_CLUSTER_ID, zooKeeperNamespace);
		}

		if (commandLine.hasOption(jmMemory.getOpt())) {
			effectiveConfiguration.setInteger(JobManagerOptions.JOB_MANAGER_HEAP_MEMORY, Integer.parseInt(commandLine.getOptionValue(jmMemory.getOpt())));
		}

		if (commandLine.hasOption(tmMemory.getOpt())) {
			effectiveConfiguration.setInteger(TaskManagerOptions.TASK_MANAGER_HEAP_MEMORY, Integer.parseInt(commandLine.getOptionValue(tmMemory.getOpt())));
		}

		if (commandLine.hasOption(slots.getOpt())) {
			effectiveConfiguration.setInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, Integer.parseInt(commandLine.getOptionValue(slots.getOpt())));
		}

		if (isYarnPropertiesFileMode(commandLine)) {
			return applyYarnProperties(effectiveConfiguration);
		} else {
			return effectiveConfiguration;
		}
	}

	/**
	 * True when no address was given and no Yarn-specific option (other than detached) was set,
	 * i.e. we may resume the session stored in the Yarn properties file.
	 */
	private boolean isYarnPropertiesFileMode(CommandLine commandLine) {
		boolean canApplyYarnProperties = !commandLine.hasOption(addressOption.getOpt());

		for (Option option : commandLine.getOptions()) {
			if (allOptions.hasOption(option.getOpt())) {
				if (!option.getOpt().equals(detached.getOpt())) {
					// don't resume from properties file if yarn options have been specified
					canApplyYarnProperties = false;
					break;
				}
			}
		}

		return canApplyYarnProperties;
	}

	/**
	 * Merges parallelism and dynamic properties from the Yarn properties file into a copy of
	 * the given configuration.
	 *
	 * @throws FlinkException if the stored parallelism is not an integer
	 */
	private Configuration applyYarnProperties(Configuration configuration) throws FlinkException {
		final Configuration effectiveConfiguration = new Configuration(configuration);

		// configure the default parallelism from YARN
		String propParallelism = yarnPropertiesFile.getProperty(YARN_PROPERTIES_PARALLELISM);
		if (propParallelism != null) { // maybe the property is not set
			try {
				int parallelism = Integer.parseInt(propParallelism);
				effectiveConfiguration.setInteger(CoreOptions.DEFAULT_PARALLELISM, parallelism);

				logAndSysout("YARN properties set default parallelism to " + parallelism);
			} catch (NumberFormatException e) {
				throw new FlinkException("Error while parsing the YARN properties: 
" + "Property " + YARN_PROPERTIES_PARALLELISM + " is not an integer.", e); } } // handle the YARN client's dynamic properties String dynamicPropertiesEncoded = yarnPropertiesFile.getProperty(YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING); Map<String, String> dynamicProperties = getDynamicProperties(dynamicPropertiesEncoded); for (Map.Entry<String, String> dynamicProperty : dynamicProperties.entrySet()) { effectiveConfiguration.setString(dynamicProperty.getKey(), dynamicProperty.getValue()); } return effectiveConfiguration; } public int run(String[] args) throws CliArgsException, FlinkException { // // Command Line Options // final CommandLine cmd = parseCommandLineOptions(args, true); if (cmd.hasOption(help.getOpt())) { printUsage(); return 0; } final AbstractYarnClusterDescriptor yarnClusterDescriptor = createClusterDescriptor(cmd); try { // Query cluster for metrics if (cmd.hasOption(query.getOpt())) { final String description = yarnClusterDescriptor.getClusterDescription(); System.out.println(description); return 0; } else { final ClusterClient<ApplicationId> clusterClient; final ApplicationId yarnApplicationId; if (cmd.hasOption(applicationId.getOpt())) { yarnApplicationId = ConverterUtils.toApplicationId(cmd.getOptionValue(applicationId.getOpt())); clusterClient = yarnClusterDescriptor.retrieve(yarnApplicationId); } else { final ClusterSpecification clusterSpecification = getClusterSpecification(cmd); clusterClient = yarnClusterDescriptor.deploySessionCluster(clusterSpecification); //------------------ ClusterClient deployed, handle connection details yarnApplicationId = clusterClient.getClusterId(); try { final LeaderConnectionInfo connectionInfo = clusterClient.getClusterConnectionInfo(); System.out.println("Flink JobManager is now running on " + connectionInfo.getHostname() + ':' + connectionInfo.getPort() + " with leader id " + connectionInfo.getLeaderSessionID() + '.'); System.out.println("JobManager Web Interface: " + clusterClient.getWebInterfaceURL()); 
writeYarnPropertiesFile( yarnApplicationId, clusterSpecification.getNumberTaskManagers() * clusterSpecification.getSlotsPerTaskManager(), yarnClusterDescriptor.getDynamicPropertiesEncoded()); } catch (Exception e) { try { clusterClient.shutdown(); } catch (Exception ex) { LOG.info("Could not properly shutdown cluster client.", ex); } try { yarnClusterDescriptor.terminateCluster(yarnApplicationId); } catch (FlinkException fe) { LOG.info("Could not properly terminate the Flink cluster.", fe); } throw new FlinkException("Could not write the Yarn connection information.", e); } } if (detachedMode) { LOG.info("The Flink YARN client has been started in detached mode. In order to stop " + "Flink on YARN, use the following command or a YARN web interface to stop it:\n" + "yarn application -kill " + applicationId.getOpt()); } else { ScheduledExecutorService scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(); final YarnApplicationStatusMonitor yarnApplicationStatusMonitor = new YarnApplicationStatusMonitor( yarnClusterDescriptor.getYarnClient(), yarnApplicationId, new ScheduledExecutorServiceAdapter(scheduledExecutorService)); try { runInteractiveCli( clusterClient, yarnApplicationStatusMonitor, acceptInteractiveInput); } finally { try { yarnApplicationStatusMonitor.close(); } catch (Exception e) { LOG.info("Could not properly close the Yarn application status monitor.", e); } try { clusterClient.shutdown(); } catch (Exception e) { LOG.info("Could not properly shutdown cluster client.", e); } try { yarnClusterDescriptor.terminateCluster(yarnApplicationId); } catch (FlinkException e) { LOG.info("Could not properly terminate the Flink cluster.", e); } // shut down the scheduled executor service ExecutorUtils.gracefulShutdown( 1000L, TimeUnit.MILLISECONDS, scheduledExecutorService); deleteYarnPropertiesFile(); ApplicationReport applicationReport; try { applicationReport = yarnClusterDescriptor .getYarnClient() .getApplicationReport(yarnApplicationId); } 
catch (YarnException | IOException e) { LOG.info("Could not log the final application report.", e); applicationReport = null; } if (applicationReport != null) { logFinalApplicationReport(applicationReport); } } } } } finally { try { yarnClusterDescriptor.close(); } catch (Exception e) { LOG.info("Could not properly close the yarn cluster descriptor.", e); } } return 0; } private void logFinalApplicationReport(ApplicationReport appReport) { LOG.info("Application " + appReport.getApplicationId() + " finished with state " + appReport .getYarnApplicationState() + " and final state " + appReport .getFinalApplicationStatus() + " at " + appReport.getFinishTime()); if (appReport.getYarnApplicationState() == YarnApplicationState.FAILED) { LOG.warn("Application failed. Diagnostics " + appReport.getDiagnostics()); LOG.warn("If log aggregation is activated in the Hadoop cluster, we recommend to retrieve " + "the full application log using this command:" + System.lineSeparator() + "\tyarn logs -applicationId " + appReport.getApplicationId() + System.lineSeparator() + "(It sometimes takes a few seconds until the logs are aggregated)"); } } private void deleteYarnPropertiesFile() { // try to clean up the old yarn properties file try { File propertiesFile = getYarnPropertiesLocation(yarnPropertiesFileLocation); if (propertiesFile.isFile()) { if (propertiesFile.delete()) { LOG.info("Deleted Yarn properties file at {}", propertiesFile.getAbsoluteFile()); } else { LOG.warn("Couldn't delete Yarn properties file at {}", propertiesFile.getAbsoluteFile()); } } } catch (Exception e) { LOG.warn("Exception while deleting the JobManager address file", e); } } private void writeYarnPropertiesFile( ApplicationId yarnApplicationId, int parallelism, @Nullable String dynamicProperties) { // file that we write into the conf/ dir containing the jobManager address and the dop. 
final File yarnPropertiesFile = getYarnPropertiesLocation(yarnPropertiesFileLocation);

		Properties yarnProps = new Properties();
		yarnProps.setProperty(YARN_APPLICATION_ID_KEY, yarnApplicationId.toString());
		if (parallelism > 0) {
			yarnProps.setProperty(YARN_PROPERTIES_PARALLELISM, Integer.toString(parallelism));
		}

		// add dynamic properties
		if (dynamicProperties != null) {
			yarnProps.setProperty(YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING, dynamicProperties);
		}

		writeYarnProperties(yarnProps, yarnPropertiesFile);
	}

	/** Logs the message and also prints it to stdout for the interactive user. */
	private void logAndSysout(String message) {
		LOG.info(message);
		System.out.println(message);
	}

	/**
	 * Decodes the separator-joined "key=value" pairs produced by the session client back into a
	 * map. Returns an empty map for null or empty input; entries without '=' or with an empty
	 * key are skipped.
	 */
	public static Map<String, String> getDynamicProperties(String dynamicPropertiesEncoded) {
		if (dynamicPropertiesEncoded != null && dynamicPropertiesEncoded.length() > 0) {
			Map<String, String> properties = new HashMap<>();

			String[] propertyLines = dynamicPropertiesEncoded.split(YARN_DYNAMIC_PROPERTIES_SEPARATOR);
			for (String propLine : propertyLines) {
				if (propLine == null) {
					continue;
				}

				int firstEquals = propLine.indexOf("=");

				if (firstEquals >= 0) {
					String key = propLine.substring(0, firstEquals).trim();
					String value = propLine.substring(firstEquals + 1, propLine.length()).trim();

					if (!key.isEmpty()) {
						properties.put(key, value);
					}
				}
			}
			return properties;
		} else {
			return Collections.emptyMap();
		}
	}

	/** Entry point of the Yarn session CLI; installs the security context and exits with run()'s code. */
	public static void main(final String[] args) {
		final String configurationDirectory = CliFrontend.getConfigurationDirectoryFromEnv();
		final Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration();

		int retCode;

		try {
			final FlinkYarnSessionCli cli = new FlinkYarnSessionCli(
				flinkConfiguration,
				configurationDirectory,
				"",
				""); // no prefix for the YARN session

			SecurityUtils.install(new SecurityConfiguration(flinkConfiguration));

			retCode = SecurityUtils.getInstalledContext().runSecured(() -> cli.run(args));
		} catch (CliArgsException e) {
			retCode = handleCliArgsException(e);
		} catch (Exception e) {
			retCode = handleError(e);
		}

		System.exit(retCode);
	}

	/**
	 * Interactive loop that polls the Yarn application status, reports TaskManager changes and
	 * cluster messages, and reacts to user commands until the user stops or the cluster reaches
	 * a terminal/unknown state. Exceptions are logged, not rethrown.
	 */
	private static void runInteractiveCli(
			ClusterClient<?> clusterClient,
			YarnApplicationStatusMonitor yarnApplicationStatusMonitor,
			boolean readConsoleInput) {
		try (BufferedReader in = new BufferedReader(new InputStreamReader(System.in))) {
			boolean continueRepl = true;
			int numTaskmanagers = 0;
			boolean isLastStatusUnknown = true;
			long unknownStatusSince = System.nanoTime();

			while (continueRepl) {

				final ApplicationStatus applicationStatus = yarnApplicationStatusMonitor.getApplicationStatusNow();

				switch (applicationStatus) {
					case FAILED:
					case CANCELED:
						System.err.println("The Flink Yarn cluster has failed.");
						continueRepl = false;
						break;
					case UNKNOWN:
						if (!isLastStatusUnknown) {
							unknownStatusSince = System.nanoTime();
							isLastStatusUnknown = true;
						}

						// Give up after roughly five polling intervals of continuous unknown status.
						if ((System.nanoTime() - unknownStatusSince) > 5L * CLIENT_POLLING_INTERVAL_MS * 1_000_000L) {
							System.err.println("The Flink Yarn cluster is in an unknown state. Please check the Yarn cluster.");
							continueRepl = false;
						} else {
							continueRepl = repStep(in, readConsoleInput);
						}
						break;
					case SUCCEEDED:
						if (isLastStatusUnknown) {
							isLastStatusUnknown = false;
						}

						// ------------------ check if there are updates by the cluster -----------
						try {
							final GetClusterStatusResponse status = clusterClient.getClusterStatus();

							if (status != null && numTaskmanagers != status.numRegisteredTaskManagers()) {
								System.err.println("Number of connected TaskManagers changed to " +
									status.numRegisteredTaskManagers() + ". " +
									"Slots available: " + status.totalNumberOfSlots());
								numTaskmanagers = status.numRegisteredTaskManagers();
							}
						} catch (Exception e) {
							LOG.warn("Could not retrieve the current cluster status. 
Skipping current retrieval attempt ...", e);
						}

						printClusterMessages(clusterClient);

						continueRepl = repStep(in, readConsoleInput);
				}
			}
		} catch (Exception e) {
			LOG.warn("Exception while running the interactive command line interface.", e);
		}
	}

	/** Prints any new messages reported by the cluster to stderr. */
	private static void printClusterMessages(ClusterClient clusterClient) {
		final List<String> messages = clusterClient.getNewMessages();
		if (!messages.isEmpty()) {
			System.err.println("New messages from the YARN cluster: ");
			for (String msg : messages) {
				System.err.println(msg);
			}
		}
	}

	/**
	 * Read-Evaluate-Print step for the REPL.
	 *
	 * @param in to read from
	 * @param readConsoleInput true if console input has to be read
	 * @return true if the REPL shall be continued, otherwise false
	 * @throws IOException if reading from the console fails
	 * @throws InterruptedException if the polling sleep is interrupted
	 */
	private static boolean repStep(
			BufferedReader in,
			boolean readConsoleInput) throws IOException, InterruptedException {

		// wait until CLIENT_POLLING_INTERVAL is over or the user entered something.
		long startTime = System.currentTimeMillis();
		while ((System.currentTimeMillis() - startTime) < CLIENT_POLLING_INTERVAL_MS
			&& (!readConsoleInput || !in.ready())) {
			Thread.sleep(200L);
		}
		//------------- handle interactive command by user. ----------------------

		if (readConsoleInput && in.ready()) {
			String command = in.readLine();
			switch (command) {
				case "quit":
				case "stop":
					return false;

				case "help":
					System.err.println(YARN_SESSION_HELP);
					break;
				default:
					System.err.println("Unknown command '" + command + "'. Showing help:");
					System.err.println(YARN_SESSION_HELP);
					break;
			}
		}

		return true;
	}

	/** Writes the given properties to the given file; the file is made world-readable. */
	private static void writeYarnProperties(Properties properties, File propertiesFile) {
		try (final OutputStream out = new FileOutputStream(propertiesFile)) {
			properties.store(out, "Generated YARN properties file");
		} catch (IOException e) {
			throw new RuntimeException("Error writing the properties file", e);
		}

		propertiesFile.setReadable(true, false); // readable for all.
	}

	/**
	 * Handles a failure to parse the command line arguments: logs the error and points the
	 * user at the help option.
	 *
	 * @param e the argument parsing failure
	 * @return process exit code (always 1)
	 */
	private static int handleCliArgsException(CliArgsException e) {
		LOG.error("Could not parse the command line arguments.", e);

		System.out.println(e.getMessage());
		System.out.println();
		System.out.println("Use the help option (-h or --help) to get help on the command.");
		return 1;
	}

	/**
	 * Handles any other error of the YARN session: logs it and prints the stack trace to stderr.
	 *
	 * @param e the failure
	 * @return process exit code (always 1)
	 */
	private static int handleError(Exception e) {
		LOG.error("Error while running the Flink Yarn session.", e);

		System.err.println();
		System.err.println("------------------------------------------------------------");
		System.err.println(" The program finished with the following exception:");
		System.err.println();

		e.printStackTrace();
		return 1;
	}

	/**
	 * Computes the per-user location of the YARN properties file.
	 *
	 * @param yarnPropertiesFileLocation configured parent directory, or null to fall back to
	 *                                   the system temp directory (java.io.tmpdir)
	 * @return the YARN properties file, suffixed with the current user name
	 */
	public static File getYarnPropertiesLocation(@Nullable String yarnPropertiesFileLocation) {
		final String propertiesFileLocation;

		if (yarnPropertiesFileLocation != null) {
			propertiesFileLocation = yarnPropertiesFileLocation;
		} else {
			// default: system temp dir, shared location keyed by user name below
			propertiesFileLocation = System.getProperty("java.io.tmpdir");
		}

		String currentUser = System.getProperty("user.name");

		return new File(propertiesFileLocation, YARN_PROPERTIES_FILE + currentUser);
	}

	/**
	 * Creates the cluster descriptor matching the configured execution mode (FLIP-6 vs. legacy),
	 * backed by a freshly created and started YarnClient.
	 */
	private AbstractYarnClusterDescriptor getClusterDescriptor(
			Configuration configuration,
			YarnConfiguration yarnConfiguration,
			String configurationDirectory) {
		// the descriptor takes ownership of this client (closed via yarnClusterDescriptor.close())
		final YarnClient yarnClient = YarnClient.createYarnClient();
		yarnClient.init(yarnConfiguration);
		yarnClient.start();

		if (flip6) {
			return new Flip6YarnClusterDescriptor(
				configuration,
				yarnConfiguration,
				configurationDirectory,
				yarnClient,
				false);
		} else {
			return new YarnClusterDescriptor(
				configuration,
				yarnConfiguration,
				configurationDirectory,
				yarnClient,
				false);
		}
	}
}
flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.yarn.cli; import org.apache.flink.client.cli.AbstractCustomCommandLine; import org.apache.flink.client.cli.CliArgsException; import org.apache.flink.client.cli.CliFrontend; import org.apache.flink.client.cli.CliFrontendParser; import org.apache.flink.client.deployment.ClusterSpecification; import org.apache.flink.client.program.ClusterClient; import org.apache.flink.configuration.ConfigConstants; import org.apache.flink.configuration.Configuration; import org.apache.flink.configuration.CoreOptions; import org.apache.flink.configuration.GlobalConfiguration; import org.apache.flink.configuration.JobManagerOptions; import org.apache.flink.configuration.TaskManagerOptions; import org.apache.flink.runtime.clusterframework.ApplicationStatus; import org.apache.flink.runtime.clusterframework.messages.GetClusterStatusResponse; import org.apache.flink.runtime.concurrent.ScheduledExecutorServiceAdapter; import org.apache.flink.runtime.security.SecurityConfiguration; import org.apache.flink.runtime.security.SecurityUtils; import org.apache.flink.runtime.util.LeaderConnectionInfo; import org.apache.flink.util.ExecutorUtils; import org.apache.flink.util.FlinkException; import 
org.apache.flink.util.Preconditions; import org.apache.flink.yarn.AbstractYarnClusterDescriptor; import org.apache.flink.yarn.Flip6YarnClusterDescriptor; import org.apache.flink.yarn.YarnClusterDescriptor; import org.apache.flink.yarn.configuration.YarnConfigOptions; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.fs.Path; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ApplicationReport; import org.apache.hadoop.yarn.api.records.YarnApplicationState; import org.apache.hadoop.yarn.client.api.YarnClient; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.util.ConverterUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.annotation.Nullable; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.UnsupportedEncodingException; import java.net.URLDecoder; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; import static org.apache.flink.configuration.HighAvailabilityOptions.HA_CLUSTER_ID; /** * Class handling the command line interface to the YARN session. 
*/ public class FlinkYarnSessionCli extends AbstractCustomCommandLine<ApplicationId> { private static final Logger LOG = LoggerFactory.getLogger(FlinkYarnSessionCli.class); //------------------------------------ Constants ------------------------- public static final String CONFIG_FILE_LOGBACK_NAME = "logback.xml"; public static final String CONFIG_FILE_LOG4J_NAME = "log4j.properties"; private static final long CLIENT_POLLING_INTERVAL_MS = 3000L; /** The id for the CommandLine interface. */ private static final String ID = "yarn-cluster"; // YARN-session related constants private static final String YARN_PROPERTIES_FILE = ".yarn-properties-"; private static final String YARN_APPLICATION_ID_KEY = "applicationID"; private static final String YARN_PROPERTIES_PARALLELISM = "parallelism"; private static final String YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING = "dynamicPropertiesString"; private static final String YARN_DYNAMIC_PROPERTIES_SEPARATOR = "@@"; // this has to be a regex for String.split() private static final String YARN_SESSION_HELP = "Available commands:\n" + "help - show these commands\n" + "stop - stop the YARN session"; //------------------------------------ Command Line argument options ------------------------- // the prefix transformation is used by the CliFrontend static constructor. private final Option query; // --- or --- private final Option applicationId; // --- or --- private final Option queue; private final Option shipPath; private final Option flinkJar; private final Option jmMemory; private final Option tmMemory; private final Option container; private final Option slots; private final Option detached; private final Option zookeeperNamespace; private final Option help; /** * @deprecated Streaming mode has been deprecated without replacement. Set the * {@link TaskManagerOptions#MANAGED_MEMORY_PRE_ALLOCATE} configuration * key to true to get the previous batch mode behaviour. 
*/ @Deprecated private final Option streaming; private final Option name; private final Options allOptions; /** * Dynamic properties allow the user to specify additional configuration values with -D, such as * <tt> -Dfs.overwrite-files=true -Dtaskmanager.network.memory.min=536346624</tt>. */ private final Option dynamicproperties; private final boolean acceptInteractiveInput; private final String configurationDirectory; private final Properties yarnPropertiesFile; private final ApplicationId yarnApplicationIdFromYarnProperties; private final String yarnPropertiesFileLocation; private final boolean flip6; private final YarnConfiguration yarnConfiguration; //------------------------------------ Internal fields ------------------------- private boolean detachedMode = false; public FlinkYarnSessionCli( Configuration configuration, String configurationDirectory, String shortPrefix, String longPrefix) throws FlinkException { this(configuration, configurationDirectory, shortPrefix, longPrefix, true); } public FlinkYarnSessionCli( Configuration configuration, String configurationDirectory, String shortPrefix, String longPrefix, boolean acceptInteractiveInput) throws FlinkException { super(configuration); this.configurationDirectory = Preconditions.checkNotNull(configurationDirectory); this.acceptInteractiveInput = acceptInteractiveInput; this.flip6 = configuration.getString(CoreOptions.MODE).equalsIgnoreCase(CoreOptions.FLIP6_MODE); // Create the command line options query = new Option(shortPrefix + "q", longPrefix + "query", false, "Display available YARN resources (memory, cores)"); applicationId = new Option(shortPrefix + "id", longPrefix + "applicationId", true, "Attach to running YARN session"); queue = new Option(shortPrefix + "qu", longPrefix + "queue", true, "Specify YARN queue."); shipPath = new Option(shortPrefix + "t", longPrefix + "ship", true, "Ship files in the specified directory (t for transfer)"); flinkJar = new Option(shortPrefix + "j", longPrefix + 
"jar", true, "Path to Flink jar file"); jmMemory = new Option(shortPrefix + "jm", longPrefix + "jobManagerMemory", true, "Memory for JobManager Container [in MB]"); tmMemory = new Option(shortPrefix + "tm", longPrefix + "taskManagerMemory", true, "Memory per TaskManager Container [in MB]"); container = new Option(shortPrefix + "n", longPrefix + "container", true, "Number of YARN container to allocate (=Number of Task Managers)"); slots = new Option(shortPrefix + "s", longPrefix + "slots", true, "Number of slots per TaskManager"); dynamicproperties = Option.builder(shortPrefix + "D") .argName("property=value") .numberOfArgs(2) .valueSeparator() .desc("use value for given property") .build(); detached = new Option(shortPrefix + "d", longPrefix + "detached", false, "Start detached"); streaming = new Option(shortPrefix + "st", longPrefix + "streaming", false, "Start Flink in streaming mode"); name = new Option(shortPrefix + "nm", longPrefix + "name", true, "Set a custom name for the application on YARN"); zookeeperNamespace = new Option(shortPrefix + "z", longPrefix + "zookeeperNamespace", true, "Namespace to create the Zookeeper sub-paths for high availability mode"); help = new Option(shortPrefix + "h", longPrefix + "help", false, "Help for the Yarn session CLI."); allOptions = new Options(); allOptions.addOption(flinkJar); allOptions.addOption(jmMemory); allOptions.addOption(tmMemory); allOptions.addOption(container); allOptions.addOption(queue); allOptions.addOption(query); allOptions.addOption(shipPath); allOptions.addOption(slots); allOptions.addOption(dynamicproperties); allOptions.addOption(detached); allOptions.addOption(streaming); allOptions.addOption(name); allOptions.addOption(applicationId); allOptions.addOption(zookeeperNamespace); allOptions.addOption(help); // try loading a potential yarn properties file this.yarnPropertiesFileLocation = configuration.getString(YarnConfigOptions.PROPERTIES_FILE_LOCATION); final File yarnPropertiesLocation = 
getYarnPropertiesLocation(yarnPropertiesFileLocation); yarnPropertiesFile = new Properties(); if (yarnPropertiesLocation.exists()) { LOG.info("Found Yarn properties file under {}.", yarnPropertiesLocation.getAbsolutePath()); try (InputStream is = new FileInputStream(yarnPropertiesLocation)) { yarnPropertiesFile.load(is); } catch (IOException ioe) { throw new FlinkException("Could not read the Yarn properties file " + yarnPropertiesLocation + ". Please delete the file at " + yarnPropertiesLocation.getAbsolutePath() + '.', ioe); } final String yarnApplicationIdString = yarnPropertiesFile.getProperty(YARN_APPLICATION_ID_KEY); if (yarnApplicationIdString == null) { throw new FlinkException("Yarn properties file found but doesn't contain a " + "Yarn application id. Please delete the file at " + yarnPropertiesLocation.getAbsolutePath()); } try { // try converting id to ApplicationId yarnApplicationIdFromYarnProperties = ConverterUtils.toApplicationId(yarnApplicationIdString); } catch (Exception e) { throw new FlinkException("YARN properties contains an invalid entry for " + "application id: " + yarnApplicationIdString + ". Please delete the file at " + yarnPropertiesLocation.getAbsolutePath(), e); } } else { yarnApplicationIdFromYarnProperties = null; } this.yarnConfiguration = new YarnConfiguration(); } private AbstractYarnClusterDescriptor createDescriptor( Configuration configuration, YarnConfiguration yarnConfiguration, String configurationDirectory, String defaultApplicationName, CommandLine cmd) { AbstractYarnClusterDescriptor yarnClusterDescriptor = getClusterDescriptor( configuration, yarnConfiguration, configurationDirectory); // Jar Path final Path localJarPath; if (cmd.hasOption(flinkJar.getOpt())) { String userPath = cmd.getOptionValue(flinkJar.getOpt()); if (!userPath.startsWith("file://")) { userPath = "file://" + userPath; } localJarPath = new Path(userPath); } else { LOG.info("No path for the flink jar passed. 
Using the location of " + yarnClusterDescriptor.getClass() + " to locate the jar"); String encodedJarPath = yarnClusterDescriptor.getClass().getProtectionDomain().getCodeSource().getLocation().getPath(); final String decodedPath; try { // we have to decode the url encoded parts of the path decodedPath = URLDecoder.decode(encodedJarPath, Charset.defaultCharset().name()); } catch (UnsupportedEncodingException e) { throw new RuntimeException("Couldn't decode the encoded Flink dist jar path: " + encodedJarPath + " Please supply a path manually via the -" + flinkJar.getOpt() + " option."); } // check whether it's actually a jar file --> when testing we execute this class without a flink-dist jar if (decodedPath.endsWith(".jar")) { localJarPath = new Path(new File(decodedPath).toURI()); } else { localJarPath = null; } } if (localJarPath != null) { yarnClusterDescriptor.setLocalJarPath(localJarPath); } List<File> shipFiles = new ArrayList<>(); // path to directory to ship if (cmd.hasOption(shipPath.getOpt())) { String shipPath = cmd.getOptionValue(this.shipPath.getOpt()); File shipDir = new File(shipPath); if (shipDir.isDirectory()) { shipFiles.add(shipDir); } else { LOG.warn("Ship directory is not a directory. 
Ignoring it."); } } yarnClusterDescriptor.addShipFiles(shipFiles); // queue if (cmd.hasOption(queue.getOpt())) { yarnClusterDescriptor.setQueue(cmd.getOptionValue(queue.getOpt())); } final Properties properties = cmd.getOptionProperties(dynamicproperties.getOpt()); String[] dynamicProperties = properties.stringPropertyNames().stream() .flatMap( (String key) -> { final String value = properties.getProperty(key); if (value != null) { return Stream.of(key + dynamicproperties.getValueSeparator() + value); } else { return Stream.empty(); } }) .toArray(String[]::new); String dynamicPropertiesEncoded = StringUtils.join(dynamicProperties, YARN_DYNAMIC_PROPERTIES_SEPARATOR); yarnClusterDescriptor.setDynamicPropertiesEncoded(dynamicPropertiesEncoded); if (cmd.hasOption(detached.getOpt()) || cmd.hasOption(CliFrontendParser.DETACHED_OPTION.getOpt())) { this.detachedMode = true; yarnClusterDescriptor.setDetachedMode(true); } if (cmd.hasOption(name.getOpt())) { yarnClusterDescriptor.setName(cmd.getOptionValue(name.getOpt())); } else { // set the default application name, if none is specified if (defaultApplicationName != null) { yarnClusterDescriptor.setName(defaultApplicationName); } } if (cmd.hasOption(zookeeperNamespace.getOpt())) { String zookeeperNamespaceValue = cmd.getOptionValue(this.zookeeperNamespace.getOpt()); yarnClusterDescriptor.setZookeeperNamespace(zookeeperNamespaceValue); } return yarnClusterDescriptor; } private ClusterSpecification createClusterSpecification(Configuration configuration, CommandLine cmd) { if (!flip6 && !cmd.hasOption(container.getOpt())) { // number of containers is required option! 
LOG.error("Missing required argument {}", container.getOpt()); printUsage(); throw new IllegalArgumentException("Missing required argument " + container.getOpt()); } int numberTaskManagers = Integer.valueOf(cmd.getOptionValue(container.getOpt())); // JobManager Memory final int jobManagerMemoryMB = configuration.getInteger(JobManagerOptions.JOB_MANAGER_HEAP_MEMORY); // Task Managers memory final int taskManagerMemoryMB = configuration.getInteger(TaskManagerOptions.TASK_MANAGER_HEAP_MEMORY); int slotsPerTaskManager = configuration.getInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, 1); return new ClusterSpecification.ClusterSpecificationBuilder() .setMasterMemoryMB(jobManagerMemoryMB) .setTaskManagerMemoryMB(taskManagerMemoryMB) .setNumberTaskManagers(numberTaskManagers) .setSlotsPerTaskManager(slotsPerTaskManager) .createClusterSpecification(); } private void printUsage() { System.out.println("Usage:"); HelpFormatter formatter = new HelpFormatter(); formatter.setWidth(200); formatter.setLeftPadding(5); formatter.setSyntaxPrefix(" Required"); Options req = new Options(); req.addOption(container); formatter.printHelp(" ", req); formatter.setSyntaxPrefix(" Optional"); Options options = new Options(); addGeneralOptions(options); addRunOptions(options); formatter.printHelp(" ", options); } @Override public boolean isActive(CommandLine commandLine) { String jobManagerOption = commandLine.getOptionValue(addressOption.getOpt(), null); boolean yarnJobManager = ID.equals(jobManagerOption); boolean yarnAppId = commandLine.hasOption(applicationId.getOpt()); return yarnJobManager || yarnAppId || (isYarnPropertiesFileMode(commandLine) && yarnApplicationIdFromYarnProperties != null); } @Override public String getId() { return ID; } @Override public void addRunOptions(Options baseOptions) { super.addRunOptions(baseOptions); for (Object option : allOptions.getOptions()) { baseOptions.addOption((Option) option); } } @Override public void addGeneralOptions(Options baseOptions) { 
super.addGeneralOptions(baseOptions); baseOptions.addOption(applicationId); } @Override public AbstractYarnClusterDescriptor createClusterDescriptor(CommandLine commandLine) throws FlinkException { final Configuration effectiveConfiguration = applyCommandLineOptionsToConfiguration(commandLine); return createDescriptor( effectiveConfiguration, yarnConfiguration, configurationDirectory, null, commandLine); } @Override @Nullable public ApplicationId getClusterId(CommandLine commandLine) { if (commandLine.hasOption(applicationId.getOpt())) { return ConverterUtils.toApplicationId(commandLine.getOptionValue(applicationId.getOpt())); } else if (isYarnPropertiesFileMode(commandLine)) { return yarnApplicationIdFromYarnProperties; } else { return null; } } @Override public ClusterSpecification getClusterSpecification(CommandLine commandLine) throws FlinkException { final Configuration effectiveConfiguration = applyCommandLineOptionsToConfiguration(commandLine); return createClusterSpecification(effectiveConfiguration, commandLine); } @Override protected Configuration applyCommandLineOptionsToConfiguration(CommandLine commandLine) throws FlinkException { // we ignore the addressOption because it can only contain "yarn-cluster" final Configuration effectiveConfiguration = new Configuration(configuration); if (commandLine.hasOption(zookeeperNamespaceOption.getOpt())) { String zkNamespace = commandLine.getOptionValue(zookeeperNamespaceOption.getOpt()); effectiveConfiguration.setString(HA_CLUSTER_ID, zkNamespace); } final ApplicationId applicationId = getClusterId(commandLine); if (applicationId != null) { final String zooKeeperNamespace; if (commandLine.hasOption(zookeeperNamespace.getOpt())){ zooKeeperNamespace = commandLine.getOptionValue(zookeeperNamespace.getOpt()); } else { zooKeeperNamespace = effectiveConfiguration.getString(HA_CLUSTER_ID, applicationId.toString()); } effectiveConfiguration.setString(HA_CLUSTER_ID, zooKeeperNamespace); } if 
(commandLine.hasOption(jmMemory.getOpt())) { effectiveConfiguration.setInteger(JobManagerOptions.JOB_MANAGER_HEAP_MEMORY, Integer.parseInt(commandLine.getOptionValue(jmMemory.getOpt()))); } if (commandLine.hasOption(tmMemory.getOpt())) { effectiveConfiguration.setInteger(TaskManagerOptions.TASK_MANAGER_HEAP_MEMORY, Integer.parseInt(commandLine.getOptionValue(tmMemory.getOpt()))); } if (commandLine.hasOption(slots.getOpt())) { effectiveConfiguration.setInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, Integer.parseInt(commandLine.getOptionValue(slots.getOpt()))); } if (isYarnPropertiesFileMode(commandLine)) { return applyYarnProperties(effectiveConfiguration); } else { return effectiveConfiguration; } } private boolean isYarnPropertiesFileMode(CommandLine commandLine) { boolean canApplyYarnProperties = !commandLine.hasOption(addressOption.getOpt()); for (Option option : commandLine.getOptions()) { if (allOptions.hasOption(option.getOpt())) { if (!option.getOpt().equals(detached.getOpt())) { // don't resume from properties file if yarn options have been specified canApplyYarnProperties = false; break; } } } return canApplyYarnProperties; } private Configuration applyYarnProperties(Configuration configuration) throws FlinkException { final Configuration effectiveConfiguration = new Configuration(configuration); // configure the default parallelism from YARN String propParallelism = yarnPropertiesFile.getProperty(YARN_PROPERTIES_PARALLELISM); if (propParallelism != null) { // maybe the property is not set try { int parallelism = Integer.parseInt(propParallelism); effectiveConfiguration.setInteger(CoreOptions.DEFAULT_PARALLELISM, parallelism); logAndSysout("YARN properties set default parallelism to " + parallelism); } catch (NumberFormatException e) { throw new FlinkException("Error while parsing the YARN properties: " + "Property " + YARN_PROPERTIES_PARALLELISM + " is not an integer.", e); } } // handle the YARN client's dynamic properties String 
dynamicPropertiesEncoded = yarnPropertiesFile.getProperty(YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING); Map<String, String> dynamicProperties = getDynamicProperties(dynamicPropertiesEncoded); for (Map.Entry<String, String> dynamicProperty : dynamicProperties.entrySet()) { effectiveConfiguration.setString(dynamicProperty.getKey(), dynamicProperty.getValue()); } return effectiveConfiguration; } public int run(String[] args) throws CliArgsException, FlinkException { // // Command Line Options // final CommandLine cmd = parseCommandLineOptions(args, true); if (cmd.hasOption(help.getOpt())) { printUsage(); return 0; } final AbstractYarnClusterDescriptor yarnClusterDescriptor = createClusterDescriptor(cmd); try { // Query cluster for metrics if (cmd.hasOption(query.getOpt())) { final String description = yarnClusterDescriptor.getClusterDescription(); System.out.println(description); return 0; } else { final ClusterClient<ApplicationId> clusterClient; final ApplicationId yarnApplicationId; if (cmd.hasOption(applicationId.getOpt())) { yarnApplicationId = ConverterUtils.toApplicationId(cmd.getOptionValue(applicationId.getOpt())); clusterClient = yarnClusterDescriptor.retrieve(yarnApplicationId); } else { final ClusterSpecification clusterSpecification = getClusterSpecification(cmd); clusterClient = yarnClusterDescriptor.deploySessionCluster(clusterSpecification); //------------------ ClusterClient deployed, handle connection details yarnApplicationId = clusterClient.getClusterId(); try { final LeaderConnectionInfo connectionInfo = clusterClient.getClusterConnectionInfo(); System.out.println("Flink JobManager is now running on " + connectionInfo.getHostname() + ':' + connectionInfo.getPort() + " with leader id " + connectionInfo.getLeaderSessionID() + '.'); System.out.println("JobManager Web Interface: " + clusterClient.getWebInterfaceURL()); writeYarnPropertiesFile( yarnApplicationId, clusterSpecification.getNumberTaskManagers() * 
clusterSpecification.getSlotsPerTaskManager(), yarnClusterDescriptor.getDynamicPropertiesEncoded()); } catch (Exception e) { try { clusterClient.shutdown(); } catch (Exception ex) { LOG.info("Could not properly shutdown cluster client.", ex); } try { yarnClusterDescriptor.terminateCluster(yarnApplicationId); } catch (FlinkException fe) { LOG.info("Could not properly terminate the Flink cluster.", fe); } throw new FlinkException("Could not write the Yarn connection information.", e); } } if (detachedMode) { LOG.info("The Flink YARN client has been started in detached mode. In order to stop " + "Flink on YARN, use the following command or a YARN web interface to stop it:\n" + "yarn application -kill " + applicationId.getOpt()); } else { ScheduledExecutorService scheduledExecutorService = Executors.newSingleThreadScheduledExecutor(); final YarnApplicationStatusMonitor yarnApplicationStatusMonitor = new YarnApplicationStatusMonitor( yarnClusterDescriptor.getYarnClient(), yarnApplicationId, new ScheduledExecutorServiceAdapter(scheduledExecutorService)); try { runInteractiveCli( clusterClient, yarnApplicationStatusMonitor, acceptInteractiveInput); } finally { try { yarnApplicationStatusMonitor.close(); } catch (Exception e) { LOG.info("Could not properly close the Yarn application status monitor.", e); } try { clusterClient.shutdown(); } catch (Exception e) { LOG.info("Could not properly shutdown cluster client.", e); } try { yarnClusterDescriptor.terminateCluster(yarnApplicationId); } catch (FlinkException e) { LOG.info("Could not properly terminate the Flink cluster.", e); } // shut down the scheduled executor service ExecutorUtils.gracefulShutdown( 1000L, TimeUnit.MILLISECONDS, scheduledExecutorService); deleteYarnPropertiesFile(); ApplicationReport applicationReport; try { applicationReport = yarnClusterDescriptor .getYarnClient() .getApplicationReport(yarnApplicationId); } catch (YarnException | IOException e) { LOG.info("Could not log the final application 
report.", e); applicationReport = null; } if (applicationReport != null) { logFinalApplicationReport(applicationReport); } } } } } finally { try { yarnClusterDescriptor.close(); } catch (Exception e) { LOG.info("Could not properly close the yarn cluster descriptor.", e); } } return 0; } private void logFinalApplicationReport(ApplicationReport appReport) { LOG.info("Application " + appReport.getApplicationId() + " finished with state " + appReport .getYarnApplicationState() + " and final state " + appReport .getFinalApplicationStatus() + " at " + appReport.getFinishTime()); if (appReport.getYarnApplicationState() == YarnApplicationState.FAILED) { LOG.warn("Application failed. Diagnostics " + appReport.getDiagnostics()); LOG.warn("If log aggregation is activated in the Hadoop cluster, we recommend to retrieve " + "the full application log using this command:" + System.lineSeparator() + "\tyarn logs -applicationId " + appReport.getApplicationId() + System.lineSeparator() + "(It sometimes takes a few seconds until the logs are aggregated)"); } } private void deleteYarnPropertiesFile() { // try to clean up the old yarn properties file try { File propertiesFile = getYarnPropertiesLocation(yarnPropertiesFileLocation); if (propertiesFile.isFile()) { if (propertiesFile.delete()) { LOG.info("Deleted Yarn properties file at {}", propertiesFile.getAbsoluteFile()); } else { LOG.warn("Couldn't delete Yarn properties file at {}", propertiesFile.getAbsoluteFile()); } } } catch (Exception e) { LOG.warn("Exception while deleting the JobManager address file", e); } } private void writeYarnPropertiesFile( ApplicationId yarnApplicationId, int parallelism, @Nullable String dynamicProperties) { // file that we write into the conf/ dir containing the jobManager address and the dop. 
final File yarnPropertiesFile = getYarnPropertiesLocation(yarnPropertiesFileLocation); Properties yarnProps = new Properties(); yarnProps.setProperty(YARN_APPLICATION_ID_KEY, yarnApplicationId.toString()); if (parallelism > 0) { yarnProps.setProperty(YARN_PROPERTIES_PARALLELISM, Integer.toString(parallelism)); } // add dynamic properties if (dynamicProperties != null) { yarnProps.setProperty(YARN_PROPERTIES_DYNAMIC_PROPERTIES_STRING, dynamicProperties); } writeYarnProperties(yarnProps, yarnPropertiesFile); } private void logAndSysout(String message) { LOG.info(message); System.out.println(message); } public static Map<String, String> getDynamicProperties(String dynamicPropertiesEncoded) { if (dynamicPropertiesEncoded != null && dynamicPropertiesEncoded.length() > 0) { Map<String, String> properties = new HashMap<>(); String[] propertyLines = dynamicPropertiesEncoded.split(YARN_DYNAMIC_PROPERTIES_SEPARATOR); for (String propLine : propertyLines) { if (propLine == null) { continue; } int firstEquals = propLine.indexOf("="); if (firstEquals >= 0) { String key = propLine.substring(0, firstEquals).trim(); String value = propLine.substring(firstEquals + 1, propLine.length()).trim(); if (!key.isEmpty()) { properties.put(key, value); } } } return properties; } else { return Collections.emptyMap(); } } public static void main(final String[] args) { final String configurationDirectory = CliFrontend.getConfigurationDirectoryFromEnv(); final Configuration flinkConfiguration = GlobalConfiguration.loadConfiguration(); int retCode; try { final FlinkYarnSessionCli cli = new FlinkYarnSessionCli( flinkConfiguration, configurationDirectory, "", ""); // no prefix for the YARN session SecurityUtils.install(new SecurityConfiguration(flinkConfiguration)); retCode = SecurityUtils.getInstalledContext().runSecured(() -> cli.run(args)); } catch (CliArgsException e) { retCode = handleCliArgsException(e); } catch (Exception e) { retCode = handleError(e); } System.exit(retCode); } private 
static void runInteractiveCli( ClusterClient<?> clusterClient, YarnApplicationStatusMonitor yarnApplicationStatusMonitor, boolean readConsoleInput) { try (BufferedReader in = new BufferedReader(new InputStreamReader(System.in))) { boolean continueRepl = true; int numTaskmanagers = 0; boolean isLastStatusUnknown = true; long unknownStatusSince = System.nanoTime(); while (continueRepl) { final ApplicationStatus applicationStatus = yarnApplicationStatusMonitor.getApplicationStatusNow(); switch (applicationStatus) { case FAILED: case CANCELED: System.err.println("The Flink Yarn cluster has failed."); continueRepl = false; break; case UNKNOWN: if (!isLastStatusUnknown) { unknownStatusSince = System.nanoTime(); isLastStatusUnknown = true; } if ((System.nanoTime() - unknownStatusSince) > 5L * CLIENT_POLLING_INTERVAL_MS * 1_000_000L) { System.err.println("The Flink Yarn cluster is in an unknown state. Please check the Yarn cluster."); continueRepl = false; } else { continueRepl = repStep(in, readConsoleInput); } break; case SUCCEEDED: if (isLastStatusUnknown) { isLastStatusUnknown = false; } // ------------------ check if there are updates by the cluster ----------- try { final GetClusterStatusResponse status = clusterClient.getClusterStatus(); if (status != null && numTaskmanagers != status.numRegisteredTaskManagers()) { System.err.println("Number of connected TaskManagers changed to " + status.numRegisteredTaskManagers() + ". " + "Slots available: " + status.totalNumberOfSlots()); numTaskmanagers = status.numRegisteredTaskManagers(); } } catch (Exception e) { LOG.warn("Could not retrieve the current cluster status. 
Skipping current retrieval attempt ...", e); } printClusterMessages(clusterClient); continueRepl = repStep(in, readConsoleInput); } } } catch (Exception e) { LOG.warn("Exception while running the interactive command line interface.", e); } } private static void printClusterMessages(ClusterClient clusterClient) { final List<String> messages = clusterClient.getNewMessages(); if (!messages.isEmpty()) { System.err.println("New messages from the YARN cluster: "); for (String msg : messages) { System.err.println(msg); } } } /** * Read-Evaluate-Print step for the REPL. * * @param in to read from * @param readConsoleInput true if console input has to be read * @return true if the REPL shall be continued, otherwise false * @throws IOException * @throws InterruptedException */ private static boolean repStep( BufferedReader in, boolean readConsoleInput) throws IOException, InterruptedException { // wait until CLIENT_POLLING_INTERVAL is over or the user entered something. long startTime = System.currentTimeMillis(); while ((System.currentTimeMillis() - startTime) < CLIENT_POLLING_INTERVAL_MS && (!readConsoleInput || !in.ready())) { Thread.sleep(200L); } //------------- handle interactive command by user. ---------------------- if (readConsoleInput && in.ready()) { String command = in.readLine(); switch (command) { case "quit": case "stop": return false; case "help": System.err.println(YARN_SESSION_HELP); break; default: System.err.println("Unknown command '" + command + "'. Showing help:"); System.err.println(YARN_SESSION_HELP); break; } } return true; } private static void writeYarnProperties(Properties properties, File propertiesFile) { try (final OutputStream out = new FileOutputStream(propertiesFile)) { properties.store(out, "Generated YARN properties file"); } catch (IOException e) { throw new RuntimeException("Error writing the properties file", e); } propertiesFile.setReadable(true, false); // readable for all. 
} private static int handleCliArgsException(CliArgsException e) { LOG.error("Could not parse the command line arguments.", e); System.out.println(e.getMessage()); System.out.println(); System.out.println("Use the help option (-h or --help) to get help on the command."); return 1; } private static int handleError(Exception e) { LOG.error("Error while running the Flink Yarn session.", e); System.err.println(); System.err.println("------------------------------------------------------------"); System.err.println(" The program finished with the following exception:"); System.err.println(); e.printStackTrace(); return 1; } public static File getYarnPropertiesLocation(@Nullable String yarnPropertiesFileLocation) { final String propertiesFileLocation; if (yarnPropertiesFileLocation != null) { propertiesFileLocation = yarnPropertiesFileLocation; } else { propertiesFileLocation = System.getProperty("java.io.tmpdir"); } String currentUser = System.getProperty("user.name"); return new File(propertiesFileLocation, YARN_PROPERTIES_FILE + currentUser); } private AbstractYarnClusterDescriptor getClusterDescriptor( Configuration configuration, YarnConfiguration yarnConfiguration, String configurationDirectory) { final YarnClient yarnClient = YarnClient.createYarnClient(); yarnClient.init(yarnConfiguration); yarnClient.start(); if (flip6) { return new Flip6YarnClusterDescriptor( configuration, yarnConfiguration, configurationDirectory, yarnClient, false); } else { return new YarnClusterDescriptor( configuration, yarnConfiguration, configurationDirectory, yarnClient, false); } } }
[hotfix] Set default number of TaskManagers in FlinkYarnSessionCli for Flip6
flink-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java
[hotfix] Set default number of TaskManagers in FlinkYarnSessionCli for Flip6
<ide><path>link-yarn/src/main/java/org/apache/flink/yarn/cli/FlinkYarnSessionCli.java <ide> throw new IllegalArgumentException("Missing required argument " + container.getOpt()); <ide> } <ide> <del> int numberTaskManagers = Integer.valueOf(cmd.getOptionValue(container.getOpt())); <add> // TODO: The number of task manager should be deprecated soon <add> final int numberTaskManagers; <add> <add> if (cmd.hasOption(container.getOpt())) { <add> numberTaskManagers = Integer.valueOf(cmd.getOptionValue(container.getOpt())); <add> } else { <add> numberTaskManagers = 1; <add> } <ide> <ide> // JobManager Memory <ide> final int jobManagerMemoryMB = configuration.getInteger(JobManagerOptions.JOB_MANAGER_HEAP_MEMORY); <ide> // Task Managers memory <ide> final int taskManagerMemoryMB = configuration.getInteger(TaskManagerOptions.TASK_MANAGER_HEAP_MEMORY); <ide> <del> int slotsPerTaskManager = configuration.getInteger(ConfigConstants.TASK_MANAGER_NUM_TASK_SLOTS, 1); <add> int slotsPerTaskManager = configuration.getInteger(TaskManagerOptions.NUM_TASK_SLOTS); <ide> <ide> return new ClusterSpecification.ClusterSpecificationBuilder() <ide> .setMasterMemoryMB(jobManagerMemoryMB)
JavaScript
mit
94e4d26c806cf7ccab7766060b76d9658c1e3465
0
RoyalIcing/react-organism
import expect from 'expect' import React from 'react' import {render, unmountComponentAtNode} from 'react-dom' import ReactTestUtils from 'react-dom/test-utils' import makeOrganism from 'src/' const waitMs = duration => new Promise(resolve => setTimeout(resolve, duration)) function Counter({ count, handlers: { increment, decrement, initial } }) { return ( <div> <button id='decrement' onClick={ decrement } children='−' /> <span>{ count }</span> <button id='increment' onClick={ increment } children='+' /> <button id='initial' onClick={ initial } children='Reset' /> </div> ) } describe('makeOrganism', () => { let node beforeEach(() => { node = document.createElement('div') }) afterEach(() => { unmountComponentAtNode(node) }) it('Sends click events', (done) => { let changeCount = 0 const CounterOrganism = makeOrganism(Counter, { initial: ({ initialCount = 0 }) => ({ count: initialCount }), increment: () => ({ count }) => ({ count: count + 1 }), decrement: () => ({ count }) => ({ count: count - 1 }) }, { onChange() { changeCount++ } }) const $ = (selector) => node.querySelector(selector) render(<CounterOrganism initialCount={ 2 } />, node, () => { expect(node.innerHTML).toContain('2') // Click increment ReactTestUtils.Simulate.click($('#increment')) expect(node.innerHTML).toContain('3') // Click decrement ReactTestUtils.Simulate.click($('#decrement')) expect(node.innerHTML).toContain('2') expect(changeCount).toBe(2) done() }) }) it('Calls load handler', (done) => { let changeCount = 0 const loadWait = 35 const CounterOrganism = makeOrganism(Counter, { initial: ({ initialCount = 0 }) => ({ count: initialCount }), load: async ({ loadedCount }, prevProps) => { if (!prevProps || loadedCount !== prevProps.loadedCount) { await waitMs(loadWait) return { count: loadedCount * 2 } // Multiply to be sure we are using this loaded value } }, increment: () => ({ count }) => ({ count: count + 1 }), decrement: () => ({ count }) => ({ count: count - 1 }) }, { onChange() { changeCount++ 
} }) const $ = (selector) => node.querySelector(selector) render(<CounterOrganism initialCount={ 2 } loadedCount={ 7 } />, node, () => { expect(node.innerHTML).toContain('2') // Click increment ReactTestUtils.Simulate.click($('#increment')) expect(node.innerHTML).toContain('3') // Click decrement ReactTestUtils.Simulate.click($('#decrement')) expect(node.innerHTML).toContain('2') expect(changeCount).toBe(2) setTimeout(() => { expect(node.innerHTML).toContain(14) expect(changeCount).toBe(3) render(<CounterOrganism initialCount={ 22 } loadedCount={ 7 } />, node, () => { expect(node.innerHTML).toContain(14) expect(changeCount).toBe(3) render(<CounterOrganism initialCount={ 22 } loadedCount={ 9 } />, node, () => { setTimeout(() => { expect(node.innerHTML).toContain(18) expect(changeCount).toBe(4) done() }, loadWait + 5) }) }) }, loadWait + 5) }) }) })
tests/index-test.js
import expect from 'expect' import React from 'react' import {render, unmountComponentAtNode} from 'react-dom' import ReactTestUtils from 'react-dom/test-utils' import makeOrganism from 'src/' function Counter({ count, handlers: { increment, decrement, initial } }) { return ( <div> <button id='decrement' onClick={ decrement } children='−' /> <span>{ count }</span> <button id='increment' onClick={ increment } children='+' /> <button id='initial' onClick={ initial } children='Reset' /> </div> ) } describe('makeOrganism', () => { let node beforeEach(() => { node = document.createElement('div') }) afterEach(() => { unmountComponentAtNode(node) }) it('Sends click events', (done) => { const CounterOrganism = makeOrganism(Counter, { initial: ({ initialCount = 0 }) => ({ count: initialCount }), increment: () => ({ count }) => ({ count: count + 1 }), decrement: () => ({ count }) => ({ count: count - 1 }) }) const $ = (selector) => node.querySelector(selector) render(<CounterOrganism initialCount={ 2 } />, node, () => { expect(node.innerHTML).toContain('2') // Click increment ReactTestUtils.Simulate.click($('#increment')) expect(node.innerHTML).toContain('3') // Click decrement ReactTestUtils.Simulate.click($('#decrement')) expect(node.innerHTML).toContain('2') done() }) }) })
Test onChange and load
tests/index-test.js
Test onChange and load
<ide><path>ests/index-test.js <ide> import ReactTestUtils from 'react-dom/test-utils' <ide> <ide> import makeOrganism from 'src/' <add> <add>const waitMs = duration => new Promise(resolve => setTimeout(resolve, duration)) <ide> <ide> function Counter({ <ide> count, <ide> }) <ide> <ide> it('Sends click events', (done) => { <add> let changeCount = 0 <add> <ide> const CounterOrganism = makeOrganism(Counter, { <ide> initial: ({ initialCount = 0 }) => ({ count: initialCount }), <ide> increment: () => ({ count }) => ({ count: count + 1 }), <ide> decrement: () => ({ count }) => ({ count: count - 1 }) <add> }, { <add> onChange() { <add> changeCount++ <add> } <ide> }) <ide> const $ = (selector) => node.querySelector(selector) <ide> render(<CounterOrganism initialCount={ 2 } />, node, () => { <ide> ReactTestUtils.Simulate.click($('#decrement')) <ide> expect(node.innerHTML).toContain('2') <ide> <add> expect(changeCount).toBe(2) <add> <ide> done() <ide> }) <ide> }) <ide> <add> it('Calls load handler', (done) => { <add> let changeCount = 0 <add> const loadWait = 35 <add> <add> const CounterOrganism = makeOrganism(Counter, { <add> initial: ({ initialCount = 0 }) => ({ count: initialCount }), <add> load: async ({ loadedCount }, prevProps) => { <add> if (!prevProps || loadedCount !== prevProps.loadedCount) { <add> await waitMs(loadWait) <add> return { count: loadedCount * 2 } // Multiply to be sure we are using this loaded value <add> } <add> }, <add> increment: () => ({ count }) => ({ count: count + 1 }), <add> decrement: () => ({ count }) => ({ count: count - 1 }) <add> }, { <add> onChange() { <add> changeCount++ <add> } <add> }) <add> const $ = (selector) => node.querySelector(selector) <add> render(<CounterOrganism initialCount={ 2 } loadedCount={ 7 } />, node, () => { <add> expect(node.innerHTML).toContain('2') <add> <add> // Click increment <add> ReactTestUtils.Simulate.click($('#increment')) <add> expect(node.innerHTML).toContain('3') <add> <add> // Click decrement <add> 
ReactTestUtils.Simulate.click($('#decrement')) <add> expect(node.innerHTML).toContain('2') <add> <add> expect(changeCount).toBe(2) <add> <add> setTimeout(() => { <add> expect(node.innerHTML).toContain(14) <add> expect(changeCount).toBe(3) <add> <add> render(<CounterOrganism initialCount={ 22 } loadedCount={ 7 } />, node, () => { <add> expect(node.innerHTML).toContain(14) <add> expect(changeCount).toBe(3) <add> <add> render(<CounterOrganism initialCount={ 22 } loadedCount={ 9 } />, node, () => { <add> setTimeout(() => { <add> expect(node.innerHTML).toContain(18) <add> expect(changeCount).toBe(4) <add> <add> done() <add> }, loadWait + 5) <add> }) <add> }) <add> }, loadWait + 5) <add> }) <add> }) <add> <ide> })
Java
apache-2.0
db7ad1e22b33f543d5273bb9e86b01a69ad0a94c
0
jonathanmtran/uPortal,jl1955/uPortal5,jl1955/uPortal5,joansmith/uPortal,Mines-Albi/esup-uportal,jameswennmacher/uPortal,EdiaEducationTechnology/uPortal,kole9273/uPortal,vertein/uPortal,Mines-Albi/esup-uportal,phillips1021/uPortal,EsupPortail/esup-uportal,jhelmer-unicon/uPortal,timlevett/uPortal,groybal/uPortal,chasegawa/uPortal,stalele/uPortal,MichaelVose2/uPortal,Jasig/uPortal-start,drewwills/uPortal,joansmith/uPortal,Mines-Albi/esup-uportal,andrewstuart/uPortal,vertein/uPortal,stalele/uPortal,Jasig/SSP-Platform,kole9273/uPortal,ASU-Capstone/uPortal-Forked,GIP-RECIA/esco-portail,doodelicious/uPortal,mgillian/uPortal,Jasig/uPortal,doodelicious/uPortal,mgillian/uPortal,chasegawa/uPortal,ChristianMurphy/uPortal,andrewstuart/uPortal,EsupPortail/esup-uportal,jhelmer-unicon/uPortal,MichaelVose2/uPortal,Jasig/uPortal-start,phillips1021/uPortal,EsupPortail/esup-uportal,Jasig/SSP-Platform,ASU-Capstone/uPortal-Forked,ASU-Capstone/uPortal,Jasig/SSP-Platform,pspaude/uPortal,GIP-RECIA/esup-uportal,ASU-Capstone/uPortal,andrewstuart/uPortal,pspaude/uPortal,MichaelVose2/uPortal,drewwills/uPortal,jl1955/uPortal5,Mines-Albi/esup-uportal,joansmith/uPortal,GIP-RECIA/esco-portail,apetro/uPortal,mgillian/uPortal,doodelicious/uPortal,EdiaEducationTechnology/uPortal,andrewstuart/uPortal,EdiaEducationTechnology/uPortal,joansmith/uPortal,ASU-Capstone/uPortal-Forked,jhelmer-unicon/uPortal,ASU-Capstone/uPortal,vbonamy/esup-uportal,timlevett/uPortal,bjagg/uPortal,stalele/uPortal,kole9273/uPortal,apetro/uPortal,jhelmer-unicon/uPortal,EsupPortail/esup-uportal,cousquer/uPortal,pspaude/uPortal,apetro/uPortal,doodelicious/uPortal,vbonamy/esup-uportal,jameswennmacher/uPortal,bjagg/uPortal,cousquer/uPortal,cousquer/uPortal,Jasig/SSP-Platform,jonathanmtran/uPortal,drewwills/uPortal,apetro/uPortal,GIP-RECIA/esup-uportal,jhelmer-unicon/uPortal,Mines-Albi/esup-uportal,GIP-RECIA/esup-uportal,timlevett/uPortal,groybal/uPortal,drewwills/uPortal,chasegawa/uPortal,vertein/uPortal,ASU-Capstone/uPortal-Forked,M
ichaelVose2/uPortal,groybal/uPortal,EsupPortail/esup-uportal,jl1955/uPortal5,vbonamy/esup-uportal,jameswennmacher/uPortal,GIP-RECIA/esup-uportal,vertein/uPortal,ChristianMurphy/uPortal,phillips1021/uPortal,Jasig/uPortal,Jasig/uPortal,stalele/uPortal,vbonamy/esup-uportal,Jasig/SSP-Platform,jameswennmacher/uPortal,jl1955/uPortal5,pspaude/uPortal,GIP-RECIA/esco-portail,apetro/uPortal,jonathanmtran/uPortal,vbonamy/esup-uportal,groybal/uPortal,doodelicious/uPortal,jameswennmacher/uPortal,ASU-Capstone/uPortal,EdiaEducationTechnology/uPortal,phillips1021/uPortal,chasegawa/uPortal,MichaelVose2/uPortal,chasegawa/uPortal,timlevett/uPortal,ChristianMurphy/uPortal,andrewstuart/uPortal,phillips1021/uPortal,ASU-Capstone/uPortal-Forked,stalele/uPortal,kole9273/uPortal,bjagg/uPortal,kole9273/uPortal,ASU-Capstone/uPortal,joansmith/uPortal,GIP-RECIA/esup-uportal,groybal/uPortal
/** * Licensed to Jasig under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Jasig licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.portal.layout.simple; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Enumeration; import java.util.HashMap; import java.util.Hashtable; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Vector; import javax.annotation.Resource; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jasig.portal.EntityIdentifier; import org.jasig.portal.IUserProfile; import org.jasig.portal.UserProfile; import org.jasig.portal.i18n.ILocaleStore; import org.jasig.portal.i18n.LocaleManager; import org.jasig.portal.layout.IUserLayoutStore; import org.jasig.portal.layout.LayoutStructure; import org.jasig.portal.layout.dao.IStylesheetDescriptorDao; import org.jasig.portal.portlet.registry.IPortletDefinitionRegistry; import org.jasig.portal.rdbm.DatabaseMetaDataImpl; import org.jasig.portal.rdbm.IDatabaseMetadata; import org.jasig.portal.rdbm.IJoinQueryString; import org.jasig.portal.security.IPerson; import org.jasig.portal.security.IPersonManager; import org.jasig.portal.security.ISecurityContext; import 
org.jasig.portal.security.provider.PersonImpl; import org.jasig.portal.spring.locator.CounterStoreLocator; import org.jasig.portal.utils.DocumentFactory; import org.jasig.portal.utils.ICounterStore; import org.jasig.portal.utils.Tuple; import org.jasig.portal.utils.threading.SingletonDoubleCheckedCreator; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.dao.DataAccessException; import org.springframework.jdbc.core.ConnectionCallback; import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator; import org.springframework.jdbc.support.SQLExceptionTranslator; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.DefaultTransactionDefinition; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionOperations; import org.springframework.transaction.support.TransactionTemplate; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import com.google.common.cache.Cache; /** * SQL implementation for the 2.x relational database model. * * Prior to uPortal 2.5, this class existed in the org.jasig.portal package. It was * moved to its present package to express that it is part of the * Simple Layout Manager implementation. 
* * @author George Lindholm * @version $Revision$ $Date$ */ public abstract class RDBMUserLayoutStore implements IUserLayoutStore, InitializingBean { protected final Log log = LogFactory.getLog(getClass()); private static String PROFILE_TABLE = "UP_USER_PROFILE"; protected static final String DEFAULT_LAYOUT_FNAME = "default"; //This class is instantiated ONCE so NO class variables can be used to keep state between calls protected static final String channelPrefix = "n"; protected static final String folderPrefix = "s"; protected TransactionOperations transactionOperations; protected TransactionOperations nextStructTransactionOperations; protected JdbcOperations jdbcOperations; private ILocaleStore localeStore; protected IDatabaseMetadata databaseMetadata; protected IPersonManager personManager; protected ICounterStore counterStore; protected IPortletDefinitionRegistry portletDefinitionRegistry; protected IStylesheetDescriptorDao stylesheetDescriptorDao; protected SQLExceptionTranslator exceptionTranslator; // I18n property protected static final boolean localeAware = LocaleManager.isLocaleAware(); @Autowired public void setLocaleStore(ILocaleStore localeStore) { this.localeStore = localeStore; } @Autowired public void setStylesheetDescriptorDao(IStylesheetDescriptorDao stylesheetDescriptorDao) { this.stylesheetDescriptorDao = stylesheetDescriptorDao; } @Autowired public void setPlatformTransactionManager(@Qualifier("PortalDb") PlatformTransactionManager platformTransactionManager) { this.transactionOperations = new TransactionTemplate(platformTransactionManager); final DefaultTransactionDefinition nextStructTransactionDefinition = new DefaultTransactionDefinition(); nextStructTransactionDefinition.setPropagationBehavior(DefaultTransactionDefinition.PROPAGATION_REQUIRES_NEW); this.nextStructTransactionOperations = new TransactionTemplate(platformTransactionManager, nextStructTransactionDefinition); } @Resource(name="PortalDb") public void setDataSource(DataSource 
dataSource) { this.jdbcOperations = new JdbcTemplate(dataSource); this.exceptionTranslator = new SQLErrorCodeSQLExceptionTranslator(dataSource); } @Autowired public void setDatabaseMetadata(IDatabaseMetadata databaseMetadata) { this.databaseMetadata = databaseMetadata; } @Autowired public void setPersonManager(IPersonManager personManager) { this.personManager = personManager; } @Autowired public void setCounterStore(ICounterStore counterStore) { this.counterStore = counterStore; } @Autowired public void setPortletDefinitionRegistry(IPortletDefinitionRegistry portletDefinitionRegistry) { this.portletDefinitionRegistry = portletDefinitionRegistry; } @Override public void afterPropertiesSet() throws Exception { if (this.databaseMetadata.supportsOuterJoins()) { final IJoinQueryString joinQuery = this.databaseMetadata.getJoinQuery(); if (joinQuery instanceof DatabaseMetaDataImpl.JdbcDb) { joinQuery.addQuery("layout", "{oj UP_LAYOUT_STRUCT ULS LEFT OUTER JOIN UP_LAYOUT_PARAM USP ON ULS.USER_ID = USP.USER_ID AND ULS.STRUCT_ID = USP.STRUCT_ID} WHERE"); joinQuery.addQuery("ss_struct", "{oj UP_SS_STRUCT USS LEFT OUTER JOIN UP_SS_STRUCT_PAR USP ON USS.SS_ID=USP.SS_ID} WHERE"); joinQuery.addQuery("ss_theme", "{oj UP_SS_THEME UTS LEFT OUTER JOIN UP_SS_THEME_PARM UTP ON UTS.SS_ID=UTP.SS_ID} WHERE"); } else if (joinQuery instanceof DatabaseMetaDataImpl.PostgreSQLDb) { joinQuery.addQuery("layout", "UP_LAYOUT_STRUCT ULS LEFT OUTER JOIN UP_LAYOUT_PARAM USP ON ULS.USER_ID = USP.USER_ID AND ULS.STRUCT_ID = USP.STRUCT_ID WHERE"); joinQuery.addQuery("ss_struct", "UP_SS_STRUCT USS LEFT OUTER JOIN UP_SS_STRUCT_PAR USP ON USS.SS_ID=USP.SS_ID WHERE"); joinQuery.addQuery("ss_theme", "UP_SS_THEME UTS LEFT OUTER JOIN UP_SS_THEME_PARM UTP ON UTS.SS_ID=UTP.SS_ID WHERE"); } else if (joinQuery instanceof DatabaseMetaDataImpl.OracleDb) { joinQuery.addQuery("layout", "UP_LAYOUT_STRUCT ULS, UP_LAYOUT_PARAM USP WHERE ULS.STRUCT_ID = USP.STRUCT_ID(+) AND ULS.USER_ID = USP.USER_ID(+) AND"); 
joinQuery.addQuery("ss_struct", "UP_SS_STRUCT USS, UP_SS_STRUCT_PAR USP WHERE USS.SS_ID=USP.SS_ID(+) AND"); joinQuery.addQuery("ss_theme", "UP_SS_THEME UTS, UP_SS_THEME_PARM UTP WHERE UTS.SS_ID=UTP.SS_ID(+) AND"); } else { throw new RuntimeException("Unknown database driver"); } } } private final SingletonDoubleCheckedCreator<IPerson> systemPersonCreator = new SingletonDoubleCheckedCreator<IPerson>() { protected IPerson createSingleton(Object... args) { // be sure we only do this once... // Load the "system" user id from the database final int systemUserId = jdbcOperations.queryForInt("SELECT USER_ID FROM UP_USER WHERE USER_NAME = 'system'"); log.info("Found user id " + systemUserId + " for the 'system' user."); return new SystemUser(systemUserId); } }; private final IPerson getSystemUser() { return this.systemPersonCreator.get(); } /** * Add a user profile * @param person * @param profile * @return userProfile * @exception Exception */ public UserProfile addUserProfile (final IPerson person, final IUserProfile profile) { final int userId = person.getID(); // generate an id for this profile return this.jdbcOperations.execute(new ConnectionCallback<UserProfile>() { @Override public UserProfile doInConnection(Connection con) throws SQLException, DataAccessException { String sQuery = null; PreparedStatement pstmt = con.prepareStatement("INSERT INTO UP_USER_PROFILE " + "(USER_ID,PROFILE_ID,PROFILE_FNAME,PROFILE_NAME,STRUCTURE_SS_ID,THEME_SS_ID," + "DESCRIPTION, LAYOUT_ID) VALUES (?,?,?,?,?,?,?,?)"); int profileId = getNextKey(); pstmt.setInt(1, userId); pstmt.setInt(2, profileId); pstmt.setString(3, profile.getProfileFname()); pstmt.setString(4, profile.getProfileName()); pstmt.setInt(5, profile.getStructureStylesheetId()); pstmt.setInt(6, profile.getThemeStylesheetId()); pstmt.setString(7, profile.getProfileDescription()); pstmt.setInt(8, profile.getLayoutId()); sQuery = "INSERT INTO UP_USER_PROFILE 
(USER_ID,PROFILE_ID,PROFILE_FNAME,PROFILE_NAME,STRUCTURE_SS_ID,THEME_SS_ID,DESCRIPTION, LAYOUT_ID) VALUES (" + userId + ",'" + profileId + ",'" + profile.getProfileFname() + "','" + profile.getProfileName() + "'," + profile.getStructureStylesheetId() + "," + profile.getThemeStylesheetId() + ",'" + profile.getProfileDescription() + "', "+profile.getLayoutId()+")"; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::addUserProfile(): " + sQuery); try { pstmt.executeUpdate(); UserProfile newProfile = new UserProfile(); newProfile.setProfileId(profileId); newProfile.setLayoutId(profile.getLayoutId()); newProfile.setLocaleManager(profile.getLocaleManager()); newProfile.setProfileDescription(profile.getProfileDescription()); newProfile.setProfileFname(profile.getProfileFname()); newProfile.setProfileName(profile.getProfileName()); newProfile.setStructureStylesheetId(profile.getStructureStylesheetId()); newProfile.setSystemProfile(false); newProfile.setThemeStylesheetId(profile.getThemeStylesheetId()); return newProfile; } finally { pstmt.close(); } } }); } private int getNextKey() { return CounterStoreLocator.getCounterStore().getNextId(PROFILE_TABLE); } /** * Checks if a channel has been approved * @param approvedDate * @return boolean Channel is approved */ protected static boolean channelApproved(java.util.Date approvedDate) { java.util.Date rightNow = new java.util.Date(); return (approvedDate != null && rightNow.after(approvedDate)); } /** * Create a layout * @param layoutStructure * @param doc * @param root * @param structId * @exception java.sql.SQLException */ protected final void createLayout (HashMap layoutStructure, Document doc, Element root, int structId) throws java.sql.SQLException { while (structId != 0) { LayoutStructure ls = (LayoutStructure) layoutStructure.get(new Integer(structId)); // replaced with call to method in containing class to allow overriding // by subclasses of RDBMUserLayoutStore. 
// Element structure = ls.getStructureDocument(doc); Element structure = getStructure(doc, ls); root.appendChild(structure); String id = structure.getAttribute("ID"); if (id != null && ! id.equals("")) { structure.setIdAttribute("ID", true); } createLayout(layoutStructure, doc, structure, ls.getChildId()); structId = ls.getNextId(); } } /** * convert true/false into Y/N for database * @param value to check * @result boolean */ protected static final boolean xmlBool (String value) { return (value != null && value.equals("true") ? true : false); } public void deleteUserProfile(IPerson person, int profileId) { int userId = person.getID(); deleteUserProfile(userId,profileId); } private void deleteUserProfile(final int userId, final int profileId) { this.jdbcOperations.execute(new ConnectionCallback<Object>() { @Override public Object doInConnection(Connection con) throws SQLException, DataAccessException { Statement stmt = con.createStatement(); try { String sQuery = "DELETE FROM UP_USER_PROFILE WHERE USER_ID=" + userId + " AND PROFILE_ID=" + Integer.toString(profileId); if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::deleteUserProfile() : " + sQuery); stmt.executeUpdate(sQuery); // remove profile mappings sQuery= "DELETE FROM UP_USER_UA_MAP WHERE USER_ID=" + userId + " AND PROFILE_ID=" + Integer.toString(profileId); if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::deleteUserProfile() : " + sQuery); stmt.executeUpdate(sQuery); // remove parameter information sQuery= "DELETE FROM UP_SS_USER_PARM WHERE USER_ID=" + userId + " AND PROFILE_ID=" + Integer.toString(profileId); if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::deleteUserProfile() : " + sQuery); stmt.executeUpdate(sQuery); sQuery= "DELETE FROM UP_SS_USER_ATTS WHERE USER_ID=" + userId + " AND PROFILE_ID=" + Integer.toString(profileId); if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::deleteUserProfile() : " + sQuery); stmt.executeUpdate(sQuery); } finally { stmt.close(); } 
        return null;
      }
    });
  }

  /**
   * Dump a document tree structure on stderr (debugging aid).
   * Recurses depth-first over children, then walks siblings.
   * @param node   node to dump (null is a no-op)
   * @param indent current indentation prefix
   */
  public static final void dumpDoc (Node node, String indent) {
    if (node == null) {
      return;
    }
    if (node instanceof Element) {
      System.err.print(indent + "element: tag=" + ((Element)node).getTagName() + " ");
    }
    else if (node instanceof Document) {
      System.err.print("document:");
    }
    else {
      System.err.print(indent + "node:");
    }
    System.err.println("name=" + node.getNodeName() + " value=" + node.getNodeValue());
    NamedNodeMap nm = node.getAttributes();
    if (nm != null) {
      for (int i = 0; i < nm.getLength(); i++) {
        System.err.println(indent + " " + nm.item(i).getNodeName() + ": '" + nm.item(i).getNodeValue() + "'");
      }
      System.err.println(indent + "--");
    }
    if (node.hasChildNodes()) {
      dumpDoc(node.getFirstChild(), indent + "  ");
    }
    dumpDoc(node.getNextSibling(), indent);
  }

  /**
   * Return the next available channel structure id for a user
   * @param person
   * @return the next available channel structure id
   */
  public String generateNewChannelSubscribeId (IPerson person) {
    return getNextStructId(person, channelPrefix);
  }

  /**
   * Return the next available folder structure id for a user
   * @param person
   * @return a <code>String</code> that is the next free structure ID
   * @exception Exception
   */
  public String generateNewFolderId (IPerson person) {
    return  getNextStructId(person, folderPrefix);
  }

  /**
   * Return the next available structure id for a user.
   * Runs in its own transaction; the UPDATE's "AND NEXT_STRUCT_ID=current"
   * predicate provides optimistic protection against concurrent increments.
   * @param person
   * @param prefix folder or channel prefix to prepend to the numeric id
   * @return next free structure ID
   * @exception Exception
   */
  protected String getNextStructId(final IPerson person, final String prefix) {
    final int userId = person.getID();
    return this.nextStructTransactionOperations.execute(new TransactionCallback<String>() {
      @Override
      public String doInTransaction(TransactionStatus status) {
        return jdbcOperations.execute(new ConnectionCallback<String>() {
          @Override
          public String doInConnection(Connection con) throws SQLException, DataAccessException {
            Statement stmt = con.createStatement();
            try {
              String sQuery = "SELECT NEXT_STRUCT_ID FROM UP_USER WHERE USER_ID=" + userId;
              if (log.isDebugEnabled())
                log.debug("RDBMUserLayoutStore::getNextStructId(): " + sQuery);
              ResultSet rs = stmt.executeQuery(sQuery);
              int currentStructId;
              try {
                if (rs.next()) {
                  currentStructId = rs.getInt(1);
                } else {
                  // the user row must exist before a struct id can be handed out
                  throw new SQLException("no rows returned for query [" + sQuery + "]");
                }
              } finally {
                rs.close();
              }
              int nextStructId = currentStructId + 1;
              String sUpdate = "UPDATE UP_USER SET NEXT_STRUCT_ID=" + nextStructId + " WHERE USER_ID=" + userId + " AND NEXT_STRUCT_ID=" + currentStructId;
              if (log.isDebugEnabled())
                log.debug("RDBMUserLayoutStore::getNextStructId(): " + sUpdate);
              stmt.executeUpdate(sUpdate);
              return prefix + nextStructId;
            } finally {
              stmt.close();
            }
          }
        });
      }
    });
  }

  /**
   * Return the Structure ID tag (folder- or channel-prefixed).
   * @param  structId
   * @param  chanId   0 means the struct is a folder
   * @return ID tag
   */
  protected String getStructId(int structId, int chanId) {
    if (chanId == 0) {
      return folderPrefix + structId;
    } else {
      return channelPrefix + structId;
    }
  }

  // private helper modules that retreive information from the DOM structure of the description files

  /** Extract the text of the "name" element directly under "stylesheetdescription". */
  private String getName (Document descr) {
    NodeList names = descr.getElementsByTagName("name");
    Node name = null;
    // scan backwards for a "name" whose parent is the stylesheetdescription root
    for (int i = names.getLength() - 1; i >= 0; i--) {
      name = names.item(i);
      if (name.getParentNode().getNodeName().equals("stylesheetdescription"))
        break;
      else
        name = null;
    }
    if (name != null) {
      return this.getTextChildNodeValue(name);
    }
    else {
      if (log.isDebugEnabled())
        log.debug("RDBMUserLayoutStore::getName() : no \"name\" element was found under the \"stylesheetdescription\" node!");
      return null;
    }
  }

  /** Extract the text of the named element directly under "stylesheetdescription". */
  private String getRootElementTextValue (Document descr, String elementName) {
    NodeList names = descr.getElementsByTagName(elementName);
    Node name = null;
    for (int i = names.getLength() - 1; i >= 0; i--) {
      name = names.item(i);
      if (name.getParentNode().getNodeName().equals("stylesheetdescription"))
        break;
      else
        name = null;
    }
    if (name != null)
      { return this.getTextChildNodeValue(name); }
    else {
      if (log.isDebugEnabled())
        log.debug("RDBMUserLayoutStore::getRootElementTextValue() : no \"" + elementName + "\" element was found under the \"stylesheetdescription\" node!");
      return null;
    }
  }

  /** Extract the text of the "description" element directly under "stylesheetdescription". */
  private String getDescription (Document descr) {
    NodeList descriptions = descr.getElementsByTagName("description");
    Node description = null;
    for (int i = descriptions.getLength() - 1; i >= 0; i--) {
      description = descriptions.item(i);
      if (description.getParentNode().getNodeName().equals("stylesheetdescription"))
        break;
      else
        description = null;
    }
    if (description != null) {
      return this.getTextChildNodeValue(description);
    }
    else {
      if (log.isDebugEnabled())
        log.debug("RDBMUserLayoutStore::getDescription() : no \"description\" element was found under the \"stylesheetdescription\" node!");
      return null;
    }
  }

  /** Collect the text values of all elements with the given name under the first "stylesheetdescription". */
  private Vector getVectorOfSimpleTextElementValues (Document descr, String elementName) {
    Vector v = new Vector();
    // find "stylesheetdescription" node, take the first one
    Element stylesheetdescriptionElement = (Element)(descr.getElementsByTagName("stylesheetdescription")).item(0);
    if (stylesheetdescriptionElement == null) {
      log.error( "Could not obtain <stylesheetdescription> element");
      return  null;
    }
    NodeList elements = stylesheetdescriptionElement.getElementsByTagName(elementName);
    for (int i = elements.getLength() - 1; i >= 0; i--) {
      v.add(this.getTextChildNodeValue(elements.item(i)));
    }
    return  v;
  }

  /** @return the first TEXT child's value (scanning backwards), or null. */
  private String getTextChildNodeValue (Node node) {
    if (node == null)
      return  null;
    NodeList children = node.getChildNodes();
    for (int i = children.getLength() - 1; i >= 0; i--) {
      Node child = children.item(i);
      if (child.getNodeType() == Node.TEXT_NODE)
        return  child.getNodeValue();
    }
    return  null;
  }

  /**
   * UserPreferences: look up the profile fname mapped to a user-agent string.
   */
  private String getUserBrowserMapping (final IPerson person, final String userAgentArg) {
    final int userId = person.getID();
    return jdbcOperations.execute(new ConnectionCallback<String>() {
      @Override
      public String
      doInConnection(Connection con) throws SQLException, DataAccessException {
        final String userAgent;
        // USER_AGENT column holds at most 255 characters; trim to fit
        if (userAgentArg.length() > 255){
          userAgent = userAgentArg.substring(0,254);
          log.debug("userAgent trimmed to 255 characters. userAgent: "+userAgentArg);
        }
        else {
          userAgent = userAgentArg;
        }

        String sQuery =
          "SELECT PROFILE_FNAME " +
          "FROM UP_USER_UA_MAP LEFT JOIN UP_USER_PROFILE ON " +
          "UP_USER_UA_MAP.PROFILE_ID=UP_USER_PROFILE.PROFILE_ID WHERE UP_USER_UA_MAP.USER_ID=? AND USER_AGENT=?";
        PreparedStatement pstmt = con.prepareStatement(sQuery);

        try {
          pstmt.setInt(1, userId);
          pstmt.setString(2, userAgent);

          if (log.isDebugEnabled())
            log.debug("RDBMUserLayoutStore::getUserBrowserMapping(): '" + sQuery + "' userId: " + userId + " userAgent: " + userAgent);
          ResultSet rs = pstmt.executeQuery();
          try {
            if (rs.next()) {
              return rs.getString("PROFILE_FNAME");
            }
          } finally {
            rs.close();
          }
        } finally {
          pstmt.close();
        }

        // no mapping for this user-agent
        return null;
      }
    });
  }

  /**
   * Load a user's personal layout document from UP_LAYOUT_STRUCT/UP_LAYOUT_PARAM.
   * Falls back to the template (default) user's layout when the user has none yet,
   * and keeps NEXT_STRUCT_ID consistent so new channels can be added afterwards.
   */
  protected Document getPersonalUserLayout (final IPerson person, final IUserProfile profile) {
    final LocaleManager localeManager = profile.getLocaleManager();

    return jdbcOperations.execute(new ConnectionCallback<Document>() {
      @Override
      public Document doInConnection(Connection con) throws SQLException, DataAccessException {
        ResultSet rs;
        int userId = person.getID();
        final int realUserId = userId;
        Document doc = DocumentFactory.getThreadDocument();
        Element root = doc.createElement("layout");
        final Statement stmt = con.createStatement();
        // A separate statement is needed so as not to interfere with ResultSet
        // of statements used for queries
        Statement insertStmt = con.createStatement();
        try {
          long startTime = System.currentTimeMillis();
          // eventually, we need to fix template layout implementations so you can just do this:
          // int layoutId=profile.getLayoutId();
          // but for now:
          int layoutId = getLayoutID(userId, profile.getProfileId());

          if (layoutId == 0) { // First time, grab the default layout for this user
            final Tuple<Integer, Integer> userLayoutIds = transactionOperations.execute(new TransactionCallback<Tuple<Integer, Integer>>() {
              @Override
              public Tuple<Integer, Integer> doInTransaction(TransactionStatus status) {
                return jdbcOperations.execute(new ConnectionCallback<Tuple<Integer, Integer>>() {
                  @Override
                  public Tuple<Integer, Integer> doInConnection(Connection con) throws SQLException, DataAccessException {
                    int newLayoutId;
                    int newUserId;

                    String sQuery = "SELECT USER_DFLT_USR_ID, USER_DFLT_LAY_ID FROM UP_USER WHERE USER_ID=" + realUserId;
                    if (log.isDebugEnabled())
                      log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery);
                    ResultSet rs = stmt.executeQuery(sQuery);
                    try {
                      boolean hasRow = rs.next();
                      newUserId = rs.getInt(1);
                      newLayoutId = rs.getInt(2);
                    } finally {
                      rs.close();
                    }

                    // Make sure the next struct id is set in case the user adds a channel
                    sQuery = "SELECT NEXT_STRUCT_ID FROM UP_USER WHERE USER_ID=" + newUserId;
                    if (log.isDebugEnabled())
                      log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery);
                    int nextStructId;
                    rs = stmt.executeQuery(sQuery);
                    try {
                      boolean hasRow = rs.next();
                      nextStructId = rs.getInt(1);
                    } finally {
                      rs.close();
                    }

                    int realNextStructId = 0;

                    if (realUserId != newUserId) {
                      // But never make the existing value SMALLER, change it only to make it LARGER
                      // (so, get existing value)
                      sQuery = "SELECT NEXT_STRUCT_ID FROM UP_USER WHERE USER_ID=" + realUserId;
                      if (log.isDebugEnabled())
                        log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery);
                      rs = stmt.executeQuery(sQuery);
                      try {
                        boolean hasRow = rs.next();
                        realNextStructId = rs.getInt(1);
                      } finally {
                        rs.close();
                      }
                    }

                    if (nextStructId > realNextStructId) {
                      sQuery = "UPDATE UP_USER SET NEXT_STRUCT_ID=" + nextStructId + " WHERE USER_ID=" + realUserId;
                      if (log.isDebugEnabled())
                        log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery);
                      stmt.executeUpdate(sQuery);
                    }

                    return new Tuple<Integer, Integer>(newUserId, newLayoutId);
                  }
                });
              }
            });

            userId = userLayoutIds.first;
            layoutId = userLayoutIds.second;
          }

          int firstStructId = -1;

          //Flags to enable a default layout lookup if it's needed
          boolean foundLayout = false;
          boolean triedDefault = false;

          //This loop is used to ensure a layout is found for a user. It tries
          //looking up the layout for the current userID. If one isn't found
          //the userID is replaced with the template user ID for this user and
          //the layout is searched for again. This loop should only ever loop once.
          do {
            String sQuery = "SELECT INIT_STRUCT_ID FROM UP_USER_LAYOUT WHERE USER_ID=" + userId + " AND LAYOUT_ID = " + layoutId;
            if (log.isDebugEnabled())
              log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery);
            rs = stmt.executeQuery(sQuery);
            try {
              if (rs.next()) {
                firstStructId = rs.getInt(1);
              } else {
                throw new RuntimeException("RDBMUserLayoutStore::getUserLayout(): No INIT_STRUCT_ID in UP_USER_LAYOUT for USER_ID: " + userId + " and LAYOUT_ID: " + layoutId);
              }
            } finally {
              rs.close();
            }

            String sql;
            if (localeAware) {
              // This needs to be changed to get the localized strings
              sql = "SELECT ULS.STRUCT_ID,ULS.NEXT_STRUCT_ID,ULS.CHLD_STRUCT_ID,ULS.CHAN_ID,ULS.NAME,ULS.TYPE,ULS.HIDDEN,"+
                "ULS.UNREMOVABLE,ULS.IMMUTABLE";
            }
            else {
              sql = "SELECT ULS.STRUCT_ID,ULS.NEXT_STRUCT_ID,ULS.CHLD_STRUCT_ID,ULS.CHAN_ID,ULS.NAME,ULS.TYPE,ULS.HIDDEN,"+
                "ULS.UNREMOVABLE,ULS.IMMUTABLE";
            }

            // one-query path (outer join pulls parameters too) vs. two-query path
            if (databaseMetadata.supportsOuterJoins()) {
              sql += ",USP.STRUCT_PARM_NM,USP.STRUCT_PARM_VAL FROM " + databaseMetadata.getJoinQuery().getQuery("layout");
            }
            else {
              sql += " FROM UP_LAYOUT_STRUCT ULS WHERE ";
            }

            sql += " ULS.USER_ID=" + userId + " AND ULS.LAYOUT_ID=" + layoutId + " ORDER BY ULS.STRUCT_ID";
            if (log.isDebugEnabled())
              log.debug("RDBMUserLayoutStore::getUserLayout(): " + sql);
            rs = stmt.executeQuery(sql);

            //check for rows in the result set
            foundLayout = rs.next();

            if (!foundLayout && !triedDefault && userId == realUserId) {
              //If we didn't find any rows and we haven't tried the default user yet
              triedDefault = true;
              rs.close();

              //Get the default user ID and layout ID
              sQuery = "SELECT USER_DFLT_USR_ID, USER_DFLT_LAY_ID FROM UP_USER WHERE USER_ID=" + userId;
              if (log.isDebugEnabled())
                log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery);
              rs = stmt.executeQuery(sQuery);
              try {
                rs.next();
                userId = rs.getInt(1);
                layoutId = rs.getInt(2);
              } finally {
                rs.close();
              }
            }
            else {
              //We tried the default or actually found a layout
              break;
            }
          } while (!foundLayout);

          HashMap layoutStructure = new HashMap();
          StringBuffer structChanIds = new StringBuffer();

          try {
            int lastStructId = 0;
            LayoutStructure ls = null;
            String sepChar = "";
            if (foundLayout) {
              int structId = rs.getInt(1);
              // Result Set returns 0 by default if structId was null
              // Except if you are using poolman 2.0.4 in which case you get -1 back
              if (rs.wasNull()) {
                structId = 0;
              }
              readLayout: while (true) {
                int nextId = rs.getInt(2);
                if (rs.wasNull()) {
                  nextId = 0;
                }
                int childId = rs.getInt(3);
                if (rs.wasNull()) {
                  childId = 0;
                }
                int chanId = rs.getInt(4);
                if (rs.wasNull()) {
                  chanId = 0;
                }
                String temp5=rs.getString(5); // Some JDBC drivers require columns accessed in order
                String temp6=rs.getString(6); // Access 5 and 6 now, save till needed.

                // uPortal i18n
                int name_index, value_index;
                if (localeAware) {
                  Locale[] locales = localeManager.getLocales();
                  String locale = locales[0].toString();
                  ls = new LayoutStructure(
                      structId, nextId, childId, chanId,
                      rs.getString(7),rs.getString(8),rs.getString(9),
                      locale);
                  name_index=10;
                  value_index=11;
                }  else {
                  ls = new LayoutStructure(structId, nextId, childId, chanId, rs.getString(7),rs.getString(8),rs.getString(9));
                  name_index=10;
                  value_index=11;
                }
                layoutStructure.put(new Integer(structId), ls);
                lastStructId = structId;
                if (!ls.isChannel()) {
                  ls.addFolderData(temp5, temp6); // Plug in saved column values
                }
                if (databaseMetadata.supportsOuterJoins()) {
                  // consume the joined parameter rows for this struct id
                  do {
                    String name = rs.getString(name_index);
                    String value = rs.getString(value_index); // Oracle JDBC requires us to do this for longs
                    if (name != null) { // may not be there because of the join
                      ls.addParameter(name, value);
                    }
                    if (!rs.next()) {
                      break readLayout;
                    }
                    structId = rs.getInt(1);
                    if (rs.wasNull()) {
                      structId = 0;
                    }
                  } while (structId == lastStructId);
                } else { // Do second SELECT later on for structure parameters
                  if (ls.isChannel()) {
                    structChanIds.append(sepChar + ls.getChanId());
                    sepChar = ",";
                  }
                  if (rs.next()) {
                    structId = rs.getInt(1);
                    if (rs.wasNull()) {
                      structId = 0;
                    }
                  } else {
                    break readLayout;
                  }
                }
              } // while
            }
          } finally {
            rs.close();
          }

          if (!databaseMetadata.supportsOuterJoins()) { // Pick up structure parameters
            // first, get the struct ids for the channels
            String sql = "SELECT STRUCT_ID FROM UP_LAYOUT_STRUCT WHERE USER_ID=" + userId + " AND LAYOUT_ID=" + layoutId +
              " AND CHAN_ID IN (" + structChanIds.toString() + ") ORDER BY STRUCT_ID";

            if (log.isDebugEnabled())
              log.debug("RDBMUserLayoutStore::getUserLayout(): " + sql);
            StringBuffer structIdsSB = new StringBuffer( "" );
            String sep = "";
            rs = stmt.executeQuery(sql);
            try {
              // use the results to build a correct list of struct ids to look for
              while( rs.next()) {
                structIdsSB.append(sep + rs.getString(1));
                sep = ",";
              }// while
            } finally {
              rs.close();
            } // be a good doobie

            sql = "SELECT STRUCT_ID, STRUCT_PARM_NM,STRUCT_PARM_VAL FROM UP_LAYOUT_PARAM WHERE USER_ID=" + userId + " AND LAYOUT_ID=" + layoutId +
              " AND STRUCT_ID IN (" + structIdsSB.toString() + ") ORDER BY STRUCT_ID";
            if (log.isDebugEnabled())
              log.debug("RDBMUserLayoutStore::getUserLayout(): " + sql);
            rs = stmt.executeQuery(sql);
            try {
              if (rs.next()) {
                int structId = rs.getInt(1);
                // group consecutive rows (ordered by STRUCT_ID) onto their LayoutStructure
                readParm: while(true) {
                  LayoutStructure ls = (LayoutStructure)layoutStructure.get(new Integer(structId));
                  int lastStructId = structId;
                  do {
                    ls.addParameter(rs.getString(2), rs.getString(3));
                    if (!rs.next()) {
                      break readParm;
                    }
                  } while ((structId = rs.getInt(1)) == lastStructId);
                }
              }
            } finally {
              rs.close();
            }
          }

          if (layoutStructure.size() > 0) { // We have a layout to work with
            createLayout(layoutStructure, doc, root, firstStructId);

            layoutStructure.clear();

            if (log.isDebugEnabled()) {
              long stopTime = System.currentTimeMillis();
              log.debug("RDBMUserLayoutStore::getUserLayout(): Layout document for user " + userId + " took " +
                (stopTime - startTime) + " milliseconds to create");
            }

            doc.appendChild(root);
          }
        } finally {
          stmt.close();
          insertStmt.close();
        }

        return  doc;
      }
    });
  }

  /**
   * Load a single profile row by numeric id; falls back to the system user's
   * profile for missing stylesheet ids (likely a data issue during export).
   */
  public IUserProfile getUserProfileById (final IPerson person, final int profileId) {
    final int userId = person.getID();
    return jdbcOperations.execute(new ConnectionCallback<IUserProfile>() {
      @Override
      public IUserProfile doInConnection(Connection con) throws SQLException, DataAccessException {
        Statement stmt = con.createStatement();
        try {
          String sQuery = "SELECT USER_ID, PROFILE_ID, PROFILE_FNAME, PROFILE_NAME, DESCRIPTION, LAYOUT_ID, STRUCTURE_SS_ID, THEME_SS_ID FROM UP_USER_PROFILE WHERE USER_ID="
              + userId + " AND PROFILE_ID=" + profileId;
          if (log.isDebugEnabled())
            log.debug("RDBMUserLayoutStore::getUserProfileById(): " + sQuery);
          ResultSet rs = stmt.executeQuery(sQuery);
          try {
            if (rs.next()) {
              String temp2 = rs.getString(3);
              String temp3 = rs.getString(4);
              String temp4 = rs.getString(5);
              int layoutId = rs.getInt(6);
              if (rs.wasNull()) {
                layoutId = 0;
              }
              int structSsId = rs.getInt(7);
              if (rs.wasNull()) {
                // This is probably a data issue and probably an export operation; defer to the system user...
                if (!person.equals(getSystemUser())) {
                  structSsId = getSystemProfileByFname(temp2).getStructureStylesheetId();
                } else {
                  String msg = "The system user profile has no structure stylesheet Id.";
                  throw new IllegalStateException(msg);
                }
              }
              int themeSsId = rs.getInt(8);
              if (rs.wasNull()) {
                // This is probably a data issue and probably an export operation; defer to the system user...
                if (!person.equals(getSystemUser())) {
                  themeSsId = getSystemProfileByFname(temp2).getThemeStylesheetId();
                } else {
                  String msg = "The system user profile has no theme stylesheet Id.";
                  throw new IllegalStateException(msg);
                }
              }
              IUserProfile userProfile = new UserProfile(profileId, temp2, temp3,temp4, layoutId, structSsId, themeSsId);
              final Locale[] userLocales = localeStore.getUserLocales(person);
              userProfile.setLocaleManager(new LocaleManager(person, userLocales));
              return userProfile;
            }
            else {
              throw new RuntimeException("Unable to find User Profile for user " + userId + " and profile " + profileId);
            }
          } finally {
            rs.close();
          }
        } finally {
          stmt.close();
        }
      }
    });
  }

  // Thread-local cache used only during import/export to avoid re-reading profiles
  private final ThreadLocal<Cache<Tuple<String, String>, UserProfile>> profileCacheHolder = new ThreadLocal<Cache<Tuple<String,String>,UserProfile>>();
  /**
   * Cache used during import/export operations
   */
  public void setProfileImportExportCache(Cache<Tuple<String, String>, UserProfile> profileCache) {
    if (profileCache == null) {
      this.profileCacheHolder.remove();
    }
    else {
      this.profileCacheHolder.set(profileCache);
    }
  }
  /** @return the thread-local import/export profile cache, or null when unset. */
  private Cache<Tuple<String, String>, UserProfile> getProfileImportExportCache() {
    return this.profileCacheHolder.get();
  }

  /**
   * Load a profile by functional name. If the user has no such profile the
   * template user's profile is cloned (recursively) and persisted for them.
   * Results are memoized in the thread-local import/export cache when present.
   */
  public UserProfile getUserProfileByFname (final IPerson person, final String profileFname) {
    Tuple<String, String> key = null;
    final Cache<Tuple<String, String>, UserProfile> profileCache = getProfileImportExportCache();
    if (profileCache != null) {
      key = new Tuple<String, String>(person.getUserName(), profileFname);
      final UserProfile profile = profileCache.getIfPresent(key);
      if (profile != null) {
        return profile;
      }
    }

    log.debug("Getting profile " + profileFname + " for user " + person.getID());
    final int userId = person.getID();
    final UserProfile userProfile = jdbcOperations.execute(new ConnectionCallback<UserProfile>() {
      @Override
      public UserProfile doInConnection(Connection con) throws SQLException, DataAccessException {
        String query = "SELECT USER_ID, PROFILE_ID, PROFILE_NAME, DESCRIPTION, " +
            "LAYOUT_ID, STRUCTURE_SS_ID, THEME_SS_ID FROM UP_USER_PROFILE WHERE " +
            "USER_ID=? AND PROFILE_FNAME=?";
        PreparedStatement pstmt = con.prepareStatement(query);
        pstmt.setInt(1, userId);
        pstmt.setString(2, profileFname);
        try {
          if (log.isDebugEnabled())
            log.debug("RDBMUserLayoutStore::getUserProfileByFname(): " + query + " userId: " + userId + " profileFname: " + profileFname);
          ResultSet rs = pstmt.executeQuery();
          try {
            if (rs.next()) {
              int profileId = rs.getInt(2);
              String profileName = rs.getString(3);
              String profileDesc = rs.getString(4);
              int layoutId = rs.getInt(5);
              if (rs.wasNull()) {
                layoutId = 0;
              }
              int structSsId = rs.getInt(6);
              if (rs.wasNull()) {
                // This is probably a data issue and probably an export operation; defer to the system user...
                if (!person.equals(getSystemUser())) {
                  structSsId = getSystemProfileByFname(profileFname).getStructureStylesheetId();
                } else {
                  String msg = "The system user profile has no structure stylesheet Id.";
                  throw new IllegalStateException(msg);
                }
              }
              int themeSsId = rs.getInt(7);
              if (rs.wasNull()) {
                // This is probably a data issue and probably an export operation; defer to the system user...
                if (!person.equals(getSystemUser())) {
                  themeSsId = getSystemProfileByFname(profileFname).getThemeStylesheetId();
                } else {
                  String msg = "The system user profile has no theme stylesheet Id.";
                  throw new IllegalStateException(msg);
                }
              }
              UserProfile userProfile = new UserProfile(profileId, profileFname, profileName, profileDesc, layoutId, structSsId, themeSsId);
              final Locale[] userLocales = localeStore.getUserLocales(person);
              userProfile.setLocaleManager(new LocaleManager(person, userLocales));
              return userProfile;
            }

            /* Try to copy the template profile. */
            log.debug("Copying template profile " + profileFname + " to user " + person.getID());
            rs.close();
            pstmt.close();
            pstmt = con.prepareStatement("SELECT USER_DFLT_USR_ID FROM UP_USER WHERE USER_ID=?");
            pstmt.setInt(1, person.getID());
            rs = pstmt.executeQuery();
            if(rs.next()) {
              int defaultProfileUser = rs.getInt(1);
              if (rs.wasNull()) {
                throw new RuntimeException("Need to clone the '" + profileFname + "' profile from template user for " + person + " but they have no template user");
              }

              IPerson defaultProfilePerson = new PersonImpl();
              defaultProfilePerson.setID(defaultProfileUser);
              // guard against a template user pointing at itself (infinite recursion)
              if(defaultProfilePerson.getID() != person.getID()) {
                UserProfile templateProfile = getUserProfileByFname(defaultProfilePerson,profileFname);
                if(templateProfile != null) {
                  UserProfile newUserProfile = new UserProfile(templateProfile);
                  final Locale[] userLocales = localeStore.getUserLocales(person);
                  newUserProfile.setLayoutId(0);
                  newUserProfile = addUserProfile(person,newUserProfile);

                  newUserProfile.setLocaleManager(new LocaleManager(person, userLocales));
                  return newUserProfile;
                }
              }
            }

            throw new RuntimeException("Unable to find User Profile for userId " + userId + " and profile " + profileFname);
          } finally {
            rs.close();
          }
        } finally {
          pstmt.close();
        }
      }
    });

    if (profileCache != null && key != null) {
      profileCache.put(key, userProfile);
    }

    return userProfile;
  }

  /** Return all of a user's profiles keyed by profile id. */
  public Hashtable getUserProfileList (final IPerson person) {
    final int userId = person.getID();
    return jdbcOperations.execute(new ConnectionCallback<Hashtable>() {
      @Override
      public Hashtable doInConnection(Connection con) throws SQLException, DataAccessException {
        Hashtable<Integer,UserProfile> pv = new Hashtable<Integer,UserProfile>();
        Statement stmt = con.createStatement();
        try {
          String sQuery = "SELECT USER_ID, PROFILE_ID, PROFILE_FNAME, PROFILE_NAME, DESCRIPTION, LAYOUT_ID, STRUCTURE_SS_ID, THEME_SS_ID FROM UP_USER_PROFILE WHERE USER_ID=" + userId;
          if (log.isDebugEnabled())
            log.debug("RDBMUserLayoutStore::getUserProfileList(): " + sQuery);
          ResultSet rs = stmt.executeQuery(sQuery);
          try {
            while (rs.next()) {
              // NULL numeric columns come back as 0 via wasNull() normalization
              int layoutId = rs.getInt(6);
              if (rs.wasNull()) {
                layoutId = 0;
              }
              int structSsId = rs.getInt(7);
              if (rs.wasNull()) {
                structSsId = 0;
              }
              int themeSsId = rs.getInt(8);
              if (rs.wasNull()) {
                themeSsId = 0;
              }

              UserProfile upl = new UserProfile(rs.getInt(2), rs.getString(3), rs.getString(4),
                  rs.getString(5), layoutId, structSsId, themeSsId);
              pv.put(new Integer(upl.getProfileId()), upl);
            }
          } finally {
            rs.close();
          }
        } finally {
          stmt.close();
        }
        return pv;
      }
    });
  }

  // Subclass hooks: build a DOM element for a layout node, and persist one node
  // (plus its parameters) via the supplied prepared statements.
  protected abstract Element getStructure(Document doc, LayoutStructure ls);

  protected abstract int saveStructure (Node node, PreparedStatement structStmt, PreparedStatement parmStmt) throws SQLException;

  /**
   * Map a browser user-agent string to a profile for this user, replacing any
   * existing mapping for the same user-agent. Runs in a transaction.
   */
  public void setUserBrowserMapping (final IPerson person, final String userAgentArg, final int profileId) {
    final int userId = person.getID();
    this.transactionOperations.execute(new TransactionCallback<Object>() {
      @Override
      public Object doInTransaction(TransactionStatus status) {
        return jdbcOperations.execute(new ConnectionCallback<Object>() {
          @Override
          public Object doInConnection(Connection con) throws SQLException, DataAccessException {
            final String userAgent;
            // USER_AGENT column holds at most 255 characters; trim to fit
            if (userAgentArg.length() > 255){
              userAgent = userAgentArg.substring(0,254);
              log.debug("userAgent trimmed to 255 characters. userAgent: "+userAgentArg);
            }
            else {
              userAgent = userAgentArg;
            }

            // remove the old mapping and add the new one
            PreparedStatement ps = null;
            try{
              ps = con.prepareStatement("DELETE FROM UP_USER_UA_MAP WHERE USER_ID=? AND USER_AGENT=?");
              ps.setInt(1,userId);
              ps.setString(2,userAgent);
              ps.executeUpdate();
            }finally{
              try{
                ps.close();
              }catch(Exception e){
                //ignore
              }
            }
            try{
              log.debug("writing to UP_USER_UA_MAP: userId: "+userId+", userAgent: "+userAgent+", profileId: "+profileId);
              ps = con.prepareStatement("INSERT INTO UP_USER_UA_MAP (USER_ID,USER_AGENT,PROFILE_ID) VALUES (?,?,?)");
              ps.setInt(1,userId);
              ps.setString(2,userAgent);
              ps.setInt(3,profileId);
              ps.executeUpdate();
            }finally{
              try{
                ps.close();
              }catch(Exception e){
                //ignore
              }
            }

            return null;
          }
        });
      }
    });
  }

  /**
   * Save the user layout.
   * Deletes the existing UP_LAYOUT_PARAM/UP_LAYOUT_STRUCT rows, re-inserts the
   * structure from layoutXML, and ensures a matching UP_USER_LAYOUT row exists
   * (copying from the template user when needed). Runs in a transaction.
   * @param person
   * @param profile
   * @param layoutXML
   * @throws Exception
   */
  public void setUserLayout(final IPerson person, final IUserProfile profile, final Document layoutXML, final boolean channelsAdded) {
    final long startTime = System.currentTimeMillis();
    final int userId = person.getID();
    final int profileId = profile.getProfileId();

    this.transactionOperations.execute(new TransactionCallback<Object>() {
      @Override
      public Object doInTransaction(TransactionStatus status) {
        return jdbcOperations.execute(new ConnectionCallback<Object>() {
          @Override
          public Object doInConnection(Connection con) throws SQLException, DataAccessException {
            int layoutId = 0;
            ResultSet rs;

            // Eventually we want to be able to just get layoutId from the
            // profile, but because of the template user layouts we have to do this for now ...
            layoutId = getLayoutID(userId, profileId);

            boolean firstLayout = false;
            if (layoutId == 0) {
              // First personal layout for this user/profile
              layoutId = 1;
              firstLayout = true;
            }

            String sql = "DELETE FROM UP_LAYOUT_PARAM WHERE USER_ID=? AND LAYOUT_ID=?";
            PreparedStatement pstmt = con.prepareStatement(sql);
            try {
              pstmt.clearParameters();
              pstmt.setInt(1, userId);
              pstmt.setInt(2, layoutId);
              if (log.isDebugEnabled())
                log.debug(sql);
              pstmt.executeUpdate();
            } finally {
              pstmt.close();
            }

            sql = "DELETE FROM UP_LAYOUT_STRUCT WHERE USER_ID=? AND LAYOUT_ID=?";
            pstmt = con.prepareStatement(sql);
            try {
              pstmt.clearParameters();
              pstmt.setInt(1, userId);
              pstmt.setInt(2, layoutId);
              if (log.isDebugEnabled())
                log.debug(sql);
              pstmt.executeUpdate();
            } finally {
              pstmt.close();
            }

            PreparedStatement structStmt = con.prepareStatement("INSERT INTO UP_LAYOUT_STRUCT " +
                "(USER_ID, LAYOUT_ID, STRUCT_ID, NEXT_STRUCT_ID, CHLD_STRUCT_ID,EXTERNAL_ID,CHAN_ID,NAME,TYPE,HIDDEN,IMMUTABLE,UNREMOVABLE) " +
                "VALUES (" + userId + "," + layoutId + ",?,?,?,?,?,?,?,?,?,?)");

            PreparedStatement parmStmt = con.prepareStatement("INSERT INTO UP_LAYOUT_PARAM " +
                "(USER_ID, LAYOUT_ID, STRUCT_ID, STRUCT_PARM_NM, STRUCT_PARM_VAL) " +
                "VALUES (" + userId + "," + layoutId + ",?,?,?)");

            int firstStructId;
            try {
              // persist the tree; subclasses implement per-node writes
              firstStructId = saveStructure(layoutXML.getFirstChild().getFirstChild(), structStmt, parmStmt);
            } finally {
              structStmt.close();
              parmStmt.close();
            }

            //Check to see if the user has a matching layout
            sql = "SELECT * FROM UP_USER_LAYOUT WHERE USER_ID=? AND LAYOUT_ID=?";
            pstmt = con.prepareStatement(sql);
            try {
              pstmt.clearParameters();
              pstmt.setInt(1, userId);
              pstmt.setInt(2, layoutId);
              if (log.isDebugEnabled())
                log.debug(sql);
              rs = pstmt.executeQuery();
              try {
                if (!rs.next()) {
                  // If not, the default user is found and the layout rows from the default user are copied for the current user.
                  int defaultUserId;
                  sql = "SELECT USER_DFLT_USR_ID FROM UP_USER WHERE USER_ID=?";
                  PreparedStatement pstmt2 = con.prepareStatement(sql);
                  try {
                    pstmt2.clearParameters();
                    pstmt2.setInt(1, userId);
                    if (log.isDebugEnabled())
                      log.debug(sql);
                    ResultSet rs2 = null;
                    try {
                      rs2 = pstmt2.executeQuery();
                      rs2.next();
                      defaultUserId = rs2.getInt(1);
                    } finally {
                      rs2.close();
                    }
                  } finally {
                    pstmt2.close();
                  }

                  // Add to UP_USER_LAYOUT
                  sql = "SELECT USER_ID,LAYOUT_ID,LAYOUT_TITLE,INIT_STRUCT_ID FROM UP_USER_LAYOUT WHERE USER_ID=?";
                  pstmt2 = con.prepareStatement(sql);
                  try {
                    pstmt2.clearParameters();
                    pstmt2.setInt(1, defaultUserId);
                    if (log.isDebugEnabled())
                      log.debug(sql);
                    ResultSet rs2 = pstmt2.executeQuery();
                    try {
                      if (rs2.next()) {
                        // There is a row for this user's template user...
                        sql = "INSERT INTO UP_USER_LAYOUT (USER_ID, LAYOUT_ID, LAYOUT_TITLE, INIT_STRUCT_ID) VALUES (?,?,?,?)";
                        PreparedStatement pstmt3 = con.prepareStatement(sql);
                        try {
                          pstmt3.clearParameters();
                          pstmt3.setInt(1, userId);
                          pstmt3.setInt(2, rs2.getInt("LAYOUT_ID"));
                          pstmt3.setString(3, rs2.getString("LAYOUT_TITLE"));
                          pstmt3.setInt(4, rs2.getInt("INIT_STRUCT_ID"));
                          if (log.isDebugEnabled())
                            log.debug(sql);
                          pstmt3.executeUpdate();
                        } finally {
                          pstmt3.close();
                        }
                      } else {
                        // We can't rely on the template user, but we still need a row...
                        sql = "INSERT INTO UP_USER_LAYOUT (USER_ID, LAYOUT_ID, LAYOUT_TITLE, INIT_STRUCT_ID) VALUES (?,?,?,?)";
                        PreparedStatement pstmt3 = con.prepareStatement(sql);
                        try {
                          pstmt3.clearParameters();
                          pstmt3.setInt(1, userId);
                          pstmt3.setInt(2, layoutId);
                          pstmt3.setString(3, "default layout");
                          pstmt3.setInt(4, 1);
                          if (log.isDebugEnabled())
                            log.debug(sql);
                          pstmt3.executeUpdate();
                        } finally {
                          pstmt3.close();
                        }
                      }
                    } finally {
                      rs2.close();
                    }
                  } finally {
                    pstmt2.close();
                  }
                }
              } finally {
                rs.close();
              }
            } finally {
              pstmt.close();
            }

            //Update the users layout with the correct inital structure ID
            sql = "UPDATE UP_USER_LAYOUT SET INIT_STRUCT_ID=? WHERE USER_ID=? AND LAYOUT_ID=?";
            pstmt = con.prepareStatement(sql);
            try {
              pstmt.clearParameters();
              pstmt.setInt(1, firstStructId);
              pstmt.setInt(2, userId);
              pstmt.setInt(3, layoutId);
              if (log.isDebugEnabled())
                log.debug(sql);
              pstmt.executeUpdate();
            } finally {
              pstmt.close();
            }

            // Update the last time the user saw the list of available channels
            if (channelsAdded) {
              sql = "UPDATE UP_USER SET LST_CHAN_UPDT_DT=? WHERE USER_ID=?";
              pstmt = con.prepareStatement(sql);
              try {
                pstmt.clearParameters();
                pstmt.setDate(1, new java.sql.Date(System.currentTimeMillis()));
                pstmt.setInt(2, userId);
                log.debug(sql);
                pstmt.executeUpdate();
              } finally {
                pstmt.close();
              }
            }

            if (firstLayout) {
              int defaultUserId;
              int defaultLayoutId;
              // Have to copy some of data over from the default user
              sql = "SELECT USER_DFLT_USR_ID,USER_DFLT_LAY_ID FROM UP_USER WHERE USER_ID=?";
              pstmt = con.prepareStatement(sql);
              try {
                pstmt.clearParameters();
                pstmt.setInt(1, userId);
                log.debug(sql);
                rs = pstmt.executeQuery();
                try {
                  rs.next();
                  defaultUserId = rs.getInt(1);
                  defaultLayoutId = rs.getInt(2);
                } finally {
                  rs.close();
                }
              } finally {
                pstmt.close();
              }

              sql = "UPDATE UP_USER_PROFILE SET LAYOUT_ID=1 WHERE USER_ID=?
AND PROFILE_ID=?"; pstmt = con.prepareStatement(sql); try { pstmt.clearParameters(); pstmt.setInt(1, userId); pstmt.setInt(2, profileId); log.debug(sql); pstmt.executeUpdate(); } finally { pstmt.close(); } } return null; } }); } }); if (log.isDebugEnabled()) { long stopTime = System.currentTimeMillis(); log.debug("RDBMUserLayoutStore::setUserLayout(): Layout document for user " + userId + " took " + (stopTime - startTime) + " milliseconds to save"); } } public void updateUserProfile (final IPerson person, final IUserProfile profile) { final int userId = person.getID(); this.transactionOperations.execute(new TransactionCallback<Object>() { @Override public Object doInTransaction(TransactionStatus status) { return jdbcOperations.execute(new ConnectionCallback<Object>() { @Override public Object doInConnection(Connection con) throws SQLException, DataAccessException { String query = "UPDATE UP_USER_PROFILE SET LAYOUT_ID=?,THEME_SS_ID=?,STRUCTURE_SS_ID=?," + "DESCRIPTION=?,PROFILE_NAME=?, PROFILE_FNAME=? WHERE USER_ID=? 
AND PROFILE_ID=?"; PreparedStatement pstmt = con.prepareStatement(query); pstmt.setInt(1, profile.getLayoutId()); pstmt.setInt(2, profile.getThemeStylesheetId()); pstmt.setInt(3, profile.getStructureStylesheetId()); pstmt.setString(4, profile.getProfileDescription()); pstmt.setString(5, profile.getProfileName()); pstmt.setString(6, profile.getProfileFname()); pstmt.setInt(7, userId); pstmt.setInt(8, profile.getProfileId()); try { if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::updateUserProfile() : " + query + " layout_id: " + profile.getLayoutId() + " theme_ss_id: " + profile.getThemeStylesheetId() + " structure_ss_id: " + profile.getStructureStylesheetId() + " description: " + profile.getProfileDescription() + " name: " + profile.getProfileName() + " user_id: " + userId + " fname: " + profile.getProfileFname()); pstmt.execute(); } finally { pstmt.close(); } return null; } }); } }); } public void setSystemBrowserMapping (String userAgent, int profileId) { this.setUserBrowserMapping(this.getSystemUser(), userAgent, profileId); } private String getSystemBrowserMapping (String userAgent) { return getUserBrowserMapping(this.getSystemUser(), userAgent); } public IUserProfile getUserProfile (IPerson person, String userAgent) { String profileFname = getUserBrowserMapping(person, userAgent); if (profileFname == null) return null; return this.getUserProfileByFname(person, profileFname); } public IUserProfile getSystemProfile (String userAgent) { String profileFname = getSystemBrowserMapping(userAgent); if (profileFname == null) return null; IUserProfile up = this.getUserProfileByFname(this.getSystemUser(), profileFname); up.setSystemProfile(true); return up; } public IUserProfile getSystemProfileById (int profileId) { IUserProfile up = this.getUserProfileById(this.getSystemUser(), profileId); up.setSystemProfile(true); return up; } public IUserProfile getSystemProfileByFname (String profileFname) { IUserProfile up = 
this.getUserProfileByFname(this.getSystemUser(), profileFname);
        up.setSystemProfile(true);
        return up;
    }

    /**
     * Returns the profile list of the special "system" user, with every
     * profile in the table flagged as a system profile.
     *
     * @return Hashtable of profile id to IUserProfile for the system user
     */
    public Hashtable getSystemProfileList () {
        Hashtable pl = this.getUserProfileList(this.getSystemUser());
        for (Enumeration e = pl.elements(); e.hasMoreElements();) {
            IUserProfile up = (IUserProfile)e.nextElement();
            up.setSystemProfile(true);
        }
        return pl;
    }

    /** Updates a profile owned by the system user (delegates to updateUserProfile). */
    public void updateSystemProfile (IUserProfile profile) {
        this.updateUserProfile(this.getSystemUser(), profile);
    }

    /** Adds a profile for the system user (delegates to addUserProfile). */
    public IUserProfile addSystemProfile (IUserProfile profile) {
        return addUserProfile(this.getSystemUser(), profile);
    }

    /** Deletes a profile owned by the system user (delegates to deleteUserProfile). */
    public void deleteSystemProfile (int profileId) {
        this.deleteUserProfile(this.getSystemUser(), profileId);
    }

    /**
     * Minimal IPerson stand-in representing the portal's "system" account.
     * Only getID() and getFullName() return real values; every other accessor
     * returns null and every mutator is a no-op, so this object must only be
     * used as a user-id holder for profile lookups.
     */
    private static class SystemUser implements IPerson {

        // Database USER_ID of the 'system' row in UP_USER; immutable.
        private final int systemUserId;

        public SystemUser(int systemUserId) {
            this.systemUserId = systemUserId;
        }

        // Intentionally a no-op: the system user's id is fixed at construction.
        public void setID(int sID) {
        }

        public int getID() {
            return this.systemUserId;
        }

        public String getUserName() {
            return null;
        }

        public void setUserName(String userName) {
        }

        public void setFullName(String sFullName) {
        }

        public String getFullName() {
            return "uPortal System Account";
        }

        // No attributes are stored for the system account; all lookups return null.
        public Object getAttribute(String key) {
            return null;
        }

        public Object[] getAttributeValues(String key) {
            return null;
        }

        public Map<String,List<Object>> getAttributeMap() {
            return null;
        }

        public void setAttribute(String key, Object value) {
        }

        public void setAttribute(String key, List<Object> values) {
        }

        public void setAttributes(Map attrs) {
        }

        public Enumeration getAttributes() {
            return null;
        }

        public Enumeration getAttributeNames() {
            return null;
        }

        public boolean isGuest() {
            return (false);
        }

        public ISecurityContext getSecurityContext() {
            return (null);
        }

        public void setSecurityContext(ISecurityContext context) {
        }

        public EntityIdentifier getEntityIdentifier() {
            return null;
        }

        public void setEntityIdentifier(EntityIdentifier ei) {
        }

        public String getName() {
            return null;
        }
    }

    /**
     * Returns the current layout ID for the user and profile. 
If the profile doesn't exist or the * layout_id field is null 0 is returned. * * @param userId The userId for the profile * @param profileId The profileId for the profile * @return The layout_id field or 0 if it does not exist or is null * @throws SQLException */ protected int getLayoutID(final int userId, final int profileId) throws SQLException { return jdbcOperations.execute(new ConnectionCallback<Integer>() { @Override public Integer doInConnection(Connection con) throws SQLException, DataAccessException { String query = "SELECT LAYOUT_ID " + "FROM UP_USER_PROFILE " + "WHERE USER_ID=? AND PROFILE_ID=?"; int layoutId = 0; PreparedStatement pstmt = con.prepareStatement(query); try { final int u = userId; final int p = profileId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getLayoutID(userId=" + u + ", profileId=" + p + " ): " + query); pstmt.setInt(1, u); pstmt.setInt(2, p); ResultSet rs = pstmt.executeQuery(); try { if (rs.next()) { layoutId = rs.getInt(1); if (rs.wasNull()) { layoutId = 0; } } // find the layout used by the default profle for this user query = "SELECT LAYOUT_ID FROM UP_USER_PROFILE " + "WHERE USER_ID=? AND PROFILE_FNAME='" + DEFAULT_LAYOUT_FNAME + "'"; pstmt = con.prepareStatement(query); pstmt.setInt(1, userId); rs = pstmt.executeQuery(); int intendedLayoutId = 0; if (rs.next()) { intendedLayoutId = rs.getInt("LAYOUT_ID"); if (rs.wasNull()) { intendedLayoutId = 0; } } // check to see if this profile for the current user // has already used the requested layout query = "SELECT LAYOUT_ID FROM UP_USER_PROFILE WHERE " + "USER_ID=? AND LAYOUT_ID=? AND PROFILE_ID=?"; pstmt = con.prepareStatement(query); pstmt.setInt(1, userId); pstmt.setInt(2, intendedLayoutId); pstmt.setInt(3, profileId); rs = pstmt.executeQuery(); if (!rs.next()) { // if the layout's not already been used, update the profile to // point to that layout query = "UPDATE UP_USER_PROFILE SET LAYOUT_ID=? WHERE " + "USER_ID=? 
AND PROFILE_ID=?"; pstmt = con.prepareStatement(query); pstmt.setInt(1, intendedLayoutId); pstmt.setInt(2, userId); pstmt.setInt(3, profileId); pstmt.execute(); layoutId = intendedLayoutId; } } finally { rs.close(); } } finally { pstmt.close(); } return layoutId; } }); } /* (non-Javadoc) * @see org.jasig.portal.layout.IUserLayoutStore#importLayout(org.dom4j.Element) */ public abstract void importLayout(org.dom4j.Element layout); }
uportal-war/src/main/java/org/jasig/portal/layout/simple/RDBMUserLayoutStore.java
/** * Licensed to Jasig under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Jasig licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.portal.layout.simple; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Enumeration; import java.util.HashMap; import java.util.Hashtable; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Vector; import javax.annotation.Resource; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jasig.portal.EntityIdentifier; import org.jasig.portal.IUserProfile; import org.jasig.portal.UserProfile; import org.jasig.portal.i18n.ILocaleStore; import org.jasig.portal.i18n.LocaleManager; import org.jasig.portal.layout.IUserLayoutStore; import org.jasig.portal.layout.LayoutStructure; import org.jasig.portal.layout.dao.IStylesheetDescriptorDao; import org.jasig.portal.portlet.registry.IPortletDefinitionRegistry; import org.jasig.portal.rdbm.DatabaseMetaDataImpl; import org.jasig.portal.rdbm.IDatabaseMetadata; import org.jasig.portal.rdbm.IJoinQueryString; import org.jasig.portal.security.IPerson; import org.jasig.portal.security.IPersonManager; import org.jasig.portal.security.ISecurityContext; import 
org.jasig.portal.security.provider.PersonImpl; import org.jasig.portal.spring.locator.CounterStoreLocator; import org.jasig.portal.utils.DocumentFactory; import org.jasig.portal.utils.ICounterStore; import org.jasig.portal.utils.Tuple; import org.jasig.portal.utils.threading.SingletonDoubleCheckedCreator; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.dao.DataAccessException; import org.springframework.jdbc.core.ConnectionCallback; import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.JdbcTemplate; import org.springframework.jdbc.support.SQLErrorCodeSQLExceptionTranslator; import org.springframework.jdbc.support.SQLExceptionTranslator; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.DefaultTransactionDefinition; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionOperations; import org.springframework.transaction.support.TransactionTemplate; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import com.google.common.cache.Cache; /** * SQL implementation for the 2.x relational database model. * * Prior to uPortal 2.5, this class existed in the org.jasig.portal package. It was * moved to its present package to express that it is part of the * Simple Layout Manager implementation. 
*
 * @author George Lindholm
 * @version $Revision$ $Date$
 */
public abstract class RDBMUserLayoutStore implements IUserLayoutStore, InitializingBean {

    protected final Log log = LogFactory.getLog(getClass());

    // Counter name used when generating new profile ids; never reassigned,
    // so declared final (was a mutable static in the original).
    private static final String PROFILE_TABLE = "UP_USER_PROFILE";

    //This class is instantiated ONCE so NO class variables can be used to keep state between calls

    // Prefixes distinguishing channel vs folder structure ids in layout XML.
    protected static final String channelPrefix = "n";
    protected static final String folderPrefix = "s";

    protected TransactionOperations transactionOperations;
    protected TransactionOperations nextStructTransactionOperations;
    protected JdbcOperations jdbcOperations;

    private ILocaleStore localeStore;
    protected IDatabaseMetadata databaseMetadata;
    protected IPersonManager personManager;
    protected ICounterStore counterStore;
    protected IPortletDefinitionRegistry portletDefinitionRegistry;
    protected IStylesheetDescriptorDao stylesheetDescriptorDao;
    protected SQLExceptionTranslator exceptionTranslator;

    // I18n property
    protected static final boolean localeAware = LocaleManager.isLocaleAware();

    @Autowired
    public void setLocaleStore(ILocaleStore localeStore) {
        this.localeStore = localeStore;
    }

    @Autowired
    public void setStylesheetDescriptorDao(IStylesheetDescriptorDao stylesheetDescriptorDao) {
        this.stylesheetDescriptorDao = stylesheetDescriptorDao;
    }

    /**
     * Builds the default transaction template plus a REQUIRES_NEW template
     * used when reserving the next structure id in an independent transaction.
     */
    @Autowired
    public void setPlatformTransactionManager(@Qualifier("PortalDb") PlatformTransactionManager platformTransactionManager) {
        this.transactionOperations = new TransactionTemplate(platformTransactionManager);

        final DefaultTransactionDefinition nextStructTransactionDefinition = new DefaultTransactionDefinition();
        nextStructTransactionDefinition.setPropagationBehavior(DefaultTransactionDefinition.PROPAGATION_REQUIRES_NEW);
        this.nextStructTransactionOperations = new TransactionTemplate(platformTransactionManager, nextStructTransactionDefinition);
    }

    @Resource(name="PortalDb")
    public void setDataSource(DataSource dataSource) {
        this.jdbcOperations = new JdbcTemplate(dataSource);
this.exceptionTranslator = new SQLErrorCodeSQLExceptionTranslator(dataSource); } @Autowired public void setDatabaseMetadata(IDatabaseMetadata databaseMetadata) { this.databaseMetadata = databaseMetadata; } @Autowired public void setPersonManager(IPersonManager personManager) { this.personManager = personManager; } @Autowired public void setCounterStore(ICounterStore counterStore) { this.counterStore = counterStore; } @Autowired public void setPortletDefinitionRegistry(IPortletDefinitionRegistry portletDefinitionRegistry) { this.portletDefinitionRegistry = portletDefinitionRegistry; } @Override public void afterPropertiesSet() throws Exception { if (this.databaseMetadata.supportsOuterJoins()) { final IJoinQueryString joinQuery = this.databaseMetadata.getJoinQuery(); if (joinQuery instanceof DatabaseMetaDataImpl.JdbcDb) { joinQuery.addQuery("layout", "{oj UP_LAYOUT_STRUCT ULS LEFT OUTER JOIN UP_LAYOUT_PARAM USP ON ULS.USER_ID = USP.USER_ID AND ULS.STRUCT_ID = USP.STRUCT_ID} WHERE"); joinQuery.addQuery("ss_struct", "{oj UP_SS_STRUCT USS LEFT OUTER JOIN UP_SS_STRUCT_PAR USP ON USS.SS_ID=USP.SS_ID} WHERE"); joinQuery.addQuery("ss_theme", "{oj UP_SS_THEME UTS LEFT OUTER JOIN UP_SS_THEME_PARM UTP ON UTS.SS_ID=UTP.SS_ID} WHERE"); } else if (joinQuery instanceof DatabaseMetaDataImpl.PostgreSQLDb) { joinQuery.addQuery("layout", "UP_LAYOUT_STRUCT ULS LEFT OUTER JOIN UP_LAYOUT_PARAM USP ON ULS.USER_ID = USP.USER_ID AND ULS.STRUCT_ID = USP.STRUCT_ID WHERE"); joinQuery.addQuery("ss_struct", "UP_SS_STRUCT USS LEFT OUTER JOIN UP_SS_STRUCT_PAR USP ON USS.SS_ID=USP.SS_ID WHERE"); joinQuery.addQuery("ss_theme", "UP_SS_THEME UTS LEFT OUTER JOIN UP_SS_THEME_PARM UTP ON UTS.SS_ID=UTP.SS_ID WHERE"); } else if (joinQuery instanceof DatabaseMetaDataImpl.OracleDb) { joinQuery.addQuery("layout", "UP_LAYOUT_STRUCT ULS, UP_LAYOUT_PARAM USP WHERE ULS.STRUCT_ID = USP.STRUCT_ID(+) AND ULS.USER_ID = USP.USER_ID(+) AND"); joinQuery.addQuery("ss_struct", "UP_SS_STRUCT USS, UP_SS_STRUCT_PAR USP WHERE 
USS.SS_ID=USP.SS_ID(+) AND"); joinQuery.addQuery("ss_theme", "UP_SS_THEME UTS, UP_SS_THEME_PARM UTP WHERE UTS.SS_ID=UTP.SS_ID(+) AND"); } else { throw new RuntimeException("Unknown database driver"); } } } private final SingletonDoubleCheckedCreator<IPerson> systemPersonCreator = new SingletonDoubleCheckedCreator<IPerson>() { protected IPerson createSingleton(Object... args) { // be sure we only do this once... // Load the "system" user id from the database final int systemUserId = jdbcOperations.queryForInt("SELECT USER_ID FROM UP_USER WHERE USER_NAME = 'system'"); log.info("Found user id " + systemUserId + " for the 'system' user."); return new SystemUser(systemUserId); } }; private final IPerson getSystemUser() { return this.systemPersonCreator.get(); } /** * Add a user profile * @param person * @param profile * @return userProfile * @exception Exception */ public UserProfile addUserProfile (final IPerson person, final IUserProfile profile) { final int userId = person.getID(); // generate an id for this profile return this.jdbcOperations.execute(new ConnectionCallback<UserProfile>() { @Override public UserProfile doInConnection(Connection con) throws SQLException, DataAccessException { String sQuery = null; PreparedStatement pstmt = con.prepareStatement("INSERT INTO UP_USER_PROFILE " + "(USER_ID,PROFILE_ID,PROFILE_FNAME,PROFILE_NAME,STRUCTURE_SS_ID,THEME_SS_ID," + "DESCRIPTION, LAYOUT_ID) VALUES (?,?,?,?,?,?,?,?)"); int profileId = getNextKey(); pstmt.setInt(1, userId); pstmt.setInt(2, profileId); pstmt.setString(3, profile.getProfileFname()); pstmt.setString(4, profile.getProfileName()); pstmt.setInt(5, profile.getStructureStylesheetId()); pstmt.setInt(6, profile.getThemeStylesheetId()); pstmt.setString(7, profile.getProfileDescription()); pstmt.setInt(8, profile.getLayoutId()); sQuery = "INSERT INTO UP_USER_PROFILE (USER_ID,PROFILE_ID,PROFILE_FNAME,PROFILE_NAME,STRUCTURE_SS_ID,THEME_SS_ID,DESCRIPTION, LAYOUT_ID) VALUES (" + userId + ",'" + profileId + ",'" + 
profile.getProfileFname() + "','" + profile.getProfileName() + "'," + profile.getStructureStylesheetId() + "," + profile.getThemeStylesheetId() + ",'" + profile.getProfileDescription() + "', "+profile.getLayoutId()+")"; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::addUserProfile(): " + sQuery); try { pstmt.executeUpdate(); UserProfile newProfile = new UserProfile(); newProfile.setProfileId(profileId); newProfile.setLayoutId(profile.getLayoutId()); newProfile.setLocaleManager(profile.getLocaleManager()); newProfile.setProfileDescription(profile.getProfileDescription()); newProfile.setProfileFname(profile.getProfileFname()); newProfile.setProfileName(profile.getProfileName()); newProfile.setStructureStylesheetId(profile.getStructureStylesheetId()); newProfile.setSystemProfile(false); newProfile.setThemeStylesheetId(profile.getThemeStylesheetId()); return newProfile; } finally { pstmt.close(); } } }); } private int getNextKey() { return CounterStoreLocator.getCounterStore().getNextId(PROFILE_TABLE); } /** * Checks if a channel has been approved * @param approvedDate * @return boolean Channel is approved */ protected static boolean channelApproved(java.util.Date approvedDate) { java.util.Date rightNow = new java.util.Date(); return (approvedDate != null && rightNow.after(approvedDate)); } /** * Create a layout * @param layoutStructure * @param doc * @param root * @param structId * @exception java.sql.SQLException */ protected final void createLayout (HashMap layoutStructure, Document doc, Element root, int structId) throws java.sql.SQLException { while (structId != 0) { LayoutStructure ls = (LayoutStructure) layoutStructure.get(new Integer(structId)); // replaced with call to method in containing class to allow overriding // by subclasses of RDBMUserLayoutStore. // Element structure = ls.getStructureDocument(doc); Element structure = getStructure(doc, ls); root.appendChild(structure); String id = structure.getAttribute("ID"); if (id != null && ! 
id.equals("")) { structure.setIdAttribute("ID", true); } createLayout(layoutStructure, doc, structure, ls.getChildId()); structId = ls.getNextId(); } } /** * convert true/false into Y/N for database * @param value to check * @result boolean */ protected static final boolean xmlBool (String value) { return (value != null && value.equals("true") ? true : false); } public void deleteUserProfile(IPerson person, int profileId) { int userId = person.getID(); deleteUserProfile(userId,profileId); } private void deleteUserProfile(final int userId, final int profileId) { this.jdbcOperations.execute(new ConnectionCallback<Object>() { @Override public Object doInConnection(Connection con) throws SQLException, DataAccessException { Statement stmt = con.createStatement(); try { String sQuery = "DELETE FROM UP_USER_PROFILE WHERE USER_ID=" + userId + " AND PROFILE_ID=" + Integer.toString(profileId); if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::deleteUserProfile() : " + sQuery); stmt.executeUpdate(sQuery); // remove profile mappings sQuery= "DELETE FROM UP_USER_UA_MAP WHERE USER_ID=" + userId + " AND PROFILE_ID=" + Integer.toString(profileId); if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::deleteUserProfile() : " + sQuery); stmt.executeUpdate(sQuery); // remove parameter information sQuery= "DELETE FROM UP_SS_USER_PARM WHERE USER_ID=" + userId + " AND PROFILE_ID=" + Integer.toString(profileId); if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::deleteUserProfile() : " + sQuery); stmt.executeUpdate(sQuery); sQuery= "DELETE FROM UP_SS_USER_ATTS WHERE USER_ID=" + userId + " AND PROFILE_ID=" + Integer.toString(profileId); if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::deleteUserProfile() : " + sQuery); stmt.executeUpdate(sQuery); } finally { stmt.close(); } return null; } }); } /** * Dump a document tree structure on stdout * @param node * @param indent */ public static final void dumpDoc (Node node, String indent) { if (node == null) { return; 
} if (node instanceof Element) { System.err.print(indent + "element: tag=" + ((Element)node).getTagName() + " "); } else if (node instanceof Document) { System.err.print("document:"); } else { System.err.print(indent + "node:"); } System.err.println("name=" + node.getNodeName() + " value=" + node.getNodeValue()); NamedNodeMap nm = node.getAttributes(); if (nm != null) { for (int i = 0; i < nm.getLength(); i++) { System.err.println(indent + " " + nm.item(i).getNodeName() + ": '" + nm.item(i).getNodeValue() + "'"); } System.err.println(indent + "--"); } if (node.hasChildNodes()) { dumpDoc(node.getFirstChild(), indent + " "); } dumpDoc(node.getNextSibling(), indent); } /** * Return the next available channel structure id for a user * @param person * @return the next available channel structure id */ public String generateNewChannelSubscribeId (IPerson person) { return getNextStructId(person, channelPrefix); } /** * Return the next available folder structure id for a user * @param person * @return a <code>String</code> that is the next free structure ID * @exception Exception */ public String generateNewFolderId (IPerson person) { return getNextStructId(person, folderPrefix); } /** * Return the next available structure id for a user * @param person * @param prefix * @return next free structure ID * @exception Exception */ protected String getNextStructId(final IPerson person, final String prefix) { final int userId = person.getID(); return this.nextStructTransactionOperations.execute(new TransactionCallback<String>() { @Override public String doInTransaction(TransactionStatus status) { return jdbcOperations.execute(new ConnectionCallback<String>() { @Override public String doInConnection(Connection con) throws SQLException, DataAccessException { Statement stmt = con.createStatement(); try { String sQuery = "SELECT NEXT_STRUCT_ID FROM UP_USER WHERE USER_ID=" + userId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getNextStructId(): " + sQuery); ResultSet rs 
= stmt.executeQuery(sQuery); int currentStructId; try { if (rs.next()) { currentStructId = rs.getInt(1); } else { throw new SQLException("no rows returned for query [" + sQuery + "]"); } } finally { rs.close(); } int nextStructId = currentStructId + 1; String sUpdate = "UPDATE UP_USER SET NEXT_STRUCT_ID=" + nextStructId + " WHERE USER_ID=" + userId + " AND NEXT_STRUCT_ID=" + currentStructId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getNextStructId(): " + sUpdate); stmt.executeUpdate(sUpdate); return prefix + nextStructId; } finally { stmt.close(); } } }); } }); } /** * Return the Structure ID tag * @param structId * @param chanId * @return ID tag */ protected String getStructId(int structId, int chanId) { if (chanId == 0) { return folderPrefix + structId; } else { return channelPrefix + structId; } } // private helper modules that retreive information from the DOM structure of the description files private String getName (Document descr) { NodeList names = descr.getElementsByTagName("name"); Node name = null; for (int i = names.getLength() - 1; i >= 0; i--) { name = names.item(i); if (name.getParentNode().getNodeName().equals("stylesheetdescription")) break; else name = null; } if (name != null) { return this.getTextChildNodeValue(name); } else { if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getName() : no \"name\" element was found under the \"stylesheetdescription\" node!"); return null; } } private String getRootElementTextValue (Document descr, String elementName) { NodeList names = descr.getElementsByTagName(elementName); Node name = null; for (int i = names.getLength() - 1; i >= 0; i--) { name = names.item(i); if (name.getParentNode().getNodeName().equals("stylesheetdescription")) break; else name = null; } if (name != null) { return this.getTextChildNodeValue(name); } else { if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getRootElementTextValue() : no \"" + elementName + "\" element was found under the 
\"stylesheetdescription\" node!"); return null; } } private String getDescription (Document descr) { NodeList descriptions = descr.getElementsByTagName("description"); Node description = null; for (int i = descriptions.getLength() - 1; i >= 0; i--) { description = descriptions.item(i); if (description.getParentNode().getNodeName().equals("stylesheetdescription")) break; else description = null; } if (description != null) { return this.getTextChildNodeValue(description); } else { if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getDescription() : no \"description\" element was found under the \"stylesheetdescription\" node!"); return null; } } private Vector getVectorOfSimpleTextElementValues (Document descr, String elementName) { Vector v = new Vector(); // find "stylesheetdescription" node, take the first one Element stylesheetdescriptionElement = (Element)(descr.getElementsByTagName("stylesheetdescription")).item(0); if (stylesheetdescriptionElement == null) { log.error( "Could not obtain <stylesheetdescription> element"); return null; } NodeList elements = stylesheetdescriptionElement.getElementsByTagName(elementName); for (int i = elements.getLength() - 1; i >= 0; i--) { v.add(this.getTextChildNodeValue(elements.item(i))); } return v; } private String getTextChildNodeValue (Node node) { if (node == null) return null; NodeList children = node.getChildNodes(); for (int i = children.getLength() - 1; i >= 0; i--) { Node child = children.item(i); if (child.getNodeType() == Node.TEXT_NODE) return child.getNodeValue(); } return null; } /** * UserPreferences */ private String getUserBrowserMapping (final IPerson person, final String userAgentArg) { final int userId = person.getID(); return jdbcOperations.execute(new ConnectionCallback<String>() { @Override public String doInConnection(Connection con) throws SQLException, DataAccessException { final String userAgent; if (userAgentArg.length() > 255){ userAgent = userAgentArg.substring(0,254); 
log.debug("userAgent trimmed to 255 characters. userAgent: "+userAgentArg); } else { userAgent = userAgentArg; } String sQuery = "SELECT PROFILE_FNAME " + "FROM UP_USER_UA_MAP LEFT JOIN UP_USER_PROFILE ON " + "UP_USER_UA_MAP.PROFILE_ID=UP_USER_PROFILE.PROFILE_ID WHERE UP_USER_UA_MAP.USER_ID=? AND USER_AGENT=?"; PreparedStatement pstmt = con.prepareStatement(sQuery); try { pstmt.setInt(1, userId); pstmt.setString(2, userAgent); if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserBrowserMapping(): '" + sQuery + "' userId: " + userId + " userAgent: " + userAgent); ResultSet rs = pstmt.executeQuery(); try { if (rs.next()) { return rs.getString("PROFILE_FNAME"); } } finally { rs.close(); } } finally { pstmt.close(); } return null; } }); } protected Document getPersonalUserLayout (final IPerson person, final IUserProfile profile) { final LocaleManager localeManager = profile.getLocaleManager(); return jdbcOperations.execute(new ConnectionCallback<Document>() { @Override public Document doInConnection(Connection con) throws SQLException, DataAccessException { ResultSet rs; int userId = person.getID(); final int realUserId = userId; Document doc = DocumentFactory.getThreadDocument(); Element root = doc.createElement("layout"); final Statement stmt = con.createStatement(); // A separate statement is needed so as not to interfere with ResultSet // of statements used for queries Statement insertStmt = con.createStatement(); try { long startTime = System.currentTimeMillis(); // eventually, we need to fix template layout implementations so you can just do this: // int layoutId=profile.getLayoutId(); // but for now: int layoutId = getLayoutID(userId, profile.getProfileId()); if (layoutId == 0) { // First time, grab the default layout for this user final Tuple<Integer, Integer> userLayoutIds = transactionOperations.execute(new TransactionCallback<Tuple<Integer, Integer>>() { @Override public Tuple<Integer, Integer> doInTransaction(TransactionStatus status) { return 
jdbcOperations.execute(new ConnectionCallback<Tuple<Integer, Integer>>() { @Override public Tuple<Integer, Integer> doInConnection(Connection con) throws SQLException, DataAccessException { int newLayoutId; int newUserId; String sQuery = "SELECT USER_DFLT_USR_ID, USER_DFLT_LAY_ID FROM UP_USER WHERE USER_ID=" + realUserId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery); ResultSet rs = stmt.executeQuery(sQuery); try { boolean hasRow = rs.next(); newUserId = rs.getInt(1); newLayoutId = rs.getInt(2); } finally { rs.close(); } // Make sure the next struct id is set in case the user adds a channel sQuery = "SELECT NEXT_STRUCT_ID FROM UP_USER WHERE USER_ID=" + newUserId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery); int nextStructId; rs = stmt.executeQuery(sQuery); try { boolean hasRow = rs.next(); nextStructId = rs.getInt(1); } finally { rs.close(); } int realNextStructId = 0; if (realUserId != newUserId) { // But never make the existing value SMALLER, change it only to make it LARGER // (so, get existing value) sQuery = "SELECT NEXT_STRUCT_ID FROM UP_USER WHERE USER_ID=" + realUserId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery); rs = stmt.executeQuery(sQuery); try { boolean hasRow = rs.next(); realNextStructId = rs.getInt(1); } finally { rs.close(); } } if (nextStructId > realNextStructId) { sQuery = "UPDATE UP_USER SET NEXT_STRUCT_ID=" + nextStructId + " WHERE USER_ID=" + realUserId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery); stmt.executeUpdate(sQuery); } return new Tuple<Integer, Integer>(newUserId, newLayoutId); } }); } }); userId = userLayoutIds.first; layoutId = userLayoutIds.second; } int firstStructId = -1; //Flags to enable a default layout lookup if it's needed boolean foundLayout = false; boolean triedDefault = false; //This loop is used to ensure a layout is found for a user. 
It tries //looking up the layout for the current userID. If one isn't found //the userID is replaced with the template user ID for this user and //the layout is searched for again. This loop should only ever loop once. do { String sQuery = "SELECT INIT_STRUCT_ID FROM UP_USER_LAYOUT WHERE USER_ID=" + userId + " AND LAYOUT_ID = " + layoutId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery); rs = stmt.executeQuery(sQuery); try { if (rs.next()) { firstStructId = rs.getInt(1); } else { throw new RuntimeException("RDBMUserLayoutStore::getUserLayout(): No INIT_STRUCT_ID in UP_USER_LAYOUT for USER_ID: " + userId + " and LAYOUT_ID: " + layoutId); } } finally { rs.close(); } String sql; if (localeAware) { // This needs to be changed to get the localized strings sql = "SELECT ULS.STRUCT_ID,ULS.NEXT_STRUCT_ID,ULS.CHLD_STRUCT_ID,ULS.CHAN_ID,ULS.NAME,ULS.TYPE,ULS.HIDDEN,"+ "ULS.UNREMOVABLE,ULS.IMMUTABLE"; } else { sql = "SELECT ULS.STRUCT_ID,ULS.NEXT_STRUCT_ID,ULS.CHLD_STRUCT_ID,ULS.CHAN_ID,ULS.NAME,ULS.TYPE,ULS.HIDDEN,"+ "ULS.UNREMOVABLE,ULS.IMMUTABLE"; } if (databaseMetadata.supportsOuterJoins()) { sql += ",USP.STRUCT_PARM_NM,USP.STRUCT_PARM_VAL FROM " + databaseMetadata.getJoinQuery().getQuery("layout"); } else { sql += " FROM UP_LAYOUT_STRUCT ULS WHERE "; } sql += " ULS.USER_ID=" + userId + " AND ULS.LAYOUT_ID=" + layoutId + " ORDER BY ULS.STRUCT_ID"; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserLayout(): " + sql); rs = stmt.executeQuery(sql); //check for rows in the result set foundLayout = rs.next(); if (!foundLayout && !triedDefault && userId == realUserId) { //If we didn't find any rows and we haven't tried the default user yet triedDefault = true; rs.close(); //Get the default user ID and layout ID sQuery = "SELECT USER_DFLT_USR_ID, USER_DFLT_LAY_ID FROM UP_USER WHERE USER_ID=" + userId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserLayout(): " + sQuery); rs = stmt.executeQuery(sQuery); try { 
rs.next(); userId = rs.getInt(1); layoutId = rs.getInt(2); } finally { rs.close(); } } else { //We tried the default or actually found a layout break; } } while (!foundLayout); HashMap layoutStructure = new HashMap(); StringBuffer structChanIds = new StringBuffer(); try { int lastStructId = 0; LayoutStructure ls = null; String sepChar = ""; if (foundLayout) { int structId = rs.getInt(1); // Result Set returns 0 by default if structId was null // Except if you are using poolman 2.0.4 in which case you get -1 back if (rs.wasNull()) { structId = 0; } readLayout: while (true) { int nextId = rs.getInt(2); if (rs.wasNull()) { nextId = 0; } int childId = rs.getInt(3); if (rs.wasNull()) { childId = 0; } int chanId = rs.getInt(4); if (rs.wasNull()) { chanId = 0; } String temp5=rs.getString(5); // Some JDBC drivers require columns accessed in order String temp6=rs.getString(6); // Access 5 and 6 now, save till needed. // uPortal i18n int name_index, value_index; if (localeAware) { Locale[] locales = localeManager.getLocales(); String locale = locales[0].toString(); ls = new LayoutStructure( structId, nextId, childId, chanId, rs.getString(7),rs.getString(8),rs.getString(9), locale); name_index=10; value_index=11; } else { ls = new LayoutStructure(structId, nextId, childId, chanId, rs.getString(7),rs.getString(8),rs.getString(9)); name_index=10; value_index=11; } layoutStructure.put(new Integer(structId), ls); lastStructId = structId; if (!ls.isChannel()) { ls.addFolderData(temp5, temp6); // Plug in saved column values } if (databaseMetadata.supportsOuterJoins()) { do { String name = rs.getString(name_index); String value = rs.getString(value_index); // Oracle JDBC requires us to do this for longs if (name != null) { // may not be there because of the join ls.addParameter(name, value); } if (!rs.next()) { break readLayout; } structId = rs.getInt(1); if (rs.wasNull()) { structId = 0; } } while (structId == lastStructId); } else { // Do second SELECT later on for structure 
parameters if (ls.isChannel()) { structChanIds.append(sepChar + ls.getChanId()); sepChar = ","; } if (rs.next()) { structId = rs.getInt(1); if (rs.wasNull()) { structId = 0; } } else { break readLayout; } } } // while } } finally { rs.close(); } if (!databaseMetadata.supportsOuterJoins()) { // Pick up structure parameters // first, get the struct ids for the channels String sql = "SELECT STRUCT_ID FROM UP_LAYOUT_STRUCT WHERE USER_ID=" + userId + " AND LAYOUT_ID=" + layoutId + " AND CHAN_ID IN (" + structChanIds.toString() + ") ORDER BY STRUCT_ID"; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserLayout(): " + sql); StringBuffer structIdsSB = new StringBuffer( "" ); String sep = ""; rs = stmt.executeQuery(sql); try { // use the results to build a correct list of struct ids to look for while( rs.next()) { structIdsSB.append(sep + rs.getString(1)); sep = ","; }// while } finally { rs.close(); } // be a good doobie sql = "SELECT STRUCT_ID, STRUCT_PARM_NM,STRUCT_PARM_VAL FROM UP_LAYOUT_PARAM WHERE USER_ID=" + userId + " AND LAYOUT_ID=" + layoutId + " AND STRUCT_ID IN (" + structIdsSB.toString() + ") ORDER BY STRUCT_ID"; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserLayout(): " + sql); rs = stmt.executeQuery(sql); try { if (rs.next()) { int structId = rs.getInt(1); readParm: while(true) { LayoutStructure ls = (LayoutStructure)layoutStructure.get(new Integer(structId)); int lastStructId = structId; do { ls.addParameter(rs.getString(2), rs.getString(3)); if (!rs.next()) { break readParm; } } while ((structId = rs.getInt(1)) == lastStructId); } } } finally { rs.close(); } } if (layoutStructure.size() > 0) { // We have a layout to work with createLayout(layoutStructure, doc, root, firstStructId); layoutStructure.clear(); if (log.isDebugEnabled()) { long stopTime = System.currentTimeMillis(); log.debug("RDBMUserLayoutStore::getUserLayout(): Layout document for user " + userId + " took " + (stopTime - startTime) + " milliseconds to create"); 
} doc.appendChild(root); } } finally { stmt.close(); insertStmt.close(); } return doc; } }); } public IUserProfile getUserProfileById (final IPerson person, final int profileId) { final int userId = person.getID(); return jdbcOperations.execute(new ConnectionCallback<IUserProfile>() { @Override public IUserProfile doInConnection(Connection con) throws SQLException, DataAccessException { Statement stmt = con.createStatement(); try { String sQuery = "SELECT USER_ID, PROFILE_ID, PROFILE_FNAME, PROFILE_NAME, DESCRIPTION, LAYOUT_ID, STRUCTURE_SS_ID, THEME_SS_ID FROM UP_USER_PROFILE WHERE USER_ID=" + userId + " AND PROFILE_ID=" + profileId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserProfileById(): " + sQuery); ResultSet rs = stmt.executeQuery(sQuery); try { if (rs.next()) { String temp2 = rs.getString(3); String temp3 = rs.getString(4); String temp4 = rs.getString(5); int layoutId = rs.getInt(6); if (rs.wasNull()) { layoutId = 0; } int structSsId = rs.getInt(7); if (rs.wasNull()) { // This is probably a data issue and probably an export operation; defer to the system user... if (!person.equals(getSystemUser())) { structSsId = getSystemProfileByFname(temp2).getStructureStylesheetId(); } else { String msg = "The system user profile has no structure stylesheet Id."; throw new IllegalStateException(msg); } } int themeSsId = rs.getInt(8); if (rs.wasNull()) { // This is probably a data issue and probably an export operation; defer to the system user... 
if (!person.equals(getSystemUser())) { themeSsId = getSystemProfileByFname(temp2).getThemeStylesheetId(); } else { String msg = "The system user profile has no theme stylesheet Id."; throw new IllegalStateException(msg); } } IUserProfile userProfile = new UserProfile(profileId, temp2, temp3,temp4, layoutId, structSsId, themeSsId); final Locale[] userLocales = localeStore.getUserLocales(person); userProfile.setLocaleManager(new LocaleManager(person, userLocales)); return userProfile; } else { throw new RuntimeException("Unable to find User Profile for user " + userId + " and profile " + profileId); } } finally { rs.close(); } } finally { stmt.close(); } } }); } private final ThreadLocal<Cache<Tuple<String, String>, UserProfile>> profileCacheHolder = new ThreadLocal<Cache<Tuple<String,String>,UserProfile>>(); /** * Cache used during import/export operations */ public void setProfileImportExportCache(Cache<Tuple<String, String>, UserProfile> profileCache) { if (profileCache == null) { this.profileCacheHolder.remove(); } else { this.profileCacheHolder.set(profileCache); } } private Cache<Tuple<String, String>, UserProfile> getProfileImportExportCache() { return this.profileCacheHolder.get(); } public UserProfile getUserProfileByFname (final IPerson person, final String profileFname) { Tuple<String, String> key = null; final Cache<Tuple<String, String>, UserProfile> profileCache = getProfileImportExportCache(); if (profileCache != null) { key = new Tuple<String, String>(person.getUserName(), profileFname); final UserProfile profile = profileCache.getIfPresent(key); if (profile != null) { return profile; } } log.debug("Getting profile " + profileFname + " for user " + person.getID()); final int userId = person.getID(); final UserProfile userProfile = jdbcOperations.execute(new ConnectionCallback<UserProfile>() { @Override public UserProfile doInConnection(Connection con) throws SQLException, DataAccessException { String query = "SELECT USER_ID, PROFILE_ID, PROFILE_NAME, 
DESCRIPTION, " + "LAYOUT_ID, STRUCTURE_SS_ID, THEME_SS_ID FROM UP_USER_PROFILE WHERE " + "USER_ID=? AND PROFILE_FNAME=?"; PreparedStatement pstmt = con.prepareStatement(query); pstmt.setInt(1, userId); pstmt.setString(2, profileFname); try { if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserProfileByFname(): " + query + " userId: " + userId + " profileFname: " + profileFname); ResultSet rs = pstmt.executeQuery(); try { if (rs.next()) { int profileId = rs.getInt(2); String profileName = rs.getString(3); String profileDesc = rs.getString(4); int layoutId = rs.getInt(5); if (rs.wasNull()) { layoutId = 0; } int structSsId = rs.getInt(6); if (rs.wasNull()) { // This is probably a data issue and probably an export operation; defer to the system user... if (!person.equals(getSystemUser())) { structSsId = getSystemProfileByFname(profileFname).getStructureStylesheetId(); } else { String msg = "The system user profile has no structure stylesheet Id."; throw new IllegalStateException(msg); } } int themeSsId = rs.getInt(7); if (rs.wasNull()) { // This is probably a data issue and probably an export operation; defer to the system user... if (!person.equals(getSystemUser())) { themeSsId = getSystemProfileByFname(profileFname).getThemeStylesheetId(); } else { String msg = "The system user profile has no theme stylesheet Id."; throw new IllegalStateException(msg); } } UserProfile userProfile = new UserProfile(profileId, profileFname, profileName, profileDesc, layoutId, structSsId, themeSsId); final Locale[] userLocales = localeStore.getUserLocales(person); userProfile.setLocaleManager(new LocaleManager(person, userLocales)); return userProfile; } /* Try to copy the template profile. 
*/ log.debug("Copying template profile " + profileFname + " to user " + person.getID()); rs.close(); pstmt.close(); pstmt = con.prepareStatement("SELECT USER_DFLT_USR_ID FROM UP_USER WHERE USER_ID=?"); pstmt.setInt(1, person.getID()); rs = pstmt.executeQuery(); if(rs.next()) { int defaultProfileUser = rs.getInt(1); if (rs.wasNull()) { throw new RuntimeException("Need to clone the '" + profileFname + "' profile from template user for " + person + " but they have no template user"); } IPerson defaultProfilePerson = new PersonImpl(); defaultProfilePerson.setID(defaultProfileUser); if(defaultProfilePerson.getID() != person.getID()) { UserProfile templateProfile = getUserProfileByFname(defaultProfilePerson,profileFname); if(templateProfile != null) { UserProfile newUserProfile = new UserProfile(templateProfile); final Locale[] userLocales = localeStore.getUserLocales(person); newUserProfile.setLayoutId(0); newUserProfile = addUserProfile(person,newUserProfile); newUserProfile.setLocaleManager(new LocaleManager(person, userLocales)); return newUserProfile; } } } throw new RuntimeException("Unable to find User Profile for userId " + userId + " and profile " + profileFname); } finally { rs.close(); } } finally { pstmt.close(); } } }); if (profileCache != null && key != null) { profileCache.put(key, userProfile); } return userProfile; } public Hashtable getUserProfileList (final IPerson person) { final int userId = person.getID(); return jdbcOperations.execute(new ConnectionCallback<Hashtable>() { @Override public Hashtable doInConnection(Connection con) throws SQLException, DataAccessException { Hashtable<Integer,UserProfile> pv = new Hashtable<Integer,UserProfile>(); Statement stmt = con.createStatement(); try { String sQuery = "SELECT USER_ID, PROFILE_ID, PROFILE_FNAME, PROFILE_NAME, DESCRIPTION, LAYOUT_ID, STRUCTURE_SS_ID, THEME_SS_ID FROM UP_USER_PROFILE WHERE USER_ID=" + userId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getUserProfileList(): " + 
sQuery); ResultSet rs = stmt.executeQuery(sQuery); try { while (rs.next()) { int layoutId = rs.getInt(6); if (rs.wasNull()) { layoutId = 0; } int structSsId = rs.getInt(7); if (rs.wasNull()) { structSsId = 0; } int themeSsId = rs.getInt(8); if (rs.wasNull()) { themeSsId = 0; } UserProfile upl = new UserProfile(rs.getInt(2), rs.getString(3), rs.getString(4), rs.getString(5), layoutId, structSsId, themeSsId); pv.put(new Integer(upl.getProfileId()), upl); } } finally { rs.close(); } } finally { stmt.close(); } return pv; } }); } protected abstract Element getStructure(Document doc, LayoutStructure ls); protected abstract int saveStructure (Node node, PreparedStatement structStmt, PreparedStatement parmStmt) throws SQLException; public void setUserBrowserMapping (final IPerson person, final String userAgentArg, final int profileId) { final int userId = person.getID(); this.transactionOperations.execute(new TransactionCallback<Object>() { @Override public Object doInTransaction(TransactionStatus status) { return jdbcOperations.execute(new ConnectionCallback<Object>() { @Override public Object doInConnection(Connection con) throws SQLException, DataAccessException { final String userAgent; if (userAgentArg.length() > 255){ userAgent = userAgentArg.substring(0,254); log.debug("userAgent trimmed to 255 characters. userAgent: "+userAgentArg); } else { userAgent = userAgentArg; } // remove the old mapping and add the new one PreparedStatement ps = null; try{ ps = con.prepareStatement("DELETE FROM UP_USER_UA_MAP WHERE USER_ID=? 
AND USER_AGENT=?"); ps.setInt(1,userId); ps.setString(2,userAgent); ps.executeUpdate(); }finally{ try{ ps.close(); }catch(Exception e){ //ignore } } try{ log.debug("writing to UP_USER_UA_MAP: userId: "+userId+", userAgent: "+userAgent+", profileId: "+profileId); ps = con.prepareStatement("INSERT INTO UP_USER_UA_MAP (USER_ID,USER_AGENT,PROFILE_ID) VALUES (?,?,?)"); ps.setInt(1,userId); ps.setString(2,userAgent); ps.setInt(3,profileId); ps.executeUpdate(); }finally{ try{ ps.close(); }catch(Exception e){ //ignore } } return null; } }); } }); } /** * Save the user layout. * @param person * @param profile * @param layoutXML * @throws Exception */ public void setUserLayout(final IPerson person, final IUserProfile profile, final Document layoutXML, final boolean channelsAdded) { final long startTime = System.currentTimeMillis(); final int userId = person.getID(); final int profileId = profile.getProfileId(); this.transactionOperations.execute(new TransactionCallback<Object>() { @Override public Object doInTransaction(TransactionStatus status) { return jdbcOperations.execute(new ConnectionCallback<Object>() { @Override public Object doInConnection(Connection con) throws SQLException, DataAccessException { int layoutId = 0; ResultSet rs; // Eventually we want to be able to just get layoutId from the // profile, but because of the template user layouts we have to do this for now ... layoutId = getLayoutID(userId, profileId); boolean firstLayout = false; if (layoutId == 0) { // First personal layout for this user/profile layoutId = 1; firstLayout = true; } String sql = "DELETE FROM UP_LAYOUT_PARAM WHERE USER_ID=? AND LAYOUT_ID=?"; PreparedStatement pstmt = con.prepareStatement(sql); try { pstmt.clearParameters(); pstmt.setInt(1, userId); pstmt.setInt(2, layoutId); if (log.isDebugEnabled()) log.debug(sql); pstmt.executeUpdate(); } finally { pstmt.close(); } sql = "DELETE FROM UP_LAYOUT_STRUCT WHERE USER_ID=? 
AND LAYOUT_ID=?"; pstmt = con.prepareStatement(sql); try { pstmt.clearParameters(); pstmt.setInt(1, userId); pstmt.setInt(2, layoutId); if (log.isDebugEnabled()) log.debug(sql); pstmt.executeUpdate(); } finally { pstmt.close(); } PreparedStatement structStmt = con.prepareStatement("INSERT INTO UP_LAYOUT_STRUCT " + "(USER_ID, LAYOUT_ID, STRUCT_ID, NEXT_STRUCT_ID, CHLD_STRUCT_ID,EXTERNAL_ID,CHAN_ID,NAME,TYPE,HIDDEN,IMMUTABLE,UNREMOVABLE) " + "VALUES (" + userId + "," + layoutId + ",?,?,?,?,?,?,?,?,?,?)"); PreparedStatement parmStmt = con.prepareStatement("INSERT INTO UP_LAYOUT_PARAM " + "(USER_ID, LAYOUT_ID, STRUCT_ID, STRUCT_PARM_NM, STRUCT_PARM_VAL) " + "VALUES (" + userId + "," + layoutId + ",?,?,?)"); int firstStructId; try { firstStructId = saveStructure(layoutXML.getFirstChild().getFirstChild(), structStmt, parmStmt); } finally { structStmt.close(); parmStmt.close(); } //Check to see if the user has a matching layout sql = "SELECT * FROM UP_USER_LAYOUT WHERE USER_ID=? AND LAYOUT_ID=?"; pstmt = con.prepareStatement(sql); try { pstmt.clearParameters(); pstmt.setInt(1, userId); pstmt.setInt(2, layoutId); if (log.isDebugEnabled()) log.debug(sql); rs = pstmt.executeQuery(); try { if (!rs.next()) { // If not, the default user is found and the layout rows from the default user are copied for the current user. 
int defaultUserId; sql = "SELECT USER_DFLT_USR_ID FROM UP_USER WHERE USER_ID=?"; PreparedStatement pstmt2 = con.prepareStatement(sql); try { pstmt2.clearParameters(); pstmt2.setInt(1, userId); if (log.isDebugEnabled()) log.debug(sql); ResultSet rs2 = null; try { rs2 = pstmt2.executeQuery(); rs2.next(); defaultUserId = rs2.getInt(1); } finally { rs2.close(); } } finally { pstmt2.close(); } // Add to UP_USER_LAYOUT sql = "SELECT USER_ID,LAYOUT_ID,LAYOUT_TITLE,INIT_STRUCT_ID FROM UP_USER_LAYOUT WHERE USER_ID=?"; pstmt2 = con.prepareStatement(sql); try { pstmt2.clearParameters(); pstmt2.setInt(1, defaultUserId); if (log.isDebugEnabled()) log.debug(sql); ResultSet rs2 = pstmt2.executeQuery(); try { if (rs2.next()) { // There is a row for this user's template user... sql = "INSERT INTO UP_USER_LAYOUT (USER_ID, LAYOUT_ID, LAYOUT_TITLE, INIT_STRUCT_ID) VALUES (?,?,?,?)"; PreparedStatement pstmt3 = con.prepareStatement(sql); try { pstmt3.clearParameters(); pstmt3.setInt(1, userId); pstmt3.setInt(2, rs2.getInt("LAYOUT_ID")); pstmt3.setString(3, rs2.getString("LAYOUT_TITLE")); pstmt3.setInt(4, rs2.getInt("INIT_STRUCT_ID")); if (log.isDebugEnabled()) log.debug(sql); pstmt3.executeUpdate(); } finally { pstmt3.close(); } } else { // We can't rely on the template user, but we still need a row... sql = "INSERT INTO UP_USER_LAYOUT (USER_ID, LAYOUT_ID, LAYOUT_TITLE, INIT_STRUCT_ID) VALUES (?,?,?,?)"; PreparedStatement pstmt3 = con.prepareStatement(sql); try { pstmt3.clearParameters(); pstmt3.setInt(1, userId); pstmt3.setInt(2, layoutId); pstmt3.setString(3, "default layout"); pstmt3.setInt(4, 1); if (log.isDebugEnabled()) log.debug(sql); pstmt3.executeUpdate(); } finally { pstmt3.close(); } } } finally { rs2.close(); } } finally { pstmt2.close(); } } } finally { rs.close(); } } finally { pstmt.close(); } //Update the users layout with the correct inital structure ID sql = "UPDATE UP_USER_LAYOUT SET INIT_STRUCT_ID=? WHERE USER_ID=? 
AND LAYOUT_ID=?"; pstmt = con.prepareStatement(sql); try { pstmt.clearParameters(); pstmt.setInt(1, firstStructId); pstmt.setInt(2, userId); pstmt.setInt(3, layoutId); if (log.isDebugEnabled()) log.debug(sql); pstmt.executeUpdate(); } finally { pstmt.close(); } // Update the last time the user saw the list of available channels if (channelsAdded) { sql = "UPDATE UP_USER SET LST_CHAN_UPDT_DT=? WHERE USER_ID=?"; pstmt = con.prepareStatement(sql); try { pstmt.clearParameters(); pstmt.setDate(1, new java.sql.Date(System.currentTimeMillis())); pstmt.setInt(2, userId); log.debug(sql); pstmt.executeUpdate(); } finally { pstmt.close(); } } if (firstLayout) { int defaultUserId; int defaultLayoutId; // Have to copy some of data over from the default user sql = "SELECT USER_DFLT_USR_ID,USER_DFLT_LAY_ID FROM UP_USER WHERE USER_ID=?"; pstmt = con.prepareStatement(sql); try { pstmt.clearParameters(); pstmt.setInt(1, userId); log.debug(sql); rs = pstmt.executeQuery(); try { rs.next(); defaultUserId = rs.getInt(1); defaultLayoutId = rs.getInt(2); } finally { rs.close(); } } finally { pstmt.close(); } sql = "UPDATE UP_USER_PROFILE SET LAYOUT_ID=1 WHERE USER_ID=? 
AND PROFILE_ID=?"; pstmt = con.prepareStatement(sql); try { pstmt.clearParameters(); pstmt.setInt(1, userId); pstmt.setInt(2, profileId); log.debug(sql); pstmt.executeUpdate(); } finally { pstmt.close(); } } return null; } }); } }); if (log.isDebugEnabled()) { long stopTime = System.currentTimeMillis(); log.debug("RDBMUserLayoutStore::setUserLayout(): Layout document for user " + userId + " took " + (stopTime - startTime) + " milliseconds to save"); } } public void updateUserProfile (final IPerson person, final IUserProfile profile) { final int userId = person.getID(); this.transactionOperations.execute(new TransactionCallback<Object>() { @Override public Object doInTransaction(TransactionStatus status) { return jdbcOperations.execute(new ConnectionCallback<Object>() { @Override public Object doInConnection(Connection con) throws SQLException, DataAccessException { String query = "UPDATE UP_USER_PROFILE SET LAYOUT_ID=?,THEME_SS_ID=?,STRUCTURE_SS_ID=?," + "DESCRIPTION=?,PROFILE_NAME=?, PROFILE_FNAME=? WHERE USER_ID=? 
AND PROFILE_ID=?"; PreparedStatement pstmt = con.prepareStatement(query); pstmt.setInt(1, profile.getLayoutId()); pstmt.setInt(2, profile.getThemeStylesheetId()); pstmt.setInt(3, profile.getStructureStylesheetId()); pstmt.setString(4, profile.getProfileDescription()); pstmt.setString(5, profile.getProfileName()); pstmt.setString(6, profile.getProfileFname()); pstmt.setInt(7, userId); pstmt.setInt(8, profile.getProfileId()); try { if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::updateUserProfile() : " + query + " layout_id: " + profile.getLayoutId() + " theme_ss_id: " + profile.getThemeStylesheetId() + " structure_ss_id: " + profile.getStructureStylesheetId() + " description: " + profile.getProfileDescription() + " name: " + profile.getProfileName() + " user_id: " + userId + " fname: " + profile.getProfileFname()); pstmt.execute(); } finally { pstmt.close(); } return null; } }); } }); } public void setSystemBrowserMapping (String userAgent, int profileId) { this.setUserBrowserMapping(this.getSystemUser(), userAgent, profileId); } private String getSystemBrowserMapping (String userAgent) { return getUserBrowserMapping(this.getSystemUser(), userAgent); } public IUserProfile getUserProfile (IPerson person, String userAgent) { String profileFname = getUserBrowserMapping(person, userAgent); if (profileFname == null) return null; return this.getUserProfileByFname(person, profileFname); } public IUserProfile getSystemProfile (String userAgent) { String profileFname = getSystemBrowserMapping(userAgent); if (profileFname == null) return null; IUserProfile up = this.getUserProfileByFname(this.getSystemUser(), profileFname); up.setSystemProfile(true); return up; } public IUserProfile getSystemProfileById (int profileId) { IUserProfile up = this.getUserProfileById(this.getSystemUser(), profileId); up.setSystemProfile(true); return up; } public IUserProfile getSystemProfileByFname (String profileFname) { IUserProfile up = 
this.getUserProfileByFname(this.getSystemUser(), profileFname); up.setSystemProfile(true); return up; } public Hashtable getSystemProfileList () { Hashtable pl = this.getUserProfileList(this.getSystemUser()); for (Enumeration e = pl.elements(); e.hasMoreElements();) { IUserProfile up = (IUserProfile)e.nextElement(); up.setSystemProfile(true); } return pl; } public void updateSystemProfile (IUserProfile profile) { this.updateUserProfile(this.getSystemUser(), profile); } public IUserProfile addSystemProfile (IUserProfile profile) { return addUserProfile(this.getSystemUser(), profile); } public void deleteSystemProfile (int profileId) { this.deleteUserProfile(this.getSystemUser(), profileId); } private static class SystemUser implements IPerson { private final int systemUserId; public SystemUser(int systemUserId) { this.systemUserId = systemUserId; } public void setID(int sID) { } public int getID() { return this.systemUserId; } public String getUserName() { return null; } public void setUserName(String userName) { } public void setFullName(String sFullName) { } public String getFullName() { return "uPortal System Account"; } public Object getAttribute(String key) { return null; } public Object[] getAttributeValues(String key) { return null; } public Map<String,List<Object>> getAttributeMap() { return null; } public void setAttribute(String key, Object value) { } public void setAttribute(String key, List<Object> values) { } public void setAttributes(Map attrs) { } public Enumeration getAttributes() { return null; } public Enumeration getAttributeNames() { return null; } public boolean isGuest() { return (false); } public ISecurityContext getSecurityContext() { return (null); } public void setSecurityContext(ISecurityContext context) { } public EntityIdentifier getEntityIdentifier() { return null; } public void setEntityIdentifier(EntityIdentifier ei) { } public String getName() { return null; } } /** * Returns the current layout ID for the user and profile. 
If the profile doesn't exist or the * layout_id field is null 0 is returned. * * @param userId The userId for the profile * @param profileId The profileId for the profile * @return The layout_id field or 0 if it does not exist or is null * @throws SQLException */ protected int getLayoutID(final int userId, final int profileId) throws SQLException { return jdbcOperations.execute(new ConnectionCallback<Integer>() { @Override public Integer doInConnection(Connection con) throws SQLException, DataAccessException { String query = "SELECT LAYOUT_ID " + "FROM UP_USER_PROFILE " + "WHERE USER_ID=? AND PROFILE_ID=?"; int layoutId = 0; PreparedStatement pstmt = con.prepareStatement(query); try { final int u = userId; final int p = profileId; if (log.isDebugEnabled()) log.debug("RDBMUserLayoutStore::getLayoutID(userId=" + u + ", profileId=" + p + " ): " + query); pstmt.setInt(1, u); pstmt.setInt(2, p); ResultSet rs = pstmt.executeQuery(); try { if (rs.next()) { layoutId = rs.getInt(1); if (rs.wasNull()) { layoutId = 0; } } if (layoutId == 0) { // determine the fname for the currently-requested profile query = "SELECT PROFILE_FNAME FROM UP_USER_PROFILE WHERE " + "USER_ID=? AND PROFILE_ID=?"; pstmt = con.prepareStatement(query); pstmt.setInt(1, u); pstmt.setInt(2, p); rs = pstmt.executeQuery(); String profileFname = null; if (rs.next()) { profileFname = rs.getString("PROFILE_FNAME"); } // using the fname calculated above, attempt to get the // layout id of the default user profile for this fname query = "SELECT LAYOUT_ID FROM UP_USER_PROFILE LEFT JOIN " + "UP_USER ON UP_USER_PROFILE.USER_ID=UP_USER.USER_DFLT_USR_ID " + "WHERE UP_USER.USER_ID=? 
AND UP_USER_PROFILE.PROFILE_FNAME=?"; pstmt = con.prepareStatement(query); pstmt.setInt(1, u); pstmt.setString(2, profileFname); rs = pstmt.executeQuery(); int intendedLayoutId = 0; if (rs.next()) { intendedLayoutId = rs.getInt("LAYOUT_ID"); } // check to see if another profile for the current user // has already created the requested layout query = "SELECT LAYOUT_ID FROM UP_USER_PROFILE WHERE " + "USER_ID=? AND LAYOUT_ID=?"; pstmt = con.prepareStatement(query); pstmt.setInt(1, u); pstmt.setInt(2, intendedLayoutId); rs = pstmt.executeQuery(); if (rs.next()) { // if the layout already exists, update the profile to // point to that layout query = "UPDATE UP_USER_PROFILE SET LAYOUT_ID=? WHERE " + "USER_ID=? AND PROFILE_ID=?"; pstmt = con.prepareStatement(query); pstmt.setInt(1, intendedLayoutId); pstmt.setInt(2, u); pstmt.setInt(3, p); pstmt.execute(); layoutId = intendedLayoutId; } } } finally { rs.close(); } } finally { pstmt.close(); } return layoutId; } }); } /* (non-Javadoc) * @see org.jasig.portal.layout.IUserLayoutStore#importLayout(org.dom4j.Element) */ public abstract void importLayout(org.dom4j.Element layout); }
UP-2929: reset a user's layout feature is broke with default + mobile theme -> patch RDBMUserLayoutStore.getLayoutID
uportal-war/src/main/java/org/jasig/portal/layout/simple/RDBMUserLayoutStore.java
UP-2929: reset a user's layout feature is broke with default + mobile theme -> patch RDBMUserLayoutStore.getLayoutID
<ide><path>portal-war/src/main/java/org/jasig/portal/layout/simple/RDBMUserLayoutStore.java <ide> <ide> protected final Log log = LogFactory.getLog(getClass()); <ide> private static String PROFILE_TABLE = "UP_USER_PROFILE"; <add> <add> protected static final String DEFAULT_LAYOUT_FNAME = "default"; <ide> <ide> //This class is instantiated ONCE so NO class variables can be used to keep state between calls <ide> protected static final String channelPrefix = "n"; <ide> } <ide> } <ide> <del> if (layoutId == 0) { <del> <del> // determine the fname for the currently-requested profile <del> query = "SELECT PROFILE_FNAME FROM UP_USER_PROFILE WHERE " + <del> "USER_ID=? AND PROFILE_ID=?"; <add> // find the layout used by the default profle for this user <add> query = "SELECT LAYOUT_ID FROM UP_USER_PROFILE " + <add> "WHERE USER_ID=? AND PROFILE_FNAME='" + DEFAULT_LAYOUT_FNAME + "'"; <ide> pstmt = con.prepareStatement(query); <del> pstmt.setInt(1, u); <del> pstmt.setInt(2, p); <del> <del> rs = pstmt.executeQuery(); <del> String profileFname = null; <del> if (rs.next()) { <del> profileFname = rs.getString("PROFILE_FNAME"); <del> } <del> <del> // using the fname calculated above, attempt to get the <del> // layout id of the default user profile for this fname <del> query = "SELECT LAYOUT_ID FROM UP_USER_PROFILE LEFT JOIN " + <del> "UP_USER ON UP_USER_PROFILE.USER_ID=UP_USER.USER_DFLT_USR_ID " + <del> "WHERE UP_USER.USER_ID=? 
AND UP_USER_PROFILE.PROFILE_FNAME=?"; <del> pstmt = con.prepareStatement(query); <del> pstmt.setInt(1, u); <del> pstmt.setString(2, profileFname); <add> pstmt.setInt(1, userId); <ide> rs = pstmt.executeQuery(); <ide> int intendedLayoutId = 0; <ide> if (rs.next()) { <ide> intendedLayoutId = rs.getInt("LAYOUT_ID"); <add> if (rs.wasNull()) { <add> intendedLayoutId = 0; <add> } <ide> } <ide> <del> // check to see if another profile for the current user <del> // has already created the requested layout <add> // check to see if this profile for the current user <add> // has already used the requested layout <ide> query = "SELECT LAYOUT_ID FROM UP_USER_PROFILE WHERE " + <del> "USER_ID=? AND LAYOUT_ID=?"; <add> "USER_ID=? AND LAYOUT_ID=? AND PROFILE_ID=?"; <ide> pstmt = con.prepareStatement(query); <del> pstmt.setInt(1, u); <add> pstmt.setInt(1, userId); <ide> pstmt.setInt(2, intendedLayoutId); <add> pstmt.setInt(3, profileId); <ide> rs = pstmt.executeQuery(); <del> if (rs.next()) { <del> <del> // if the layout already exists, update the profile to <add> if (!rs.next()) { <add> // if the layout's not already been used, update the profile to <ide> // point to that layout <ide> query = "UPDATE UP_USER_PROFILE SET LAYOUT_ID=? WHERE " + <ide> "USER_ID=? AND PROFILE_ID=?"; <add> <ide> pstmt = con.prepareStatement(query); <add> <ide> pstmt.setInt(1, intendedLayoutId); <del> pstmt.setInt(2, u); <del> pstmt.setInt(3, p); <add> pstmt.setInt(2, userId); <add> pstmt.setInt(3, profileId); <ide> pstmt.execute(); <ide> <add> <ide> layoutId = intendedLayoutId; <ide> <ide> } <ide> <del> } <ide> <ide> } <ide> finally {
JavaScript
mit
708fadee7bb6b1c276512f8e95401636ef3a5a04
0
vanda/vam-fractal,vanda/vam-fractal
const facetClass = 'b-facet-box__facet'; const facetTerm = `${facetClass}-term-toggle`; const facetTermTick = `${facetTerm}-tick`; const facetTextClass = `${facetClass}-text`; const facetTermContainerClass = `${facetClass}-term-container`; const termClass = 'b-facet-box__term'; const termListClass = `${termClass}-list`; const termList = document.querySelector(`.${termListClass}`); const termCheckboxClass = `${facetClass}-term-toggle-checkbox`; const facetCloseClass = 'b-facet-box__close-button'; const facetsWithIndex = {}; const termButtonHTML = (facet, term) => ` <span class="b-facet-box__term-text"> ${facet}: ${term} </span> `; const dateFacetHTML = () => ` <div class="b-facet-box__facet-text"> Dates </div> <div class="b-facet-box__facet-term-container b-facet-box__facet-date-container"> <div class="b-facet-box__facet-date-container-start"> <label class="b-facet-box__facet-date-label"> From year: </label> <input class="b-facet-box__facet-date-input" placeholder="Year" type="number" name="after_year"> </div> <div class="b-facet-box__facet-date-container-end"> <label class="b-facet-box__facet-date-label"> To year: </label> <input class="b-facet-box__facet-date-input" placeholder="Year" type="number" name="before_year"> </div> <div class="b-facet-box__facet-date-container-button"> <label class="b-facet-box__facet-date-label"> &nbsp; </label> <button class="b-facet-box__facet-date-button"> <svg class="b-facet-box__facet-date-button-icon" role="img"> <use xlink:href="/svg/vamicons.svg#search"></use> </svg> </button> </div> </div> `; const termCheckbox = (facet, paramName, term, value, count) => { const checkbox = document.createElement('LI'); checkbox.className = 'b-facet-box__facet-term-toggle'; checkbox.dataset.id = `${paramName}-${value}`; checkbox.dataset.facet = facet; checkbox.dataset.paramName = paramName; checkbox.dataset.term = term; checkbox.dataset.value = value; checkbox.dataset.count = count; checkbox.innerHTML = ` <a 
class="b-facet-box__facet-term-toggle-checkbox" href="javascript:void(0);"> <svg class="b-facet-box__facet-term-toggle-tick" role="img"> <use xlink:href="/svg/vamicons.svg#tick"></use> </svg> </a> <span class="b-facet-box__facet-term-toggle-text"> ${term} </span> <span class="b-facet-box__facet-term-toggle-result"> (${count}) </span> `; // const hiddenInput = ` // <input class="b-facet-box__hidden-input" type="checkbox" name="${paramName}" value="${value}"> // `; const hiddenInput = document.createElement('INPUT'); hiddenInput.type = 'checkbox'; hiddenInput.className = 'b-facet-box__hidden-input'; hiddenInput.name = paramName; hiddenInput.value = value; hiddenInput.id = `${paramName}=${value}`; checkbox.addEventListener('termToggle', (e) => { const existingHiddenInput = document.querySelector(`input[id="${`${paramName}=${value}`}"]`); // GOTTA ASSUME THERE'S A FORM ON THE PAGE FOR THIS TO WORK!!! // this is because formData has an order which is annoying to change if (existingHiddenInput) { existingHiddenInput.checked = false; existingHiddenInput.remove(); } else { document.querySelector('#vam-etc-search').appendChild(hiddenInput); document.querySelector(`input[id="${`${paramName}=${value}`}"]`).checked = true; } e.target.querySelector(`.${facetTermTick}`).classList.toggle( `${facetTermTick}--active` ); document.querySelector('.b-facet-box').dispatchEvent(new Event('boxChecked', { bubbles: true })); }); return checkbox; }; const facetHTML = (facet, seeMore) => ` <div class="b-facet-box__facet-text"> ${facet} </div> <ul data-facet="${facet}" class="b-facet-box__facet-term-container">${ seeMore ? 
` <a data-facet="${facet}" class="b-facet-box__term-more" href="#">See more</a> ` : '' }</ul>`; const revealMoreFacets = (e) => { e.preventDefault(); const linkEl = e.target; const facetContainer = e.target.parentNode; const { terms, index, facet, paramName } = facetsWithIndex[e.target.dataset.facet]; e.target.remove(); terms.slice(index, index + 5).forEach(({ term, count, value }) => { facetContainer.appendChild(termCheckbox(facet, paramName, term, value, count)); }); facetsWithIndex[facet].index += 5; if (facetsWithIndex[facet].index !== terms.length) { facetContainer.appendChild(linkEl); } }; const createFacets = (activeFacets) => { const facetBoxContainer = document.querySelector('.b-facet-box__facet-container'); const facetToTerm = Array.from(activeFacets).reduce((res, termfacet) => { const facet = termfacet.split('-')[0]; const term = termfacet.split('-')[1]; if (res[facet]) { res[facet].push(term); } else { res[facet] = [term]; } return res; }, {}); Object.values(facetsWithIndex).forEach(({ facet, terms, paramName, index }) => { const newFacet = document.createElement('DIV'); newFacet.className = 'b-facet-box__facet'; newFacet.innerHTML = facetHTML(facet, terms.length > 5); newFacet.addEventListener('click', (e) => { if (e.target.classList.contains(facetTextClass)) { e.target.classList.toggle(`${e.target.classList[0]}--active`); e.target.parentNode.querySelector(`.${facetTermContainerClass}`).classList.toggle(`${facetTermContainerClass}--active`); } }); const termValues = terms.map(t => t.value); let newIndex = (facetToTerm[paramName] && facetToTerm[paramName].reduce((current, term) => { const test = termValues.indexOf(term); return (current > test ? current : test); }, 5)) || 0; newIndex = ((Math.ceil(newIndex / 5) * 5)); newIndex = newIndex > terms.length ? 
terms.length : (newIndex || 5); terms.slice(index, newIndex).forEach(({ term, count, value }) => { newFacet.querySelector(`.${facetTermContainerClass}`).appendChild(termCheckbox(facet, paramName, term, value, count)); }); facetsWithIndex[facet].index += (newIndex); if (terms.length > 5) { if (facetsWithIndex[facet].index < terms.length) { newFacet.querySelector(`.${facetTermContainerClass}`).appendChild(newFacet.querySelector('.b-facet-box__term-more')); newFacet.querySelector(`.${facetTermContainerClass} .b-facet-box__term-more`).onclick = e => revealMoreFacets(e); } else { newFacet.querySelector(`.${facetTermContainerClass}`).appendChild(newFacet.querySelector('.b-facet-box__term-more')).remove(); } } facetBoxContainer.appendChild(newFacet); }); }; const newTermToggleEvent = (detail, bubbles = true) => new CustomEvent('termToggle', { detail, bubbles }); const initialiseFacetOverlay = () => { const toggleTerm = ({ id, facet, term, paramName }) => { if (id) { // if term already exists, get rid of it if (document.querySelector(`div[data-id='${id}']`)) { Array.from(document.querySelectorAll(`div[data-id='${id}']`)).forEach(el => el.remove()); if (!document.querySelector('.b-search-form__facets').children.length) { document.querySelector('.b-search-form__facet-pane').classList.remove('b-search-form__facet-pane--active'); } window.dispatchEvent(new Event('resize')); } else { const newTermOnClick = () => { Array.from(document.querySelectorAll(`div[data-id='${id}']`)).forEach(el => el.dispatchEvent(newTermToggleEvent({ id, facet, term, paramName }))); if (document.querySelector(`li[data-id='${id}']`)) { document.querySelector(`li[data-id='${id}']`).dispatchEvent(newTermToggleEvent({ id, facet, term, paramName })); } }; const newTerm = document.createElement('DIV'); newTerm.dataset.id = id; newTerm.className = 'b-facet-box__term'; newTerm.innerHTML = termButtonHTML(facet, term); newTerm.onclick = e => newTermOnClick(e); termList.appendChild(newTerm); const newFormTerm = 
newTerm.cloneNode(true); newFormTerm.onclick = e => newTermOnClick(e); newFormTerm.classList.add('b-facet-box__term--form'); if (document.querySelector('.b-search-form__facets')) { document.querySelector('.b-search-form__facets').appendChild(newFormTerm); } if (!document.querySelector('.b-search-form__facet-pane--active')) { document.querySelector('.b-search-form__facet-pane').classList.add('b-search-form__facet-pane--active'); } window.dispatchEvent(new Event('resize')); } } }; termList.addEventListener('termToggle', (e) => { e.stopPropagation(); toggleTerm(e.detail); }); document.querySelector('.b-facet-box').addEventListener('newFacets', (e) => { const { facets, activeFacets } = e.detail; const currentBeforeDate = document.querySelector('input[name="before_year"]') ? document.querySelector('input[name="before_year"]').value : ''; const currentAfterDate = document.querySelector('input[name="after_year"]') ? document.querySelector('input[name="after_year"]').value : ''; facets.forEach((facet) => { Object.assign(facetsWithIndex, { [facet.facet]: Object.assign(facet, { index: 0 }) }); }); const facetBoxContainer = document.querySelector('.b-facet-box__facet-container'); facetBoxContainer.innerHTML = ''; termList.innerHTML = ''; Array.from(document.querySelectorAll('b-facet-box__hidden-input')).forEach(el => el.remove()); createFacets(activeFacets); const dateFacet = document.createElement('DIV'); dateFacet.className = 'b-facet-box__facet b-facet-box__facet-date'; dateFacet.innerHTML = dateFacetHTML(); dateFacet.addEventListener('click', (ev) => { if (ev.target.classList.contains(facetTextClass)) { ev.target.classList.toggle(`${ev.target.classList[0]}--active`); ev.target.parentNode.querySelector(`.${facetTermContainerClass}`).classList.toggle(`${facetTermContainerClass}--active`); } }); dateFacet.querySelector('input[name="before_year"]').value = currentBeforeDate; dateFacet.querySelector('input[name="after_year"]').value = currentAfterDate; 
facetBoxContainer.append(dateFacet); if (activeFacets) { // is a set... Array.from(activeFacets).forEach((facetId) => { const target = document.querySelector(`li[data-id='${facetId}'`); if (target) { target.dispatchEvent(newTermToggleEvent(target.dataset)); document.querySelector(`.${termListClass}`).dispatchEvent(newTermToggleEvent(target.dataset)); } }); } window.dispatchEvent(new Event('resize')); }, true); document.onclick = (e) => { if (e.target.classList.contains(facetCloseClass)) { e.target.dispatchEvent(new Event('closeFacetOverlay', { bubbles: true })); } if (e.target.parentElement && e.target.parentElement.classList.contains(facetTerm)) { const parent = e.target.closest(facetTerm) || e.target.parentElement; termList.dispatchEvent(newTermToggleEvent(parent.dataset, false)); parent.dispatchEvent(newTermToggleEvent(parent.dataset)); } }; }; (() => { if (document.querySelector('.b-facet-box')) { initialiseFacetOverlay(); if (document.querySelector('.b-facet-box__modal-button-open')) { document.querySelectorAll('.b-facet-box__modal-button-open').forEach(el => el.addEventListener('click', () => document.querySelector('.b-facet-box').classList.add('b-facet-box--active'))); } if (document.querySelector('.b-facet-box__close-button')) { document.querySelector('.b-facet-box__close-button').addEventListener('click', () => document.querySelector('.b-facet-box').classList.remove('b-facet-box--active')); } window.onresize = () => { const facetFormTerms = Array.from(document.querySelectorAll('.b-facet-box__term.b-facet-box__term--form')); if (document.querySelector('.b-facet-box__term-text.b-facet-box__term-text--no-cross')) { if (window.innerWidth > 499 && window.innerWidth < 992) { const facetContainerWidth = document.querySelector('.b-search-form__facets').offsetWidth; let cutOffWidth = 0; let currentIndex = 1; facetFormTerms.forEach((el) => { cutOffWidth += el.offsetWidth + 10; if (cutOffWidth < facetContainerWidth) { currentIndex += 1; } }); if 
((facetFormTerms.length - currentIndex) > 0) { document.querySelector('.b-search-form__facets-mobile').style.display = 'block'; } else { document.querySelector('.b-search-form__facets-mobile').style.display = 'none'; } document.querySelector('.b-facet-box__term-text.b-facet-box__term-text--no-cross').innerHTML = `+${facetFormTerms.length - currentIndex}`; } else if (window.innerWidth < 500) { if ((facetFormTerms.length) > 0) { document.querySelector('.b-search-form__facets-mobile').style.display = 'block'; } else { document.querySelector('.b-search-form__facets-mobile').style.display = 'none'; } document.querySelector('.b-facet-box__term-text.b-facet-box__term-text--no-cross').innerHTML = `${facetFormTerms.length - 1} filters applied`; } else { document.querySelector('.b-search-form__facets-mobile').style.display = 'none'; } } }; } })();
src/components/blocks/facet-box/_facet-box.js
const facetClass = 'b-facet-box__facet'; const facetTerm = `${facetClass}-term-toggle`; const facetTermTick = `${facetTerm}-tick`; const facetTextClass = `${facetClass}-text`; const facetTermContainerClass = `${facetClass}-term-container`; const termClass = 'b-facet-box__term'; const termListClass = `${termClass}-list`; const termList = document.querySelector(`.${termListClass}`); const termCheckboxClass = `${facetClass}-term-toggle-checkbox`; const facetCloseClass = 'b-facet-box__close-button'; const facetsWithIndex = {}; const termButtonHTML = (facet, term) => ` <span class="b-facet-box__term-text"> ${facet}: ${term} </span> `; const dateFacetHTML = () => ` <div class="b-facet-box__facet-text"> Dates </div> <div class="b-facet-box__facet-term-container b-facet-box__facet-date-container"> <div class="b-facet-box__facet-date-container-start"> <label class="b-facet-box__facet-date-label"> From year: </label> <input class="b-facet-box__facet-date-input" placeholder="Year" type="number" name="after_year"> </div> <div class="b-facet-box__facet-date-container-end"> <label class="b-facet-box__facet-date-label"> To year: </label> <input class="b-facet-box__facet-date-input" placeholder="Year" type="number" name="before_year"> </div> <div class="b-facet-box__facet-date-container-button"> <label class="b-facet-box__facet-date-label"> &nbsp; </label> <button class="b-facet-box__facet-date-button"> <svg class="b-facet-box__facet-date-button-icon" role="img"> <use xlink:href="/svg/vamicons.svg#search"></use> </svg> </button> </div> </div> `; const termCheckbox = (facet, paramName, term, value, count) => { const checkbox = document.createElement('LI'); checkbox.className = 'b-facet-box__facet-term-toggle'; checkbox.dataset.id = `${paramName}-${value}`; checkbox.dataset.facet = facet; checkbox.dataset.paramName = paramName; checkbox.dataset.term = term; checkbox.dataset.value = value; checkbox.dataset.count = count; checkbox.innerHTML = ` <a 
class="b-facet-box__facet-term-toggle-checkbox" href="javascript:void(0);"> <svg class="b-facet-box__facet-term-toggle-tick" role="img"> <use xlink:href="/svg/vamicons.svg#tick"></use> </svg> </a> <span class="b-facet-box__facet-term-toggle-text"> ${term} </span> <span class="b-facet-box__facet-term-toggle-result"> (${count}) </span> `; // const hiddenInput = ` // <input class="b-facet-box__hidden-input" type="checkbox" name="${paramName}" value="${value}"> // `; const hiddenInput = document.createElement('INPUT'); hiddenInput.type = 'checkbox'; hiddenInput.className = 'b-facet-box__hidden-input'; hiddenInput.name = paramName; hiddenInput.value = value; hiddenInput.id = `${paramName}=${value}`; checkbox.addEventListener('termToggle', (e) => { const existingHiddenInput = document.querySelector(`input[id="${`${paramName}=${value}`}"]`); // GOTTA ASSUME THERE'S A FORM ON THE PAGE FOR THIS TO WORK!!! // this is because formData has an order which is annoying to change if (existingHiddenInput) { existingHiddenInput.checked = false; existingHiddenInput.remove(); } else { document.querySelector('#vam-etc-search').appendChild(hiddenInput); document.querySelector(`input[id="${`${paramName}=${value}`}"]`).checked = true; } e.target.querySelector(`.${facetTermTick}`).classList.toggle( `${facetTermTick}--active` ); document.querySelector('.b-facet-box').dispatchEvent(new Event('boxChecked', { bubbles: true })); }); return checkbox; }; const facetHTML = (facet, seeMore) => ` <div class="b-facet-box__facet-text"> ${facet} </div> <ul data-facet="${facet}" class="b-facet-box__facet-term-container">${ seeMore ? 
` <a data-facet="${facet}" class="b-facet-box__term-more" href="#">See more</a> ` : '' }</ul>`; const revealMoreFacets = (e) => { e.preventDefault(); const linkEl = e.target; const facetContainer = e.target.parentNode; const { terms, index, facet, paramName } = facetsWithIndex[e.target.dataset.facet]; e.target.remove(); terms.slice(index, index + 5).forEach(({ term, count, value }) => { facetContainer.appendChild(termCheckbox(facet, paramName, term, value, count)); }); facetsWithIndex[facet].index += 5; if (facetsWithIndex[facet].index !== terms.length) { facetContainer.appendChild(linkEl); } }; const createFacets = (activeFacets) => { const facetBoxContainer = document.querySelector('.b-facet-box__facet-container'); const facetToTerm = Array.from(activeFacets).reduce((res, termfacet) => { const facet = termfacet.split('-')[0]; const term = termfacet.split('-')[1]; if (res[facet]) { res[facet].push(term); } else { res[facet] = [term]; } return res; }, {}); Object.values(facetsWithIndex).forEach(({ facet, terms, paramName, index }) => { const newFacet = document.createElement('DIV'); newFacet.className = 'b-facet-box__facet'; newFacet.innerHTML = facetHTML(facet, terms.length > 5); newFacet.addEventListener('click', (e) => { if (e.target.classList.contains(facetTextClass)) { e.target.classList.toggle(`${e.target.classList[0]}--active`); e.target.parentNode.querySelector(`.${facetTermContainerClass}`).classList.toggle(`${facetTermContainerClass}--active`); } }); const termValues = terms.map(t => t.value); let newIndex = (facetToTerm[paramName] && facetToTerm[paramName].reduce((current, term) => { const test = termValues.indexOf(term); return (current > test ? current : test); }, 5)) || 0; newIndex = ((Math.ceil(newIndex / 5) * 5)); newIndex = newIndex > terms.length ? 
terms.length : (newIndex || 5); terms.slice(index, newIndex).forEach(({ term, count, value }) => { newFacet.querySelector(`.${facetTermContainerClass}`).appendChild(termCheckbox(facet, paramName, term, value, count)); }); facetsWithIndex[facet].index += (newIndex); if (terms.length > 5) { if (facetsWithIndex[facet].index < terms.length) { newFacet.querySelector(`.${facetTermContainerClass}`).appendChild(newFacet.querySelector('.b-facet-box__term-more')); newFacet.querySelector(`.${facetTermContainerClass} .b-facet-box__term-more`).onclick = e => revealMoreFacets(e); } else { newFacet.querySelector(`.${facetTermContainerClass}`).appendChild(newFacet.querySelector('.b-facet-box__term-more')).remove(); } } facetBoxContainer.appendChild(newFacet); }); }; const newTermToggleEvent = (detail, bubbles = true) => new CustomEvent('termToggle', { detail, bubbles }); const initialiseFacetOverlay = () => { const toggleTerm = ({ id, facet, term, paramName }) => { if (id) { // if term already exists, get rid of it if (document.querySelector(`div[data-id='${id}']`)) { Array.from(document.querySelectorAll(`div[data-id='${id}']`)).forEach(el => el.remove()); if (!document.querySelector('.b-search-form__facets').children.length) { document.querySelector('.b-search-form__facet-pane').classList.remove('b-search-form__facet-pane--active'); } window.dispatchEvent(new Event('resize')); } else { const newTermOnClick = () => { Array.from(document.querySelectorAll(`div[data-id='${id}']`)).forEach(el => el.dispatchEvent(newTermToggleEvent({ id, facet, term, paramName }))); if (document.querySelector(`li[data-id='${id}']`)) { document.querySelector(`li[data-id='${id}']`).dispatchEvent(newTermToggleEvent({ id, facet, term, paramName })); } }; const newTerm = document.createElement('DIV'); newTerm.dataset.id = id; newTerm.className = 'b-facet-box__term'; newTerm.innerHTML = termButtonHTML(facet, term); newTerm.onclick = e => newTermOnClick(e); termList.appendChild(newTerm); const newFormTerm = 
newTerm.cloneNode(true); newFormTerm.onclick = e => newTermOnClick(e); newFormTerm.classList.add('b-facet-box__term--form'); if (document.querySelector('.b-search-form__facets')) { document.querySelector('.b-search-form__facets').appendChild(newFormTerm); } if (!document.querySelector('.b-search-form__facet-pane--active')) { document.querySelector('.b-search-form__facet-pane').classList.add('b-search-form__facet-pane--active'); } window.dispatchEvent(new Event('resize')); } } }; termList.addEventListener('termToggle', (e) => { e.stopPropagation(); toggleTerm(e.detail); }); document.querySelector('.b-facet-box').addEventListener('newFacets', (e) => { const { facets, activeFacets } = e.detail; const currentBeforeDate = document.querySelector('input[name="before_year"]') ? document.querySelector('input[name="before_year"]').value : ''; const currentAfterDate = document.querySelector('input[name="after_year"]') ? document.querySelector('input[name="after_year"]').value : ''; facets.forEach((facet) => { Object.assign(facetsWithIndex, { [facet.facet]: Object.assign(facet, { index: 0 }) }); }); const facetBoxContainer = document.querySelector('.b-facet-box__facet-container'); facetBoxContainer.innerHTML = ''; termList.innerHTML = ''; createFacets(activeFacets); const dateFacet = document.createElement('DIV'); dateFacet.className = 'b-facet-box__facet b-facet-box__facet-date'; dateFacet.innerHTML = dateFacetHTML(); dateFacet.addEventListener('click', (ev) => { if (ev.target.classList.contains(facetTextClass)) { ev.target.classList.toggle(`${ev.target.classList[0]}--active`); ev.target.parentNode.querySelector(`.${facetTermContainerClass}`).classList.toggle(`${facetTermContainerClass}--active`); } }); dateFacet.querySelector('input[name="before_year"]').value = currentBeforeDate; dateFacet.querySelector('input[name="after_year"]').value = currentAfterDate; facetBoxContainer.append(dateFacet); if (activeFacets) { // is a set... 
Array.from(activeFacets).forEach((facetId) => { const target = document.querySelector(`li[data-id='${facetId}'`); if (target) { target.dispatchEvent(newTermToggleEvent(target.dataset)); document.querySelector(`.${termListClass}`).dispatchEvent(newTermToggleEvent(target.dataset)); } }); } window.dispatchEvent(new Event('resize')); }, true); document.onclick = (e) => { if (e.target.classList.contains(facetCloseClass)) { e.target.dispatchEvent(new Event('closeFacetOverlay', { bubbles: true })); } if (e.target.parentElement && e.target.parentElement.classList.contains(facetTerm)) { const parent = e.target.closest(facetTerm) || e.target.parentElement; termList.dispatchEvent(newTermToggleEvent(parent.dataset, false)); parent.dispatchEvent(newTermToggleEvent(parent.dataset)); } }; }; (() => { if (document.querySelector('.b-facet-box')) { initialiseFacetOverlay(); if (document.querySelector('.b-facet-box__modal-button-open')) { document.querySelectorAll('.b-facet-box__modal-button-open').forEach(el => el.addEventListener('click', () => document.querySelector('.b-facet-box').classList.add('b-facet-box--active'))); } if (document.querySelector('.b-facet-box__close-button')) { document.querySelector('.b-facet-box__close-button').addEventListener('click', () => document.querySelector('.b-facet-box').classList.remove('b-facet-box--active')); } window.onresize = () => { const facetFormTerms = Array.from(document.querySelectorAll('.b-facet-box__term.b-facet-box__term--form')); if (document.querySelector('.b-facet-box__term-text.b-facet-box__term-text--no-cross')) { if (window.innerWidth > 499 && window.innerWidth < 992) { const facetContainerWidth = document.querySelector('.b-search-form__facets').offsetWidth; let cutOffWidth = 0; let currentIndex = 1; facetFormTerms.forEach((el) => { cutOffWidth += el.offsetWidth + 10; if (cutOffWidth < facetContainerWidth) { currentIndex += 1; } }); if ((facetFormTerms.length - currentIndex) > 0) { 
document.querySelector('.b-search-form__facets-mobile').style.display = 'block'; } else { document.querySelector('.b-search-form__facets-mobile').style.display = 'none'; } document.querySelector('.b-facet-box__term-text.b-facet-box__term-text--no-cross').innerHTML = `+${facetFormTerms.length - currentIndex}`; } else if (window.innerWidth < 500) { if ((facetFormTerms.length) > 0) { document.querySelector('.b-search-form__facets-mobile').style.display = 'block'; } else { document.querySelector('.b-search-form__facets-mobile').style.display = 'none'; } document.querySelector('.b-facet-box__term-text.b-facet-box__term-text--no-cross').innerHTML = `${facetFormTerms.length - 1} filters applied`; } else { document.querySelector('.b-search-form__facets-mobile').style.display = 'none'; } } }; } })();
introduced new bug whoops
src/components/blocks/facet-box/_facet-box.js
introduced new bug whoops
<ide><path>rc/components/blocks/facet-box/_facet-box.js <ide> facetBoxContainer.innerHTML = ''; <ide> termList.innerHTML = ''; <ide> <add> Array.from(document.querySelectorAll('b-facet-box__hidden-input')).forEach(el => el.remove()); <add> <ide> createFacets(activeFacets); <ide> <ide> const dateFacet = document.createElement('DIV');
JavaScript
mit
8f638e2ee1ccc8c4ce9e9a9ec56f38102f39161c
0
jpommerening/release-station,jpommerening/release-station
/** * Copyright 2015 aixigo AG * Released under the MIT license. * http://laxarjs.org */ define( [ 'laxar-patterns' ], function( patterns ) { 'use strict'; /////////////////////////////////////////////////////////////////////////////////////////////////////////// Controller.injections = [ 'axEventBus', 'axFeatures' ]; Controller.create = function create( eventBus, features ) { return new Controller( eventBus, features ); }; /////////////////////////////////////////////////////////////////////////////////////////////////////////// function Controller( eventBus, features ) { this.eventBus = eventBus; this.features = features; var baseOptions = { method: 'GET', headers: {} }; var ready = false; var authorized = authHandler( this, 'auth' ).then( setAuthHeader ); var resources = {}; if( features.data.sources.resource ) { patterns.resources.handlerFor( this ) .registerResourceFromFeature( 'data.sources', { onReplace: function( event ) { return provideResources( event.data ); }, unUpdate: function( event ) { } } ); } else if( features.data.sources.length ) { provideResources( features.data.sources ); } var provideActions = [ 'provide-resource' ]; var provideHandler = createRequestHandler( eventBus, provideResource ); provideActions.forEach( function( action ) { eventBus.subscribe( 'takeActionRequest.' 
+ action, provideHandler ); } ); eventBus.subscribe( 'beginLifecycleRequest', function() { } ); eventBus.subscribe( 'endLifecycleRequest', function() { } ); function setAuthHeader( data ) { if( data && data.access_token ) { baseOptions.headers[ 'Authorization' ] = 'token ' + data.access_token; } else { delete baseOptions.headers[ 'Authorization' ]; } } function provideResources( sources ) { return sources.map( provideResource ); } function provideResource( source ) { var options = Object.create( baseOptions ); var promise = resources[ source.resource ]; if( !promise ) { promise = authorized.then( function() { return fetch( source.url, options ).then( handleResponse ); } ); } function handleResponse( response ) { var promise = response.json(); var links = response.headers.get( 'Link' ); var next = links && parseLinks( links ).next; if( next ) { return fetch( next, options ) .then( handleResponse ) .then( function( tail ) { return promise.then( function( head ) { return head.concat( tail ); } ); } ); } else { return promise; } } return promise.then( null, function( error ) { // Cache failures too, but prune them after 10 seconds if( !promise.timeout ) { promise.timeout = setTimeout( function() { delete resources[ source.resource ]; }, 10000 ); } throw error; } ); } } /////////////////////////////////////////////////////////////////////////////////////////////////////////// function parseLinks( string ) { var pattern = /^\s*<([^>]+)>;\s*rel="(\w+)"\s*$/; if( !string ) { return {}; } return string .split( ',' ) .map( pattern.exec.bind( pattern ) ) .reduce( function( object, match ) { if( match ) { object[ match[ 2 ] ] = match[ 1 ]; } return object; }, {} ); } /////////////////////////////////////////////////////////////////////////////////////////////////////////// function authHandler( context, name ) { var feature = context.features[ name ]; var resource = feature.resource; var flag = feature.flag; return new Promise( function( resolve, reject ) { var data = {}; var 
state = !flag; if( !resource && !flag ) { return resolve( data ); } if( resource ) { context.eventBus.subscribe( 'didReplace.' + resource, function( event ) { data = event.data; if( state ) { resolve( data ); } } ); } if( flag ) { context.eventBus.subscribe( 'didChangeFlag.' + flag, function( event ) { state = event.state; if( !state ) { reject( data ); } else if( data ) { resolve( data ); } } ); } } ); } /////////////////////////////////////////////////////////////////////////////////////////////////////////// function createRequestHandler( eventBus, provider ) { var OUTCOME_ERROR = patterns.actions.OUTCOME_ERROR; var OUTCOME_SUCCESS = patterns.actions.OUTCOME_SUCCESS; return function( event ) { var action = event.action; var data = event.data; var resource = data.resource; var topic = action + '-' + resource; return eventBus.publish( 'willTakeAction.' + topic, { action: action, data: data } ).then( function() { return provider( data ); } ).then( function( data ) { return eventBus.publish( 'didReplace.' + resource, { resource: resource, data: data } ); } ).then( function() { return OUTCOME_SUCCESS; }, function() { return OUTCOME_ERROR; } ).then( function( outcome ) { return eventBus.publish( 'didTakeAction.' + topic + '.' + outcome, { action: action, outcome: outcome, data: data } ); } ); }; } /////////////////////////////////////////////////////////////////////////////////////////////////////////// return { name: 'githubDataActivity', create: Controller.create, injections: Controller.injections }; } );
includes/widgets/release-station/github-data-activity/github-data-activity.js
/** * Copyright 2015 aixigo AG * Released under the MIT license. * http://laxarjs.org */ define( [ 'laxar-patterns' ], function( patterns ) { 'use strict'; /////////////////////////////////////////////////////////////////////////////////////////////////////////// Controller.injections = [ 'axEventBus', 'axFeatures' ]; Controller.create = function create( eventBus, features ) { return new Controller( eventBus, features ); }; /////////////////////////////////////////////////////////////////////////////////////////////////////////// function Controller( eventBus, features ) { this.eventBus = eventBus; this.features = features; var baseOptions = { method: 'GET', headers: {} }; var ready = false; var authorized = authHandler( this, 'auth' ).then( setAuthHeader ); var resources = {}; if( features.data.sources.resource ) { patterns.resources.handlerFor( this ) .registerResourceFromFeature( 'data.sources', { onReplace: function( event ) { return provideResources( event.data ); }, unUpdate: function( event ) { } } ); } else if( features.data.sources.length ) { provideResources( features.data.sources ); } var provideActions = [ 'provide-resource' ]; var provideHandler = createRequestHandler( eventBus, provideResource ); provideActions.forEach( function( action ) { eventBus.subscribe( 'takeActionRequest.' 
+ action, provideHandler ); } ); eventBus.subscribe( 'beginLifecycleRequest', function() { } ); eventBus.subscribe( 'endLifecycleRequest', function() { } ); function setAuthHeader( data ) { if( data && data.access_token ) { baseOptions.headers[ 'Authorization' ] = 'token ' + data.access_token; } else { delete baseOptions.headers[ 'Authorization' ]; } } function provideResources( sources ) { return sources.map( provideResource ); } function provideResource( source ) { var options = Object.create( baseOptions ); var promise = resources[ source.resource ]; if( !promise ) { promise = fetch( source.url, options ).then( handleResponse ); } function handleResponse( response ) { var promise = response.json(); var links = response.headers.get( 'Link' ); var next = links && parseLinks( links ).next; if( next ) { return fetch( next, options ) .then( handleResponse ) .then( function( tail ) { return promise.then( function( head ) { return head.concat( tail ); } ); } ); } else { return promise; } } return promise.then( null, function( error ) { // Cache failures too, but prune them after 10 seconds if( !promise.timeout ) { promise.timeout = setTimeout( function() { delete resources[ source.resource ]; }, 10000 ); } throw error; } ); } } /////////////////////////////////////////////////////////////////////////////////////////////////////////// function parseLinks( string ) { var pattern = /^\s*<([^>]+)>;\s*rel="(\w+)"\s*$/; if( !string ) { return {}; } return string .split( ',' ) .map( pattern.exec.bind( pattern ) ) .reduce( function( object, match ) { if( match ) { object[ match[ 2 ] ] = match[ 1 ]; } return object; }, {} ); } /////////////////////////////////////////////////////////////////////////////////////////////////////////// function authHandler( context, name ) { var feature = context.features[ name ]; var resource = feature.resource; var flag = feature.flag; return new Promise( function( resolve, reject ) { var data = {}; var state = !flag; if( !resource && !flag ) { 
return resolve( data ); } if( resource ) { context.eventBus.subscribe( 'didReplace.' + resource, function( event ) { data = event.data; if( state ) { resolve( data ); } } ); } if( flag ) { context.eventBus.subscribe( 'didChangeFlag.' + flag, function( event ) { state = event.state; if( !state ) { reject( data ); } else if( data ) { resolve( data ); } } ); } } ); } /////////////////////////////////////////////////////////////////////////////////////////////////////////// function createRequestHandler( eventBus, provider ) { var OUTCOME_ERROR = patterns.actions.OUTCOME_ERROR; var OUTCOME_SUCCESS = patterns.actions.OUTCOME_SUCCESS; return function( event ) { var action = event.action; var data = event.data; var resource = data.resource; var topic = action + '-' + resource; return eventBus.publish( 'willTakeAction.' + topic, { action: action, data: data } ).then( function() { return provider( data ); } ).then( function( data ) { return eventBus.publish( 'didReplace.' + resource, { resource: resource, data: data } ); } ).then( function() { return OUTCOME_SUCCESS; }, function() { return OUTCOME_ERROR; } ).then( function( outcome ) { return eventBus.publish( 'didTakeAction.' + topic + '.' + outcome, { action: action, outcome: outcome, data: data } ); } ); }; } /////////////////////////////////////////////////////////////////////////////////////////////////////////// return { name: 'githubDataActivity', create: Controller.create, injections: Controller.injections }; } );
Don't forget to auth :unlock:
includes/widgets/release-station/github-data-activity/github-data-activity.js
Don't forget to auth :unlock:
<ide><path>ncludes/widgets/release-station/github-data-activity/github-data-activity.js <ide> var promise = resources[ source.resource ]; <ide> <ide> if( !promise ) { <del> promise = fetch( source.url, options ).then( handleResponse ); <add> promise = authorized.then( function() { <add> return fetch( source.url, options ).then( handleResponse ); <add> } ); <ide> } <ide> <ide> function handleResponse( response ) {
Java
agpl-3.0
b4f501e8a2518e2878d016290c7bc1bf1634b6ca
0
bio4j/bio4j
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.era7.bioinfo.bio4j.programs; import com.era7.bioinfo.bio4jmodel.nodes.OrganismNode; import com.era7.bioinfo.bio4jmodel.nodes.TaxonNode; import com.era7.bioinfo.bio4jmodel.nodes.ncbi.NCBITaxonNode; import com.era7.bioinfo.bio4jmodel.relationships.ncbi.NCBIMainTaxonRel; import com.era7.bioinfo.bio4jmodel.relationships.ncbi.NCBITaxonParentRel; import com.era7.bioinfo.bio4jmodel.relationships.ncbi.NCBITaxonRel; import com.era7.bioinfo.bio4jmodel.util.Bio4jManager; import com.era7.bioinfo.bio4jmodel.util.NodeRetriever; import com.era7.lib.bioinfo.bioinfoutil.Executable; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Set; import java.util.logging.FileHandler; import java.util.logging.Level; import java.util.logging.Logger; import java.util.logging.SimpleFormatter; import org.neo4j.graphdb.Node; import org.neo4j.graphdb.Transaction; /** * Imports NCBI taxonomy into Bio4j * * @author Pablo Pareja Tobes <[email protected]> */ public class ImportNCBITaxonomy implements Executable { private static final Logger logger = Logger.getLogger("ImportNCBITaxonomy"); private static FileHandler fh; @Override public void execute(ArrayList<String> array) { String[] args = new String[array.size()]; for (int i = 0; i < array.size(); i++) { args[i] = array.get(i); } main(args); } public static void main(String[] args) { if (args.length != 4) { System.out.println("This program expects three parameters: \n" + "1. Nodes DMP filename \n" + "2. Names DMP filename \n" + "3. Merged DMP filename \n" + "4. 
Bio4j DB folder"); } else { Bio4jManager manager = null; Transaction txn = null; int txnCounter = 0; int txnLimitForCommit = 10000; try { // This block configure the logger with handler and formatter fh = new FileHandler("ImportNCBITaxonomy.log", true); SimpleFormatter formatter = new SimpleFormatter(); fh.setFormatter(formatter); logger.addHandler(fh); logger.setLevel(Level.ALL); File nodesDumpFile = new File(args[0]); File namesDumpFile = new File(args[1]); File mergedDumpFile = new File(args[2]); BufferedReader reader = new BufferedReader(new FileReader(nodesDumpFile)); String line = null; logger.log(Level.INFO, "creating manager..."); manager = new Bio4jManager(args[3], true, false); NodeRetriever nodeRetriever = new NodeRetriever(manager); HashMap<String, String> nodeParentMap = new HashMap<String, String>(); txn = manager.beginTransaction(); logger.log(Level.INFO, "reading nodes file..."); while ((line = reader.readLine()) != null) { if (line.trim().length() > 0) { String[] columns = line.split("\\|"); NCBITaxonNode node = new NCBITaxonNode(manager.createNode()); //setting node_type property node.setNodeType(NCBITaxonNode.NODE_TYPE); node.setTaxId(columns[0].trim()); node.setRank(columns[2].trim()); node.setEmblCode(columns[3].trim()); //indexing the node.. 
manager.getNCBITaxonIdIndex().add(node.getNode(), NCBITaxonNode.NCBI_TAXON_ID_INDEX, node.getTaxId()); //indexing the node by its node_type manager.getNodeTypeIndex().add(node.getNode(), Bio4jManager.NODE_TYPE_INDEX_NAME, NCBITaxonNode.NODE_TYPE); //saving the parent of the node for later nodeParentMap.put(node.getTaxId(), columns[1].trim()); txnCounter++; if (txnCounter % txnLimitForCommit == 0) { txn.success(); txn.finish(); txn = manager.beginTransaction(); } } } //commiting and 'restarting' transaction txn.success(); txn.finish(); txn = manager.beginTransaction(); txnCounter = 0; reader.close(); logger.log(Level.INFO, "done!"); logger.log(Level.INFO, "reading names file..."); //------------reading names file----------------- reader = new BufferedReader(new FileReader(namesDumpFile)); while ((line = reader.readLine()) != null) { String[] columns = line.split("\\|"); if (columns[columns.length - 1].trim().equals("scientific name")) { String taxId = columns[0].trim(); String nameSt = columns[1].trim(); NCBITaxonNode node = nodeRetriever.getNCBITaxonByTaxId(taxId); node.setScientificName(nameSt); txnCounter++; if (txnCounter % txnLimitForCommit == 0) { //commiting and 'restarting' transaction txn.success(); txn.finish(); txn = manager.beginTransaction(); } } } reader.close(); logger.log(Level.INFO, "done!"); logger.log(Level.INFO, "storing relationships..."); //commiting and 'restarting' transaction txn.success(); txn.finish(); txn = manager.beginTransaction(); txnCounter = 0; Set<String> nodesSet = nodeParentMap.keySet(); for (String nodeTaxId : nodesSet) { String parentTaxId = nodeParentMap.get(nodeTaxId); NCBITaxonNode currentNode = nodeRetriever.getNCBITaxonByTaxId(nodeTaxId); if (!nodeTaxId.equals(parentTaxId)) { NCBITaxonNode parentNode = nodeRetriever.getNCBITaxonByTaxId(parentTaxId); parentNode.getNode().createRelationshipTo(currentNode.getNode(), new NCBITaxonParentRel(null)); } else { manager.getReferenceNode().createRelationshipTo(currentNode.getNode(), 
new NCBIMainTaxonRel(null)); } txnCounter++; if (txnCounter % txnLimitForCommit == 0) { //commiting and 'restarting' transaction txn.success(); txn.finish(); txn = manager.beginTransaction(); } } txn.success(); txn.finish(); txn = manager.beginTransaction(); logger.log(Level.INFO, "Done!"); logger.log(Level.INFO, "Associating uniprot taxonomy..."); associateTaxonomy(nodeRetriever.getMainTaxon(), nodeRetriever, new NCBITaxonRel(null)); logger.log(Level.INFO, "Done!"); logger.log(Level.INFO, "reading merged file..."); //------------reading merged file----------------- reader = new BufferedReader(new FileReader(mergedDumpFile)); while ((line = reader.readLine()) != null) { String[] columns = line.split("\\|"); String oldId = columns[0].trim(); String goodId = columns[1].trim(); NCBITaxonNode goodNode = nodeRetriever.getNCBITaxonByTaxId(goodId); //indexing the node.. manager.getNCBITaxonIdIndex().add(goodNode.getNode(), NCBITaxonNode.NCBI_TAXON_ID_INDEX, oldId); txnCounter++; if (txnCounter % txnLimitForCommit == 0) { //commiting and 'restarting' transaction txn.success(); txn.finish(); txn = manager.beginTransaction(); } } reader.close(); logger.log(Level.INFO, "done!"); txn.success(); } catch (Exception ex) { Logger.getLogger(ImportNCBITaxonomy.class.getName()).log(Level.SEVERE, null, ex); txn.failure(); } finally { //commiting transaction txn.finish(); //closing logger file handler fh.close(); logger.log(Level.INFO, "Closing up inserter and index service...."); // shutdown, makes sure all changes are written to disk manager.shutDown(); } } } private static void associateTaxonomy(TaxonNode taxonNode, NodeRetriever nodeRetriever, NCBITaxonRel nCBITaxonRel) { List<OrganismNode> organisms = taxonNode.getOrganisms(); if (!organisms.isEmpty()) { for (OrganismNode tempOrg : organisms) { Node ncbiNode = nodeRetriever.getNCBITaxonByTaxId(tempOrg.getNcbiTaxonomyId()).getNode(); tempOrg.getNode().createRelationshipTo(ncbiNode, nCBITaxonRel); } } else { for (TaxonNode tempTaxon 
: taxonNode.getChildren()) { associateTaxonomy(tempTaxon, nodeRetriever, nCBITaxonRel); } } } }
src/com/era7/bioinfo/bio4j/programs/ImportNCBITaxonomy.java
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.era7.bioinfo.bio4j.programs; import com.era7.bioinfo.bio4jmodel.nodes.OrganismNode; import com.era7.bioinfo.bio4jmodel.nodes.TaxonNode; import com.era7.bioinfo.bio4jmodel.nodes.ncbi.NCBITaxonNode; import com.era7.bioinfo.bio4jmodel.relationships.ncbi.NCBIMainTaxonRel; import com.era7.bioinfo.bio4jmodel.relationships.ncbi.NCBITaxonParentRel; import com.era7.bioinfo.bio4jmodel.relationships.ncbi.NCBITaxonRel; import com.era7.bioinfo.bio4jmodel.util.Bio4jManager; import com.era7.bioinfo.bio4jmodel.util.NodeRetriever; import com.era7.lib.bioinfo.bioinfoutil.Executable; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Set; import java.util.logging.FileHandler; import java.util.logging.Level; import java.util.logging.Logger; import java.util.logging.SimpleFormatter; import org.neo4j.graphdb.Node; import org.neo4j.graphdb.Transaction; /** * Imports NCBI taxonomy into Bio4j * @author Pablo Pareja Tobes <[email protected]> */ public class ImportNCBITaxonomy implements Executable { private static final Logger logger = Logger.getLogger("ImportNCBITaxonomy"); private static FileHandler fh; @Override public void execute(ArrayList<String> array) { String[] args = new String[array.size()]; for (int i = 0; i < array.size(); i++) { args[i] = array.get(i); } main(args); } public static void main(String[] args) { if (args.length != 3) { System.out.println("This program expects three parameters: \n" + "1. Nodes DMP filename \n" + "2. Names DMP filename \n" + "3. 
Bio4j DB folder"); } else { Bio4jManager manager = null; Transaction txn = null; int txnCounter = 0; int txnLimitForCommit = 10000; try { // This block configure the logger with handler and formatter fh = new FileHandler("ImportNCBITaxonomy.log", true); SimpleFormatter formatter = new SimpleFormatter(); fh.setFormatter(formatter); logger.addHandler(fh); logger.setLevel(Level.ALL); File nodesDumpFile = new File(args[0]); File namesDumpFile = new File(args[1]); BufferedReader reader = new BufferedReader(new FileReader(nodesDumpFile)); String line = null; logger.log(Level.INFO, "creating manager..."); manager = new Bio4jManager(args[2], true, false); NodeRetriever nodeRetriever = new NodeRetriever(manager); HashMap<String, String> nodeParentMap = new HashMap<String, String>(); txn = manager.beginTransaction(); logger.log(Level.INFO, "reading nodes file..."); while ((line = reader.readLine()) != null) { if (line.trim().length() > 0) { String[] columns = line.split("\\|"); NCBITaxonNode node = new NCBITaxonNode(manager.createNode()); //setting node_type property node.setNodeType(NCBITaxonNode.NODE_TYPE); node.setTaxId(columns[0].trim()); node.setRank(columns[2].trim()); node.setEmblCode(columns[3].trim()); //indexing the node.. 
manager.getNCBITaxonIdIndex().add(node.getNode(), NCBITaxonNode.NCBI_TAXON_ID_INDEX, node.getTaxId()); //indexing the node by its node_type manager.getNodeTypeIndex().add(node.getNode(), Bio4jManager.NODE_TYPE_INDEX_NAME, NCBITaxonNode.NODE_TYPE); //saving the parent of the node for later nodeParentMap.put(node.getTaxId(), columns[1].trim()); txnCounter++; if (txnCounter % txnLimitForCommit == 0) { txn.success(); txn.finish(); txn = manager.beginTransaction(); } } } //commiting and 'restarting' transaction txn.success(); txn.finish(); txn = manager.beginTransaction(); txnCounter = 0; reader.close(); logger.log(Level.INFO, "done!"); logger.log(Level.INFO, "reading names file..."); //------------reading names file----------------- reader = new BufferedReader(new FileReader(namesDumpFile)); while ((line = reader.readLine()) != null) { String[] columns = line.split("\\|"); if (columns[columns.length - 1].trim().equals("scientific name")) { String taxId = columns[0].trim(); String nameSt = columns[1].trim(); NCBITaxonNode node = nodeRetriever.getNCBITaxonByTaxId(taxId); node.setScientificName(nameSt); txnCounter++; if (txnCounter % txnLimitForCommit == 0) { //commiting and 'restarting' transaction txn.success(); txn.finish(); txn = manager.beginTransaction(); } } } reader.close(); logger.log(Level.INFO, "done!"); logger.log(Level.INFO, "storing relationships..."); //commiting and 'restarting' transaction txn.success(); txn.finish(); txn = manager.beginTransaction(); txnCounter = 0; Set<String> nodesSet = nodeParentMap.keySet(); for (String nodeTaxId : nodesSet) { String parentTaxId = nodeParentMap.get(nodeTaxId); NCBITaxonNode currentNode = nodeRetriever.getNCBITaxonByTaxId(nodeTaxId); if (!nodeTaxId.equals(parentTaxId)) { NCBITaxonNode parentNode = nodeRetriever.getNCBITaxonByTaxId(parentTaxId); parentNode.getNode().createRelationshipTo(currentNode.getNode(), new NCBITaxonParentRel(null)); } else { manager.getReferenceNode().createRelationshipTo(currentNode.getNode(), 
new NCBIMainTaxonRel(null)); } txnCounter++; if (txnCounter % txnLimitForCommit == 0) { //commiting and 'restarting' transaction txn.success(); txn.finish(); txn = manager.beginTransaction(); } } txn.success(); txn.finish(); txn = manager.beginTransaction(); logger.log(Level.INFO, "Done!"); logger.log(Level.INFO,"Associating uniprot taxonomy..."); associateTaxonomy(nodeRetriever.getMainTaxon(), nodeRetriever, new NCBITaxonRel(null)); logger.log(Level.INFO, "Done!"); txn.success(); } catch (Exception ex) { Logger.getLogger(ImportNCBITaxonomy.class.getName()).log(Level.SEVERE, null, ex); txn.failure(); } finally { //commiting transaction txn.finish(); //closing logger file handler fh.close(); logger.log(Level.INFO, "Closing up inserter and index service...."); // shutdown, makes sure all changes are written to disk manager.shutDown(); } } } private static void associateTaxonomy(TaxonNode taxonNode, NodeRetriever nodeRetriever, NCBITaxonRel nCBITaxonRel){ List<OrganismNode> organisms = taxonNode.getOrganisms(); if(!organisms.isEmpty()){ for (OrganismNode tempOrg : organisms) { Node ncbiNode = nodeRetriever.getNCBITaxonByTaxId(tempOrg.getNcbiTaxonomyId()).getNode(); tempOrg.getNode().createRelationshipTo(ncbiNode, nCBITaxonRel); } }else{ for (TaxonNode tempTaxon : taxonNode.getChildren()) { associateTaxonomy(tempTaxon, nodeRetriever, nCBITaxonRel); } } } }
adding support for old NCBI taxonomic unit IDs
src/com/era7/bioinfo/bio4j/programs/ImportNCBITaxonomy.java
adding support for old NCBI taxonomic unit IDs
<ide><path>rc/com/era7/bioinfo/bio4j/programs/ImportNCBITaxonomy.java <ide> <ide> /** <ide> * Imports NCBI taxonomy into Bio4j <add> * <ide> * @author Pablo Pareja Tobes <[email protected]> <ide> */ <ide> public class ImportNCBITaxonomy implements Executable { <ide> <ide> public static void main(String[] args) { <ide> <del> if (args.length != 3) { <add> if (args.length != 4) { <ide> System.out.println("This program expects three parameters: \n" <ide> + "1. Nodes DMP filename \n" <ide> + "2. Names DMP filename \n" <del> + "3. Bio4j DB folder"); <add> + "3. Merged DMP filename \n" <add> + "4. Bio4j DB folder"); <ide> } else { <ide> <ide> Bio4jManager manager = null; <ide> <ide> File nodesDumpFile = new File(args[0]); <ide> File namesDumpFile = new File(args[1]); <add> File mergedDumpFile = new File(args[2]); <ide> <ide> BufferedReader reader = new BufferedReader(new FileReader(nodesDumpFile)); <ide> String line = null; <del> <add> <ide> logger.log(Level.INFO, "creating manager..."); <del> manager = new Bio4jManager(args[2], true, false); <del> NodeRetriever nodeRetriever = new NodeRetriever(manager); <add> manager = new Bio4jManager(args[3], true, false); <add> NodeRetriever nodeRetriever = new NodeRetriever(manager); <ide> <ide> <ide> HashMap<String, String> nodeParentMap = new HashMap<String, String>(); <ide> <ide> //indexing the node.. 
<ide> manager.getNCBITaxonIdIndex().add(node.getNode(), NCBITaxonNode.NCBI_TAXON_ID_INDEX, node.getTaxId()); <del> <add> <ide> //indexing the node by its node_type <ide> manager.getNodeTypeIndex().add(node.getNode(), Bio4jManager.NODE_TYPE_INDEX_NAME, NCBITaxonNode.NODE_TYPE); <ide> <ide> txn = manager.beginTransaction(); <ide> } <ide> <del> } <add> } <ide> <ide> txn.success(); <ide> txn.finish(); <ide> txn = manager.beginTransaction(); <ide> logger.log(Level.INFO, "Done!"); <del> <del> logger.log(Level.INFO,"Associating uniprot taxonomy..."); <del> <del> associateTaxonomy(nodeRetriever.getMainTaxon(), nodeRetriever, new NCBITaxonRel(null)); <del> <add> <add> logger.log(Level.INFO, "Associating uniprot taxonomy..."); <add> <add> associateTaxonomy(nodeRetriever.getMainTaxon(), nodeRetriever, new NCBITaxonRel(null)); <add> <ide> logger.log(Level.INFO, "Done!"); <add> <add> logger.log(Level.INFO, "reading merged file..."); <add> //------------reading merged file----------------- <add> reader = new BufferedReader(new FileReader(mergedDumpFile)); <add> while ((line = reader.readLine()) != null) { <add> <add> String[] columns = line.split("\\|"); <add> <add> String oldId = columns[0].trim(); <add> String goodId = columns[1].trim(); <add> <add> NCBITaxonNode goodNode = nodeRetriever.getNCBITaxonByTaxId(goodId); <add> //indexing the node.. 
<add> manager.getNCBITaxonIdIndex().add(goodNode.getNode(), NCBITaxonNode.NCBI_TAXON_ID_INDEX, oldId); <add> <add> txnCounter++; <add> if (txnCounter % txnLimitForCommit == 0) { <add> //commiting and 'restarting' transaction <add> txn.success(); <add> txn.finish(); <add> txn = manager.beginTransaction(); <add> } <add> <add> <add> } <add> reader.close(); <add> logger.log(Level.INFO, "done!"); <ide> <ide> txn.success(); <ide> <ide> } catch (Exception ex) { <ide> Logger.getLogger(ImportNCBITaxonomy.class.getName()).log(Level.SEVERE, null, ex); <del> <add> <ide> txn.failure(); <ide> <ide> } finally { <ide> <ide> //commiting transaction <ide> txn.finish(); <del> <add> <ide> //closing logger file handler <ide> fh.close(); <ide> logger.log(Level.INFO, "Closing up inserter and index service...."); <ide> <ide> } <ide> } <del> <add> <ide> private static void associateTaxonomy(TaxonNode taxonNode, <del> NodeRetriever nodeRetriever, <del> NCBITaxonRel nCBITaxonRel){ <del> <add> NodeRetriever nodeRetriever, <add> NCBITaxonRel nCBITaxonRel) { <add> <ide> List<OrganismNode> organisms = taxonNode.getOrganisms(); <del> <del> if(!organisms.isEmpty()){ <add> <add> if (!organisms.isEmpty()) { <ide> for (OrganismNode tempOrg : organisms) { <ide> Node ncbiNode = nodeRetriever.getNCBITaxonByTaxId(tempOrg.getNcbiTaxonomyId()).getNode(); <ide> tempOrg.getNode().createRelationshipTo(ncbiNode, nCBITaxonRel); <ide> } <del> }else{ <add> } else { <ide> for (TaxonNode tempTaxon : taxonNode.getChildren()) { <ide> associateTaxonomy(tempTaxon, nodeRetriever, nCBITaxonRel); <ide> } <ide> } <del> <add> <ide> } <ide> }
Java
apache-2.0
28797e3f553d3cbd04f0cb18ac085af581e460e8
0
jenkinsci/pagerduty-plugin,alexanderlz/jenkins-pagerduty-plugin,jenkinsci/pagerduty-plugin,alexanderlz/jenkins-pagerduty-plugin
package org.jenkinsci.plugins.pagerduty; /** * Created by alexander on 9/15/15. */ import com.github.dikhan.PagerDutyEventsClient; import com.github.dikhan.domain.EventResult; import com.github.dikhan.domain.ResolveIncident; import com.github.dikhan.domain.TriggerIncident; import hudson.EnvVars; import hudson.Extension; import hudson.Launcher; import hudson.model.*; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.BuildStepMonitor; import hudson.tasks.Notifier; import hudson.tasks.Publisher; import jenkins.model.Jenkins; import org.kohsuke.stapler.DataBoundConstructor; import java.io.IOException; import java.io.PrintStream; import java.util.LinkedList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; public class PagerDutyTrigger extends Notifier { private static final String JENKINS_PD_CLIENT = "JenkinsPagerDutyClient"; private static final String DEFAULT_RESOLVE_STR = "Automatically Resolved by PD plugin"; private static final String DEFAULT_RESOLVE_DESC = "Resolved by PD plugin"; private enum ValidationResult { DO_NOTHING, DO_TRIGGER, DO_RESOLVE } private static final String DEFAULT_DESCRIPTION_STRING = "I was too lazy to create a description, but trust me it's important!"; public String serviceKey; public boolean resolveOnBackToNormal; public boolean triggerOnSuccess; public boolean triggerOnFailure; public boolean triggerOnUnstable; public boolean triggerOnAborted; public boolean triggerOnNotBuilt; public String incidentKey; public String incDescription; public String incDetails; public Integer numPreviousBuildsToProbe; public boolean isResolveOnBackToNormal() { return resolveOnBackToNormal; } public void setResolveOnBackToNormal(boolean resolveOnBackToNormal) { this.resolveOnBackToNormal = resolveOnBackToNormal; } public String getServiceKey() { return serviceKey; } public boolean isTriggerOnSuccess() { return triggerOnSuccess; } public boolean isTriggerOnFailure() { return triggerOnFailure; } public boolean 
isTriggerOnUnstable() { return triggerOnUnstable; } public boolean isTriggerOnAborted() { return triggerOnAborted; } public boolean isTriggerOnNotBuilt() { return triggerOnNotBuilt; } public String getIncidentKey() { return incidentKey; } public String getIncDescription() { return incDescription; } public String getIncDetails() { return incDetails; } public Integer getNumPreviousBuildsToProbe() { return numPreviousBuildsToProbe; } protected Object readResolve() { // this.getDescriptor().load(); return this; } @Override public DescriptorImpl getDescriptor() { Jenkins j = Jenkins.getInstance(); return j.getDescriptorByType(DescriptorImpl.class); } @DataBoundConstructor public PagerDutyTrigger(String serviceKey, boolean resolveOnBackToNormal, boolean triggerOnSuccess, boolean triggerOnFailure, boolean triggerOnAborted, boolean triggerOnUnstable, boolean triggerOnNotBuilt, String incidentKey, String incDescription, String incDetails, Integer numPreviousBuildsToProbe) { super(); this.serviceKey = serviceKey; this.resolveOnBackToNormal = resolveOnBackToNormal; this.triggerOnSuccess = triggerOnSuccess; this.triggerOnFailure = triggerOnFailure; this.triggerOnUnstable = triggerOnUnstable; this.triggerOnAborted = triggerOnAborted; this.triggerOnNotBuilt = triggerOnNotBuilt; this.incidentKey = incidentKey; this.incDescription = incDescription; this.incDetails = incDetails; this.numPreviousBuildsToProbe = (numPreviousBuildsToProbe != null && numPreviousBuildsToProbe > 0) ? 
numPreviousBuildsToProbe : 1; } private LinkedList<Result> generateResultProbe() { LinkedList<Result> res = new LinkedList<Result>(); if (triggerOnSuccess) res.add(Result.SUCCESS); if (triggerOnFailure) res.add(Result.FAILURE); if (triggerOnUnstable) res.add(Result.UNSTABLE); if (triggerOnAborted) res.add(Result.ABORTED); if (triggerOnNotBuilt) res.add(Result.NOT_BUILT); return res; } /* * method to fetch and replace possible Environment Variables from job parameteres */ private String replaceEnvVars(String str, EnvVars envv, String defaultString) { StringBuffer sb = new StringBuffer(); if (str == null || str.trim().length() < 1) { if (defaultString == null) return null; str = defaultString; } Matcher m = Pattern.compile("\\$\\{.*?\\}|\\$[^\\-\\*\\.#!, ]*") .matcher(str); while (m.find()) { String v = m.group(); v = v.replaceAll("\\$", "").replaceAll("\\{", "").replaceAll("\\}", ""); m.appendReplacement(sb, envv.get(v, "")); } m.appendTail(sb); return sb.toString(); } /* * method to verify X previous builds finished with the desired result */ private ValidationResult validWithPreviousResults(AbstractBuild<?, ?> build, List<Result> desiredResultList, int depth) { int i = 0; if (this.resolveOnBackToNormal && build != null && Result.SUCCESS.equals(build.getResult())) { AbstractBuild<?, ?> previousBuild = build.getPreviousBuild(); if (previousBuild != null && !Result.SUCCESS.equals(previousBuild.getResult())) return ValidationResult.DO_RESOLVE; } else { while (i < depth && build != null) { if (!desiredResultList.contains(build.getResult())) { break; } i++; build = build.getPreviousBuild(); } if (i == depth) { return ValidationResult.DO_TRIGGER; } } return ValidationResult.DO_NOTHING; } /* * (non-Javadoc) * * @see hudson.tasks.BuildStep#getRequiredMonitorService() */ public BuildStepMonitor getRequiredMonitorService() { return BuildStepMonitor.NONE; } /* * (non-Javadoc) * * @see * hudson.tasks.BuildStepCompatibilityLayer#perform(hudson.model.AbstractBuild * , 
hudson.Launcher, hudson.model.BuildListener) */ @Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { PagerDutyEventsClient pagerDutyEventsClient = null; LinkedList<Result> resultProbe = generateResultProbe(); EnvVars env = build.getEnvironment(listener); ValidationResult validationResult = validWithPreviousResults(build, resultProbe, numPreviousBuildsToProbe); if (validationResult != ValidationResult.DO_NOTHING) { if (this.serviceKey != null && this.serviceKey.trim().length() > 0) pagerDutyEventsClient = PagerDutyEventsClient.create(); if (validationResult == ValidationResult.DO_TRIGGER) { listener.getLogger().println("Triggering PagerDuty Notification"); return triggerPagerDuty(listener, env, pagerDutyEventsClient); } else if (validationResult == ValidationResult.DO_RESOLVE) { listener.getLogger().println("Resolving incident"); return resolveIncident(pagerDutyEventsClient, listener.getLogger()); } } return true; } private boolean resolveIncident(PagerDutyEventsClient pagerDuty, PrintStream logger) { if (this.incidentKey != null && this.incidentKey.trim().length() > 0) { ResolveIncident.ResolveIncidentBuilder resolveIncidentBuilder = ResolveIncident.ResolveIncidentBuilder.create(this.serviceKey, this.incidentKey); resolveIncidentBuilder.details(DEFAULT_RESOLVE_STR).description(DEFAULT_RESOLVE_DESC); ResolveIncident resolveIncident = resolveIncidentBuilder.build(); try { EventResult result = pagerDuty.resolve(resolveIncident); if (result != null) { logger.println("Finished resolving - " + result.getStatus()); } else { logger.println("Attempt to resolve the incident returned null - Incident may already be closed or may not exist."); } } catch (Exception e) { logger.println("Error while trying to resolve "); logger.println(e.getMessage()); return false; } } else { logger.println("incidentKey not provided, nothing to resolve. 
(check previous builds for further clues)"); } return true; } private boolean triggerPagerDuty(BuildListener listener, EnvVars env, PagerDutyEventsClient pagerDuty) { if (pagerDuty == null) { listener.getLogger().println("Unable to activate pagerduty module, check configuration!"); return false; } String descr = replaceEnvVars(this.incDescription, env, DEFAULT_DESCRIPTION_STRING); String serviceK = replaceEnvVars(this.serviceKey, env, null); String incK = replaceEnvVars(this.incidentKey, env, null); String details = replaceEnvVars(this.incDetails, env, null); boolean hasIncidentKey = false; if (incK != null && incK.length() > 0) { hasIncidentKey = true; } listener.getLogger().printf("Triggering pagerDuty with serviceKey %s%n", serviceK); try { listener.getLogger().printf("incidentKey %s%n", incK); listener.getLogger().printf("description %s%n", descr); listener.getLogger().printf("details %s%n", details); TriggerIncident.TriggerIncidentBuilder incBuilder = TriggerIncident.TriggerIncidentBuilder.create(serviceK, descr).client(JENKINS_PD_CLIENT).details(details); if (hasIncidentKey) { incBuilder.incidentKey(incidentKey); } TriggerIncident incident = incBuilder.build(); EventResult result = pagerDuty.trigger(incident); if (result != null) { if (!hasIncidentKey) { this.incidentKey = result.getIncidentKey(); } listener.getLogger().printf("PagerDuty Notification Result: %s%n", result.getStatus()); listener.getLogger().printf("PagerDuty IncidentKey: %s%n", this.incidentKey); } else { listener.getLogger().printf("PagerDuty returned NULL. check network or PD settings!"); } } catch (Exception e) { e.printStackTrace(listener.error("Tried to trigger PD with serviceKey = [%s]", serviceK)); return false; } return true; } @Extension public static final class DescriptorImpl extends BuildStepDescriptor<Publisher> { /* * (non-Javadoc) * * @see hudson.tasks.BuildStepDescriptor#isApplicable(java.lang.Class) */ @Override public boolean isApplicable(Class<? 
extends AbstractProject> jobType) { return true; } /* * (non-Javadoc) * * @see hudson.model.Descriptor#getDisplayName() */ @Override public String getDisplayName() { return "PagerDuty Incident Trigger"; } } }
src/main/java/org/jenkinsci/plugins/pagerduty/PagerDutyTrigger.java
package org.jenkinsci.plugins.pagerduty; /** * Created by alexander on 9/15/15. */ import com.github.dikhan.PagerDutyEventsClient; import com.github.dikhan.domain.EventResult; import com.github.dikhan.domain.ResolveIncident; import com.github.dikhan.domain.TriggerIncident; import hudson.EnvVars; import hudson.Extension; import hudson.Launcher; import hudson.model.*; import hudson.tasks.BuildStepDescriptor; import hudson.tasks.BuildStepMonitor; import hudson.tasks.Notifier; import hudson.tasks.Publisher; import jenkins.model.Jenkins; import org.kohsuke.stapler.DataBoundConstructor; import java.io.IOException; import java.io.PrintStream; import java.util.LinkedList; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; public class PagerDutyTrigger extends Notifier { private static final String JENKINS_PD_CLIENT = "JenkinsPagerDutyClient"; private static final String DEFAULT_RESOLVE_STR = "Automatically Resolved by PD plugin"; private static final String DEFAULT_RESOLVE_DESC = "Resolved by PD plugin"; private enum ValidationResult { DO_NOTHING, DO_TRIGGER, DO_RESOLVE } private static final String DEFAULT_DESCRIPTION_STRING = "I was too lazy to create a description, but trust me it's important!"; public String serviceKey; public boolean resolveOnBackToNormal; public boolean triggerOnSuccess; public boolean triggerOnFailure; public boolean triggerOnUnstable; public boolean triggerOnAborted; public boolean triggerOnNotBuilt; public String incidentKey; public String incDescription; public String incDetails; public Integer numPreviousBuildsToProbe; public boolean isResolveOnBackToNormal() { return resolveOnBackToNormal; } public void setResolveOnBackToNormal(boolean resolveOnBackToNormal) { this.resolveOnBackToNormal = resolveOnBackToNormal; } public String getServiceKey() { return serviceKey; } public boolean isTriggerOnSuccess() { return triggerOnSuccess; } public boolean isTriggerOnFailure() { return triggerOnFailure; } public boolean 
isTriggerOnUnstable() { return triggerOnUnstable; } public boolean isTriggerOnAborted() { return triggerOnAborted; } public boolean isTriggerOnNotBuilt() { return triggerOnNotBuilt; } public String getIncidentKey() { return incidentKey; } public String getIncDescription() { return incDescription; } public String getIncDetails() { return incDetails; } public Integer getNumPreviousBuildsToProbe() { return numPreviousBuildsToProbe; } protected Object readResolve() { // this.getDescriptor().load(); return this; } @Override public DescriptorImpl getDescriptor() { Jenkins j = Jenkins.getInstance(); return (j != null) ? j.getDescriptorByType(DescriptorImpl.class) : null; } @DataBoundConstructor public PagerDutyTrigger(String serviceKey, boolean resolveOnBackToNormal, boolean triggerOnSuccess, boolean triggerOnFailure, boolean triggerOnAborted, boolean triggerOnUnstable, boolean triggerOnNotBuilt, String incidentKey, String incDescription, String incDetails, Integer numPreviousBuildsToProbe) { super(); this.serviceKey = serviceKey; this.resolveOnBackToNormal = resolveOnBackToNormal; this.triggerOnSuccess = triggerOnSuccess; this.triggerOnFailure = triggerOnFailure; this.triggerOnUnstable = triggerOnUnstable; this.triggerOnAborted = triggerOnAborted; this.triggerOnNotBuilt = triggerOnNotBuilt; this.incidentKey = incidentKey; this.incDescription = incDescription; this.incDetails = incDetails; this.numPreviousBuildsToProbe = (numPreviousBuildsToProbe != null && numPreviousBuildsToProbe > 0) ? 
numPreviousBuildsToProbe : 1; } private LinkedList<Result> generateResultProbe() { LinkedList<Result> res = new LinkedList<Result>(); if (triggerOnSuccess) res.add(Result.SUCCESS); if (triggerOnFailure) res.add(Result.FAILURE); if (triggerOnUnstable) res.add(Result.UNSTABLE); if (triggerOnAborted) res.add(Result.ABORTED); if (triggerOnNotBuilt) res.add(Result.NOT_BUILT); return res; } /* * method to fetch and replace possible Environment Variables from job parameteres */ private String replaceEnvVars(String str, EnvVars envv, String defaultString) { StringBuffer sb = new StringBuffer(); if (str == null || str.trim().length() < 1) { if (defaultString == null) return null; str = defaultString; } Matcher m = Pattern.compile("\\$\\{.*?\\}|\\$[^\\-\\*\\.#!, ]*") .matcher(str); while (m.find()) { String v = m.group(); v = v.replaceAll("\\$", "").replaceAll("\\{", "").replaceAll("\\}", ""); m.appendReplacement(sb, envv.get(v, "")); } m.appendTail(sb); return sb.toString(); } /* * method to verify X previous builds finished with the desired result */ private ValidationResult validWithPreviousResults(AbstractBuild<?, ?> build, List<Result> desiredResultList, int depth) { int i = 0; if (this.resolveOnBackToNormal && build != null && Result.SUCCESS.equals(build.getResult())) { AbstractBuild<?, ?> previousBuild = build.getPreviousBuild(); if (previousBuild != null && !Result.SUCCESS.equals(previousBuild.getResult())) return ValidationResult.DO_RESOLVE; } else { while (i < depth && build != null) { if (!desiredResultList.contains(build.getResult())) { break; } i++; build = build.getPreviousBuild(); } if (i == depth) { return ValidationResult.DO_TRIGGER; } } return ValidationResult.DO_NOTHING; } /* * (non-Javadoc) * * @see hudson.tasks.BuildStep#getRequiredMonitorService() */ public BuildStepMonitor getRequiredMonitorService() { return BuildStepMonitor.NONE; } /* * (non-Javadoc) * * @see * hudson.tasks.BuildStepCompatibilityLayer#perform(hudson.model.AbstractBuild * , 
hudson.Launcher, hudson.model.BuildListener) */ @Override public boolean perform(AbstractBuild<?, ?> build, Launcher launcher, BuildListener listener) throws InterruptedException, IOException { PagerDutyEventsClient pagerDutyEventsClient = null; LinkedList<Result> resultProbe = generateResultProbe(); EnvVars env = build.getEnvironment(listener); ValidationResult validationResult = validWithPreviousResults(build, resultProbe, numPreviousBuildsToProbe); if (validationResult != ValidationResult.DO_NOTHING) { if (this.serviceKey != null && this.serviceKey.trim().length() > 0) pagerDutyEventsClient = PagerDutyEventsClient.create(); if (validationResult == ValidationResult.DO_TRIGGER) { listener.getLogger().println("Triggering PagerDuty Notification"); return triggerPagerDuty(listener, env, pagerDutyEventsClient); } else if (validationResult == ValidationResult.DO_RESOLVE) { listener.getLogger().println("Resolving incident"); return resolveIncident(pagerDutyEventsClient, listener.getLogger()); } } return true; } private boolean resolveIncident(PagerDutyEventsClient pagerDuty, PrintStream logger) { if (this.incidentKey != null && this.incidentKey.trim().length() > 0) { ResolveIncident.ResolveIncidentBuilder resolveIncidentBuilder = ResolveIncident.ResolveIncidentBuilder.create(this.serviceKey, this.incidentKey); resolveIncidentBuilder.details(DEFAULT_RESOLVE_STR).description(DEFAULT_RESOLVE_DESC); ResolveIncident resolveIncident = resolveIncidentBuilder.build(); try { EventResult result = pagerDuty.resolve(resolveIncident); if (result != null) { logger.println("Finished resolving - " + result.getStatus()); } else { logger.println("Attempt to resolve the incident returned null - Incident may already be closed or may not exist."); } } catch (Exception e) { logger.println("Error while trying to resolve "); logger.println(e.getMessage()); return false; } } else { logger.println("incidentKey not provided, nothing to resolve. 
(check previous builds for further clues)"); } return true; } private boolean triggerPagerDuty(BuildListener listener, EnvVars env, PagerDutyEventsClient pagerDuty) { if (pagerDuty == null) { listener.getLogger().println("Unable to activate pagerduty module, check configuration!"); return false; } String descr = replaceEnvVars(this.incDescription, env, DEFAULT_DESCRIPTION_STRING); String serviceK = replaceEnvVars(this.serviceKey, env, null); String incK = replaceEnvVars(this.incidentKey, env, null); String details = replaceEnvVars(this.incDetails, env, null); boolean hasIncidentKey = false; if (incK != null && incK.length() > 0) { hasIncidentKey = true; } listener.getLogger().printf("Triggering pagerDuty with serviceKey %s%n", serviceK); try { listener.getLogger().printf("incidentKey %s%n", incK); listener.getLogger().printf("description %s%n", descr); listener.getLogger().printf("details %s%n", details); TriggerIncident.TriggerIncidentBuilder incBuilder = TriggerIncident.TriggerIncidentBuilder.create(serviceK, descr).client(JENKINS_PD_CLIENT).details(details); if (hasIncidentKey) { incBuilder.incidentKey(incidentKey); } TriggerIncident incident = incBuilder.build(); EventResult result = pagerDuty.trigger(incident); if (result != null) { if (!hasIncidentKey) { this.incidentKey = result.getIncidentKey(); } listener.getLogger().printf("PagerDuty Notification Result: %s%n", result.getStatus()); listener.getLogger().printf("PagerDuty IncidentKey: %s%n", this.incidentKey); } else { listener.getLogger().printf("PagerDuty returned NULL. check network or PD settings!"); } } catch (Exception e) { e.printStackTrace(listener.error("Tried to trigger PD with serviceKey = [%s]", serviceK)); return false; } return true; } @Extension public static final class DescriptorImpl extends BuildStepDescriptor<Publisher> { /* * (non-Javadoc) * * @see hudson.tasks.BuildStepDescriptor#isApplicable(java.lang.Class) */ @Override public boolean isApplicable(Class<? 
extends AbstractProject> jobType) { return true; } /* * (non-Javadoc) * * @see hudson.model.Descriptor#getDisplayName() */ @Override public String getDisplayName() { return "PagerDuty Incident Trigger"; } } }
remove redundant null check (findbugs)
src/main/java/org/jenkinsci/plugins/pagerduty/PagerDutyTrigger.java
remove redundant null check (findbugs)
<ide><path>rc/main/java/org/jenkinsci/plugins/pagerduty/PagerDutyTrigger.java <ide> @Override <ide> public DescriptorImpl getDescriptor() { <ide> Jenkins j = Jenkins.getInstance(); <del> return (j != null) ? j.getDescriptorByType(DescriptorImpl.class) : null; <add> return j.getDescriptorByType(DescriptorImpl.class); <ide> } <ide> <ide> @DataBoundConstructor
Java
apache-2.0
error: pathspec 'tensorflow/java/src/main/java/org/tensorflow/types/TFType.java' did not match any file(s) known to git
1020ee116d2bf99bc90e95730a8431fb001fbb40
1
andrewcmyers/tensorflow,andrewcmyers/tensorflow,andrewcmyers/tensorflow,andrewcmyers/tensorflow,andrewcmyers/tensorflow,andrewcmyers/tensorflow,andrewcmyers/tensorflow,andrewcmyers/tensorflow
package org.tensorflow.types; /** * A marker interface for classes representing TensorFlow types. */ public interface TFType {}
tensorflow/java/src/main/java/org/tensorflow/types/TFType.java
Somehow I did not notice that TFType.java was not checked in.
tensorflow/java/src/main/java/org/tensorflow/types/TFType.java
Somehow I did not notice that TFType.java was not checked in.
<ide><path>ensorflow/java/src/main/java/org/tensorflow/types/TFType.java <add>package org.tensorflow.types; <add> <add>/** <add> * A marker interface for classes representing TensorFlow types. <add> */ <add>public interface TFType {}
Java
agpl-3.0
2817be54ebaa0942e5b81e78b96aa51f2931c349
0
jspacco/CloudCoder2,jspacco/CloudCoder,csirkeee/CloudCoder,csirkeee/CloudCoder,aayushmudgal/CloudCoder,csirkeee/CloudCoder,aayushmudgal/CloudCoder,aayushmudgal/CloudCoder,csirkeee/CloudCoder,jspacco/CloudCoder2,wicky-info/CloudCoder,wicky-info/CloudCoder,wicky-info/CloudCoder,jspacco/CloudCoder,jspacco/CloudCoder2,x77686d/CloudCoder,csirkeee/CloudCoder,daveho/CloudCoder,x77686d/CloudCoder,wicky-info/CloudCoder,vjpudelski/CloudCoder,x77686d/CloudCoder,vjpudelski/CloudCoder,daveho/CloudCoder,jspacco/CloudCoder2,jspacco/CloudCoder,vjpudelski/CloudCoder,jspacco/CloudCoder,jspacco/CloudCoder2,vjpudelski/CloudCoder,daveho/CloudCoder,cloudcoderdotorg/CloudCoder,daveho/CloudCoder,aayushmudgal/CloudCoder,x77686d/CloudCoder,daveho/CloudCoder,cloudcoderdotorg/CloudCoder,jspacco/CloudCoder2,vjpudelski/CloudCoder,daveho/CloudCoder,vjpudelski/CloudCoder,jspacco/CloudCoder2,wicky-info/CloudCoder,jspacco/CloudCoder,wicky-info/CloudCoder,aayushmudgal/CloudCoder,csirkeee/CloudCoder,wicky-info/CloudCoder,x77686d/CloudCoder,vjpudelski/CloudCoder,aayushmudgal/CloudCoder,x77686d/CloudCoder,cloudcoderdotorg/CloudCoder,x77686d/CloudCoder,cloudcoderdotorg/CloudCoder,cloudcoderdotorg/CloudCoder,cloudcoderdotorg/CloudCoder,csirkeee/CloudCoder,jspacco/CloudCoder,cloudcoderdotorg/CloudCoder,daveho/CloudCoder,jspacco/CloudCoder
// CloudCoder - a web-based pedagogical programming environment // Copyright (C) 2011-2013, Jaime Spacco <[email protected]> // Copyright (C) 2011-2013, David H. Hovemeyer <[email protected]> // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU Affero General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. package org.cloudcoder.app.client; import org.cloudcoder.app.client.model.PageId; import org.cloudcoder.app.client.model.PageStack; import org.cloudcoder.app.client.model.Session; import org.cloudcoder.app.client.model.StatusMessage; import org.cloudcoder.app.client.page.CloudCoderPage; import org.cloudcoder.app.client.page.CoursesAndProblemsPage2; import org.cloudcoder.app.client.page.DevelopmentPage; import org.cloudcoder.app.client.page.EditProblemPage; import org.cloudcoder.app.client.page.InitErrorPage; import org.cloudcoder.app.client.page.LoginPage; import org.cloudcoder.app.client.page.PlaygroundPage; import org.cloudcoder.app.client.page.ProblemAdminPage; import org.cloudcoder.app.client.page.QuizPage; import org.cloudcoder.app.client.page.StatisticsPage; import org.cloudcoder.app.client.page.UserAccountPage; import org.cloudcoder.app.client.page.UserAdminPage; import org.cloudcoder.app.client.page.UserProblemSubmissionsPage; import org.cloudcoder.app.client.page.UserProgressPage; import org.cloudcoder.app.client.rpc.RPC; import org.cloudcoder.app.shared.model.Activity; import 
org.cloudcoder.app.shared.model.ActivityObject; import org.cloudcoder.app.shared.model.InitErrorException; import org.cloudcoder.app.shared.model.User; import org.cloudcoder.app.shared.util.DefaultSubscriptionRegistrar; import org.cloudcoder.app.shared.util.Publisher; import org.cloudcoder.app.shared.util.Subscriber; import org.cloudcoder.app.shared.util.SubscriptionRegistrar; import com.google.gwt.core.client.EntryPoint; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.rpc.AsyncCallback; import com.google.gwt.user.client.ui.IsWidget; import com.google.gwt.user.client.ui.RootLayoutPanel; /** * CloudCoder entry point class. */ public class CloudCoder implements EntryPoint, Subscriber { private Session session; private PageStack pageStack; private SubscriptionRegistrar subscriptionRegistrar; private CloudCoderPage currentPage; /** * This is the entry point method. */ public void onModuleLoad() { GWT.log("loading, fragment name is " + Window.Location.getHash()); session = new Session(); pageStack = new PageStack(); session.add(pageStack); subscriptionRegistrar = new DefaultSubscriptionRegistrar(); // Subscribe to PAGE_CHANGE events in the PageStack pageStack.subscribe(PageStack.Event.PAGE_CHANGE, this, subscriptionRegistrar); // Subscribe to all Session events session.subscribeToAll(Session.Event.values(), this, subscriptionRegistrar); // Go to whatever initial page is appropriate. createInitialPage(); } private void createInitialPage() { // See if a URL fragment was specified, and if so, see if it // identifies a valid page. 
PageId linkPageId_ = null; // page id specified by the link, if any String linkPageParams_ = null; // page parameters specified by the link, if any String fragment = Window.Location.getHash(); if (fragment != null && !fragment.equals("")) { GWT.log("URL fragment is " + fragment); String fragmentName = getFragmentName(fragment); GWT.log("Fragment name is " + fragmentName); linkPageId_ = PageId.forFragmentName(fragmentName); if (linkPageId_ != null) { linkPageParams_ = getFragmentParams(fragment); GWT.log("Link params: " + linkPageParams_); } } final PageId linkPageId = linkPageId_; final String linkPageParams = linkPageParams_; // Check to see if the user is already logged in. RPC.loginService.getUser(new AsyncCallback<User>() { @Override public void onFailure(Throwable caught) { // Special case: if this RPC call (which is the first one) // throws an InitErrorException, switch to the InitErrorPage // so that the cloudcoder admin can diagnose and resolve // the issue. if (caught instanceof InitErrorException) { changePage(new InitErrorPage()); } else { session.add(StatusMessage.error("Could not check for current login status: " + caught.getMessage())); changePage(new LoginPage()); } } @Override public void onSuccess(User result) { if (result == null) { // Not logged in, so show LoginPage LoginPage loginPage = new LoginPage(); if (linkPageId != null) { GWT.log("Login page will redirect to " + linkPageId + ":" + linkPageParams); // A page was linked in the original URL, // so have the LoginPage try to navigate to it // on a successful login. loginPage.setLinkPageId(linkPageId); loginPage.setLinkPageParams(linkPageParams); } changePage(loginPage); } else { // User is logged in! final User user = result; // Add user to session session.add(user); // If a page id was specified as part of the original URL, // try to navigate to it without attempting to recover the // client's server-side Activity. (The page id in the // link should take precedence.) 
if (linkPageId != null) { GWT.log("Already logged in, linking page " + linkPageId + ":" + linkPageParams); CloudCoderPage page = createPageForPageId(linkPageId, linkPageParams); changePage(page); } else { GWT.log("Already logged in, no link page id specified, checking server for Activity"); // No page id was specified in the original URL. // See if there is a server-side Activity. RPC.loginService.getActivity(new AsyncCallback<Activity>() { @Override public void onFailure(Throwable caught) { session.add(StatusMessage.error("Error getting Activity", caught)); changePage(new CoursesAndProblemsPage2()); } @Override public void onSuccess(Activity result) { // Did we find the user's Activity? if (result == null) { // Don't know what the user's activity was, so take // them to the courses/problems page changePage(new CoursesAndProblemsPage2()); } else { // We have an activity. Find the page. CloudCoderPage page = getPageForActivity(result); // Restore the session objects. for (Object obj : result.getSessionObjects()) { GWT.log("Restoring activity object: " + obj.getClass().getName()); session.add(obj); } changePage(page); } } }); } } } }); } /** * Get the fragment name. * E.g., if the fragment is "exercise?c=4,p=5", then the * fragment name is "exercise". * * @param fragment the fragment * @return the fragment name */ private String getFragmentName(String fragment) { int ques = fragment.indexOf('?'); return (ques >= 0) ? fragment.substring(0, ques) : fragment; } /** * Get the fragment parameters. * E.g., if the fragment is "exercise?c=4,p=5", then the * parameters are "c=4,p=5". * * @param fragment the fragment * @return the fragment parameters */ private String getFragmentParams(String fragment) { int ques = fragment.indexOf('?'); return ques >= 0 ? fragment.substring(ques+1) : ""; } protected CloudCoderPage getPageForActivity(Activity result) { String name = result.getName(); // The activity name must be the string representation of a PageId. 
PageId pageId; try { pageId = PageId.valueOf(name); } catch (IllegalArgumentException e) { GWT.log("Illegal activity name: " + name); pageId = PageId.COURSES_AND_PROBLEMS; } return createPageForPageId(pageId, null); } protected CloudCoderPage createPageForPageId(PageId pageId, String pageParams) { CloudCoderPage page = createPageForPageId(pageId); // Create a reasonable PageStack. // (Note that we need to disable notifications while we do this, // since we're not actually navigating pages.) PageStack pageStack = session.get(PageStack.class); pageStack.setNotifications(false); page.initDefaultPageStack(pageStack); pageStack.push(page.getPageId()); pageStack.setNotifications(true); // Set initial page parameters (if any) if (pageParams != null) { page.setUrlFragmentParams(pageParams); } return page; } private CloudCoderPage createPageForPageId(PageId pageId) { CloudCoderPage page; switch (pageId) { case COURSES_AND_PROBLEMS: page = new CoursesAndProblemsPage2(); break; case DEVELOPMENT: page = new DevelopmentPage(); // page = new DevelopmentPage2(); break; case PROBLEM_ADMIN: page = new ProblemAdminPage(); break; case EDIT_PROBLEM: page= new EditProblemPage(); break; case USER_ADMIN: page = new UserAdminPage(); break; case STATISTICS: page = new StatisticsPage(); break; case USER_PROGRESS: page = new UserProgressPage(); break; case QUIZ: page = new QuizPage(); break; case USER_ACCOUNT: page = new UserAccountPage(); break; case USER_PROBLEM_SUBMISSIONS: page = new UserProblemSubmissionsPage(); break; case PLAYGROUND_PAGE: page = new PlaygroundPage(); break; default: // This shouldn't happen (can't find page for Activity), // but if it does, go to the courses and problems page. 
GWT.log("Don't know what kind of page to create for " + pageId); page = new CoursesAndProblemsPage2(); break; } return page; } protected Activity getActivityForPage(CloudCoderPage page) { return getActivityForSessionAndPage(page, session); } /** * Create an {@link Activity} for current page and session. * * @param page current page * @param session current session * @return the {@link Activity} */ public static Activity getActivityForSessionAndPage(CloudCoderPage page, Session session) { // The activity name is the page's PageId (as a string) Activity activity = new Activity(page.getPageId().toString()); // Record the Session objects (the ones that are ActivityObjects) for (Object obj : session.getObjects()) { if (obj instanceof ActivityObject) { GWT.log("Adding " + obj.getClass().getName() + " to Activity"); activity.addSessionObject((ActivityObject) obj); } } return activity; } private void changePage(CloudCoderPage page) { if (currentPage != null) { currentPage.deactivate(); RootLayoutPanel.get().remove(currentPage.getWidget()); // make sure there is no StatusMessage from the previous page session.remove(StatusMessage.class); } page.setSession(session); // Create the page's Widget and add it to the DOM tree. // Leave a 10 pixel border around the page widget. page.createWidget(); IsWidget w = page.getWidget(); RootLayoutPanel.get().add(w); RootLayoutPanel.get().setWidgetLeftRight(w, 10.0, Unit.PX, 10.0, Unit.PX); RootLayoutPanel.get().setWidgetTopBottom(w, 10.0, Unit.PX, 10.0, Unit.PX); // Update the anchor in the URL to identify the page. // See: http://stackoverflow.com/questions/5402732/gwt-set-url-without-submit // TODO: could add params here? 
String hash = page.getPageId().getFragmentName(); String newURL = Window.Location.createUrlBuilder().setHash(hash).buildString(); // When running in development mode, replacing ":" with "%3A" // (due to URL encoding, I guess) appears to trigger a page reload // on both Firefox and Chrome, completely bollixing our efforts to use // the original URL fragment. So, undo that bit of unnecessary // manipulation of the URL. newURL = newURL.replace("%3A", ":"); Window.Location.replace(newURL); GWT.log("Setting URL to " + newURL); // Now it is safe to activate the page page.activate(); currentPage = page; // Inform the server of the Activity (page) that the user is now working on, // if the page requests it. Otherwise set the activity to null. Activity activity = page.isActivity() ? getActivityForPage(page) : null; RPC.loginService.setActivity(activity, new AsyncCallback<Void>() { @Override public void onFailure(Throwable caught) { // There's not really anything useful we can do here. GWT.log("Couldn't set activity on server?", caught); } @Override public void onSuccess(Void result) { // Nothing to do } }); } @Override public void eventOccurred(Object key, Publisher publisher, Object hint) { // This is where we monitor for events that indicate page changes. // The PageStack makes this pretty straightforward. if (key == PageStack.Event.PAGE_CHANGE) { PageId current = session.get(PageStack.class).getTop(); changePage(createPageForPageId(current)); } else if (key == Session.Event.LOGOUT) { // On logout, clear the Session and PageStack, // add the PageStack back to the Session, // and go back to the LoginPage. session.clear(); pageStack.clear(); session.add(pageStack); changePage(new LoginPage()); } } }
CloudCoder/src/org/cloudcoder/app/client/CloudCoder.java
// CloudCoder - a web-based pedagogical programming environment // Copyright (C) 2011-2013, Jaime Spacco <[email protected]> // Copyright (C) 2011-2013, David H. Hovemeyer <[email protected]> // // This program is free software: you can redistribute it and/or modify // it under the terms of the GNU Affero General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. package org.cloudcoder.app.client; import org.cloudcoder.app.client.model.PageId; import org.cloudcoder.app.client.model.PageStack; import org.cloudcoder.app.client.model.Session; import org.cloudcoder.app.client.model.StatusMessage; import org.cloudcoder.app.client.page.CloudCoderPage; import org.cloudcoder.app.client.page.CoursesAndProblemsPage2; import org.cloudcoder.app.client.page.DevelopmentPage; import org.cloudcoder.app.client.page.EditProblemPage; import org.cloudcoder.app.client.page.InitErrorPage; import org.cloudcoder.app.client.page.LoginPage; import org.cloudcoder.app.client.page.PlaygroundPage; import org.cloudcoder.app.client.page.ProblemAdminPage; import org.cloudcoder.app.client.page.QuizPage; import org.cloudcoder.app.client.page.StatisticsPage; import org.cloudcoder.app.client.page.UserAccountPage; import org.cloudcoder.app.client.page.UserAdminPage; import org.cloudcoder.app.client.page.UserProblemSubmissionsPage; import org.cloudcoder.app.client.page.UserProgressPage; import org.cloudcoder.app.client.rpc.RPC; import org.cloudcoder.app.shared.model.Activity; import 
org.cloudcoder.app.shared.model.ActivityObject; import org.cloudcoder.app.shared.model.InitErrorException; import org.cloudcoder.app.shared.model.User; import org.cloudcoder.app.shared.util.DefaultSubscriptionRegistrar; import org.cloudcoder.app.shared.util.Publisher; import org.cloudcoder.app.shared.util.Subscriber; import org.cloudcoder.app.shared.util.SubscriptionRegistrar; import com.google.gwt.core.client.EntryPoint; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.rpc.AsyncCallback; import com.google.gwt.user.client.ui.IsWidget; import com.google.gwt.user.client.ui.RootLayoutPanel; /** * CloudCoder entry point class. */ public class CloudCoder implements EntryPoint, Subscriber { private Session session; private PageStack pageStack; private SubscriptionRegistrar subscriptionRegistrar; private CloudCoderPage currentPage; /** * This is the entry point method. */ public void onModuleLoad() { GWT.log("loading, fragment name is " + Window.Location.getHash()); session = new Session(); pageStack = new PageStack(); session.add(pageStack); subscriptionRegistrar = new DefaultSubscriptionRegistrar(); // Subscribe to PAGE_CHANGE events in the PageStack pageStack.subscribe(PageStack.Event.PAGE_CHANGE, this, subscriptionRegistrar); // Subscribe to all Session events session.subscribeToAll(Session.Event.values(), this, subscriptionRegistrar); // Go to whatever initial page is appropriate. createInitialPage(); } private void createInitialPage() { // See if a URL fragment was specified, and if so, see if it // identifies a valid page. 
PageId linkPageId_ = null; // page id specified by the link, if any String linkPageParams_ = null; // page parameters specified by the link, if any String fragment = Window.Location.getHash(); if (fragment != null && !fragment.equals("")) { GWT.log("URL fragment is " + fragment); String fragmentName = getFragmentName(fragment); GWT.log("Fragment name is " + fragmentName); linkPageId_ = PageId.forFragmentName(fragmentName); if (linkPageId_ != null) { linkPageParams_ = getFragmentParams(fragment); GWT.log("Link params: " + linkPageParams_); } } final PageId linkPageId = linkPageId_; final String linkPageParams = linkPageParams_; // Check to see if the user is already logged in. RPC.loginService.getUser(new AsyncCallback<User>() { @Override public void onFailure(Throwable caught) { // Special case: if this RPC call (which is the first one) // throws an InitErrorException, switch to the InitErrorPage // so that the cloudcoder admin can diagnose and resolve // the issue. if (caught instanceof InitErrorException) { changePage(new InitErrorPage()); } else { session.add(StatusMessage.error("Could not check for current login status: " + caught.getMessage())); changePage(new LoginPage()); } } @Override public void onSuccess(User result) { if (result == null) { // Not logged in, so show LoginPage LoginPage loginPage = new LoginPage(); if (linkPageId != null) { GWT.log("Login page will redirect to " + linkPageId + ":" + linkPageParams); // A page was linked in the original URL, // so have the LoginPage try to navigate to it // on a successful login. loginPage.setLinkPageId(linkPageId); loginPage.setLinkPageParams(linkPageParams); } changePage(loginPage); } else { // User is logged in! final User user = result; // Add user to session session.add(user); // If a page id was specified as part of the original URL, // try to navigate to it without attempting to recover the // client's server-side Activity. (The page id in the // link should take precedence.) 
if (linkPageId != null) { GWT.log("Already logged in, linking page " + linkPageId + ":" + linkPageParams); CloudCoderPage page = createPageForPageId(linkPageId, linkPageParams); changePage(page); } else { GWT.log("Already logged in, no link page id specified, checking server for Activity"); // No page id was specified in the original URL. // See if there is a server-side Activity. RPC.loginService.getActivity(new AsyncCallback<Activity>() { @Override public void onFailure(Throwable caught) { session.add(StatusMessage.error("Error getting Activity", caught)); changePage(new CoursesAndProblemsPage2()); } @Override public void onSuccess(Activity result) { // Did we find the user's Activity? if (result == null) { // Don't know what the user's activity was, so take // them to the courses/problems page changePage(new CoursesAndProblemsPage2()); } else { // We have an activity. Find the page. CloudCoderPage page = getPageForActivity(result); // Restore the session objects. for (Object obj : result.getSessionObjects()) { GWT.log("Restoring activity object: " + obj.getClass().getName()); session.add(obj); } changePage(page); } } }); } } } }); } /** * Get the fragment name. * E.g., if the fragment is "exercise?c=4,p=5", then the * fragment name is "exercise". * * @param fragment the fragment * @return the fragment name */ private String getFragmentName(String fragment) { int ques = fragment.indexOf('?'); return (ques >= 0) ? fragment.substring(0, ques) : fragment; } /** * Get the fragment parameters. * E.g., if the fragment is "exercise?c=4,p=5", then the * parameters are "c=4,p=5". * * @param fragment the fragment * @return the fragment parameters */ private String getFragmentParams(String fragment) { int ques = fragment.indexOf('?'); return ques >= 0 ? fragment.substring(ques+1) : ""; } protected CloudCoderPage getPageForActivity(Activity result) { String name = result.getName(); // The activity name must be the string representation of a PageId. 
PageId pageId; try { pageId = PageId.valueOf(name); } catch (IllegalArgumentException e) { GWT.log("Illegal activity name: " + name); pageId = PageId.COURSES_AND_PROBLEMS; } return createPageForPageId(pageId, null); } protected CloudCoderPage createPageForPageId(PageId pageId, String pageParams) { CloudCoderPage page = createPageForPageId(pageId); // Create a reasonable PageStack. // (Note that we need to disable notifications while we do this, // since we're not actually navigating pages.) PageStack pageStack = session.get(PageStack.class); pageStack.setNotifications(false); page.initDefaultPageStack(pageStack); pageStack.push(page.getPageId()); pageStack.setNotifications(true); // Set initial page parameters (if any) if (pageParams != null) { page.setUrlFragmentParams(pageParams); } return page; } private CloudCoderPage createPageForPageId(PageId pageId) { CloudCoderPage page; switch (pageId) { case COURSES_AND_PROBLEMS: page = new CoursesAndProblemsPage2(); break; case DEVELOPMENT: page = new DevelopmentPage(); // page = new DevelopmentPage2(); break; case PROBLEM_ADMIN: page = new ProblemAdminPage(); break; case EDIT_PROBLEM: page= new EditProblemPage(); break; case USER_ADMIN: page = new UserAdminPage(); break; case STATISTICS: page = new StatisticsPage(); break; case USER_PROGRESS: page = new UserProgressPage(); break; case QUIZ: page = new QuizPage(); break; case USER_ACCOUNT: page = new UserAccountPage(); break; case USER_PROBLEM_SUBMISSIONS: page = new UserProblemSubmissionsPage(); break; case PLAYGROUND_PAGE: page = new PlaygroundPage(); break; default: // This shouldn't happen (can't find page for Activity), // but if it does, go to the courses and problems page. 
GWT.log("Don't know what kind of page to create for " + pageId); page = new CoursesAndProblemsPage2(); break; } return page; } protected Activity getActivityForPage(CloudCoderPage page) { return getActivityForSessionAndPage(page, session); } /** * Create an {@link Activity} for current page and session. * * @param page current page * @param session current session * @return the {@link Activity} */ public static Activity getActivityForSessionAndPage(CloudCoderPage page, Session session) { // The activity name is the page's PageId (as a string) Activity activity = new Activity(page.getPageId().toString()); // Record the Session objects (the ones that are ActivityObjects) for (Object obj : session.getObjects()) { if (obj instanceof ActivityObject) { GWT.log("Adding " + obj.getClass().getName() + " to Activity"); activity.addSessionObject((ActivityObject) obj); } } return activity; } private void changePage(CloudCoderPage page) { if (currentPage != null) { currentPage.deactivate(); RootLayoutPanel.get().remove(currentPage.getWidget()); // make sure there is no StatusMessage from the previous page session.remove(StatusMessage.class); } page.setSession(session); // Create the page's Widget and add it to the DOM tree. // Leave a 10 pixel border around the page widget. page.createWidget(); IsWidget w = page.getWidget(); RootLayoutPanel.get().add(w); RootLayoutPanel.get().setWidgetLeftRight(w, 10.0, Unit.PX, 10.0, Unit.PX); RootLayoutPanel.get().setWidgetTopBottom(w, 10.0, Unit.PX, 10.0, Unit.PX); // Update the anchor in the URL to identify the page. // See: http://stackoverflow.com/questions/5402732/gwt-set-url-without-submit // TODO: could add params here? 
String hash = page.getPageId().getFragmentName(); String newURL = Window.Location.createUrlBuilder().setHash(hash).buildString(); Window.Location.replace(newURL); // Now it is safe to activate the page page.activate(); currentPage = page; // Inform the server of the Activity (page) that the user is now working on, // if the page requests it. Otherwise set the activity to null. Activity activity = page.isActivity() ? getActivityForPage(page) : null; RPC.loginService.setActivity(activity, new AsyncCallback<Void>() { @Override public void onFailure(Throwable caught) { // There's not really anything useful we can do here. GWT.log("Couldn't set activity on server?", caught); } @Override public void onSuccess(Void result) { // Nothing to do } }); } @Override public void eventOccurred(Object key, Publisher publisher, Object hint) { // This is where we monitor for events that indicate page changes. // The PageStack makes this pretty straightforward. if (key == PageStack.Event.PAGE_CHANGE) { PageId current = session.get(PageStack.class).getTop(); changePage(createPageForPageId(current)); } else if (key == Session.Event.LOGOUT) { // On logout, clear the Session and PageStack, // add the PageStack back to the Session, // and go back to the LoginPage. session.clear(); pageStack.clear(); session.add(pageStack); changePage(new LoginPage()); } } }
found and fixed mysterious changing of the URL fragment name The first time we attempted to change the URL fragment, when going to the LoginPage, it was triggering a page reload in the browser because the ":" (part of the GWT development mode URL) was getting URL-encoded. Undoing that encoding left the non-fragment part of the URL unchanged, allowing the URL change to take place without a page reload. Yay?
CloudCoder/src/org/cloudcoder/app/client/CloudCoder.java
found and fixed mysterious changing of the URL fragment name
<ide><path>loudCoder/src/org/cloudcoder/app/client/CloudCoder.java <ide> // TODO: could add params here? <ide> String hash = page.getPageId().getFragmentName(); <ide> String newURL = Window.Location.createUrlBuilder().setHash(hash).buildString(); <add> <add> // When running in development mode, replacing ":" with "%3A" <add> // (due to URL encoding, I guess) appears to trigger a page reload <add> // on both Firefox and Chrome, completely bollixing our efforts to use <add> // the original URL fragment. So, undo that bit of unnecessary <add> // manipulation of the URL. <add> newURL = newURL.replace("%3A", ":"); <add> <ide> Window.Location.replace(newURL); <add> GWT.log("Setting URL to " + newURL); <ide> <ide> // Now it is safe to activate the page <ide> page.activate();
Java
mit
6cc498b686caf8ab6f0f5577a898fa0728cc11ba
0
cloudjanak/photo-picker-plus-ios,chute/photo-picker-plus-ios,chute/photo-picker-plus-ios,cloudjanak/photo-picker-plus-ios,cloudjanak/photo-picker-plus-ios,Magnat12/photo-picker-plus-ios,dawangjiaowolaixunshan/photo-picker-plus-ios,dawangjiaowolaixunshan/photo-picker-plus-ios,chute/photo-picker-plus-ios,dawangjiaowolaixunshan/photo-picker-plus-ios,Magnat12/photo-picker-plus-ios,Magnat12/photo-picker-plus-ios
/* * Copyright (c) 2012 Chute Corporation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package com.chute.android.photopickerplus.app; import java.io.File; import java.io.FileNotFoundException; import android.app.Activity; import android.content.Intent; import android.content.pm.PackageManager; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.provider.MediaStore; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import com.chute.android.photopickerplus.R; import com.chute.android.photopickerplus.dao.MediaDAO; import com.chute.android.photopickerplus.util.AppUtil; import com.chute.android.photopickerplus.util.Constants; import com.chute.android.photopickerplus.util.NotificationUtil; import com.chute.android.photopickerplus.util.intent.AlbumsActivityIntentWrapper; import com.chute.android.photopickerplus.util.intent.IntentUtil; import com.chute.android.photopickerplus.util.intent.PhotoPickerPlusIntentWrapper; import com.chute.android.photopickerplus.util.intent.PhotosIntentWrapper; import com.chute.sdk.api.GCHttpCallback; import com.chute.sdk.api.account.GCAccounts; import com.chute.sdk.api.authentication.GCAuthenticationFactory.AccountType; import com.chute.sdk.collections.GCAccountsCollection; import com.chute.sdk.model.GCAccountMediaModel; import com.chute.sdk.model.GCAccountModel; import com.chute.sdk.model.GCAccountStore; import com.chute.sdk.model.GCHttpRequestParameters; import com.chute.sdk.utils.GCPreferenceUtil; import com.darko.imagedownloader.ImageLoader; public class ChooseServiceActivity extends Activity { public static final String TAG = ChooseServiceActivity.class.getSimpleName(); private TextView txtFacebook; private TextView txtPicasa; private TextView txtFlickr; private TextView txtInstagram; private LinearLayout take_photos; private LinearLayout facebook; private LinearLayout picasa; private LinearLayout instagram; private LinearLayout 
flickr; private LinearLayout allPhotos; private LinearLayout cameraPhotos; private LinearLayout lastPhoto; private ImageView img_all_photos; private ImageView img_camera_photos; private ImageView img_last_photo; private AccountType accountType; private ImageLoader loader; private PhotoPickerPlusIntentWrapper ppWrapper; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.service_layout); loader = ImageLoader.getLoader(ChooseServiceActivity.this); ppWrapper = new PhotoPickerPlusIntentWrapper(getIntent()); txtFacebook = (TextView) findViewById(R.id.txt_facebook); txtFacebook.setTag(AccountType.FACEBOOK); txtPicasa = (TextView) findViewById(R.id.txt_picasa); txtPicasa.setTag(AccountType.PICASA); txtFlickr = (TextView) findViewById(R.id.txt_flickr); txtFlickr.setTag(AccountType.FLICKR); txtInstagram = (TextView) findViewById(R.id.txt_instagram); txtInstagram.setTag(AccountType.INSTAGRAM); facebook = (LinearLayout) findViewById(R.id.linear_fb); facebook.setTag(AccountType.FACEBOOK); flickr = (LinearLayout) findViewById(R.id.linear_flickr); flickr.setTag(AccountType.FLICKR); picasa = (LinearLayout) findViewById(R.id.linear_picasa); picasa.setTag(AccountType.PICASA); instagram = (LinearLayout) findViewById(R.id.linear_instagram); instagram.setTag(AccountType.INSTAGRAM); allPhotos = (LinearLayout) findViewById(R.id.all_photos_linear); allPhotos.setOnClickListener(new OnPhotoStreamListener()); cameraPhotos = (LinearLayout) findViewById(R.id.camera_shots_linear); cameraPhotos.setOnClickListener(new OnCameraRollListener()); lastPhoto = (LinearLayout) findViewById(R.id.last_photo_linear); lastPhoto.setOnClickListener(new OnLastPhotoClickListener()); img_all_photos = (ImageView) findViewById(R.id.all_photos_icon); img_camera_photos = (ImageView) findViewById(R.id.camera_shots_icon); img_last_photo = (ImageView) findViewById(R.id.last_photo_icon); 
loader.displayImage(MediaDAO.getLastPhotoFromAllPhotos(getApplicationContext()).toString(), img_all_photos); Uri uri = MediaDAO.getLastPhotoFromCameraPhotos(getApplicationContext()); if (uri != null) { loader.displayImage(uri.toString(), img_camera_photos); loader.displayImage(uri.toString(), img_last_photo); } take_photos = (LinearLayout) findViewById(R.id.album3_linear); take_photos.setOnClickListener(new OnCameraClickListener()); facebook.setOnClickListener(new OnLoginClickListener()); picasa.setOnClickListener(new OnLoginClickListener()); flickr.setOnClickListener(new OnLoginClickListener()); instagram.setOnClickListener(new OnLoginClickListener()); } private final class OnLoginClickListener implements OnClickListener { @Override public void onClick(View v) { accountType = (AccountType) v.getTag(); if (GCPreferenceUtil.get().hasAccountId(accountType)) { accountClicked(GCPreferenceUtil.get().getAccountId(accountType), accountType.getName()); } else { GCAccountStore.getInstance(getApplicationContext()).startAuthenticationActivity( ChooseServiceActivity.this, accountType, Constants.PERMISSIONS_SCOPE, Constants.CALLBACK_URL, Constants.CLIENT_ID, Constants.CLIENT_SECRET); } } } private final class AccountsCallback implements GCHttpCallback<GCAccountsCollection> { @Override public void onSuccess(GCAccountsCollection responseData) { if (accountType == null) { return; } for (GCAccountModel accountModel : responseData) { if (accountModel.getType().equalsIgnoreCase(accountType.getName())) { GCPreferenceUtil.get().setNameForAccount(accountType, accountModel.getUser().getName()); GCPreferenceUtil.get().setIdForAccount(accountType, accountModel.getId()); accountClicked(accountModel.getId(), accountType.getName()); } } } @Override public void onHttpException(GCHttpRequestParameters params, Throwable exception) { } @Override public void onHttpError(int responseCode, String statusMessage) { } @Override public void onParserException(int responseCode, Throwable exception) { } } 
public void accountClicked(String accountId, String accountName) { AlbumsActivityIntentWrapper wrapper = new AlbumsActivityIntentWrapper( ChooseServiceActivity.this); wrapper.setMultiPicker(ppWrapper.getIsMultiPicker()); wrapper.setAccountId(accountId); wrapper.setAccountName(accountName); wrapper.startActivity(ChooseServiceActivity.this); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (resultCode == Activity.RESULT_OK) { if (requestCode == GCAccountStore.AUTHENTICATION_REQUEST_CODE) { GCAccounts.all(getApplicationContext(), new AccountsCallback()).executeAsync(); } if (requestCode == PhotosIntentWrapper.ACTIVITY_FOR_RESULT_STREAM_KEY) { finish(); } else if (requestCode == Constants.CAMERA_PIC_REQUEST) { // Bitmap image = (Bitmap) data.getExtras().get("data"); String path = ""; File tempFile = AppUtil.getTempFile(getApplicationContext()); if (AppUtil.hasImageCaptureBug() == false && tempFile.length() > 0) { Uri u; try { u = Uri.parse(android.provider.MediaStore.Images.Media.insertImage( getContentResolver(), tempFile.getAbsolutePath(), null, null)); tempFile.delete(); path = Uri.fromFile(new File(convertMediaUriToPath(u))).toString(); } catch (FileNotFoundException e) { Log.d(TAG, "", e); } } else { Log.e(TAG, "Bug " + data.getData().getPath()); path = Uri.fromFile( new File(AppUtil.getPath(getApplicationContext(), data.getData()))) .toString(); } Log.d(TAG, path); final GCAccountMediaModel model = new GCAccountMediaModel(); model.setLargeUrl(path); model.setThumbUrl(path); model.setUrl(path); IntentUtil.deliverDataToInitialActivity(this, model, ppWrapper.getChuteId()); } } } protected String convertMediaUriToPath(Uri uri) { String[] proj = { MediaStore.Images.Media.DATA }; Cursor cursor = getContentResolver().query(uri, proj, null, null, null); int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA); cursor.moveToFirst(); String path = 
cursor.getString(column_index); cursor.close(); return path; } private class OnCameraClickListener implements OnClickListener { @Override public void onClick(View v) { if (!getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) { NotificationUtil.makeToast(getApplicationContext(), R.string.toast_feature_camera); return; } final Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE); if (AppUtil.hasImageCaptureBug() == false) { intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(AppUtil.getTempFile(ChooseServiceActivity.this))); } else { intent.putExtra(android.provider.MediaStore.EXTRA_OUTPUT, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI); } startActivityForResult(intent, Constants.CAMERA_PIC_REQUEST); } } private final class OnPhotoStreamListener implements OnClickListener { @Override public void onClick(View v) { final PhotosIntentWrapper wrapper = new PhotosIntentWrapper(ChooseServiceActivity.this); wrapper.setFilterType(PhotosIntentWrapper.TYPE_ALL_PHOTOS); wrapper.setMultiPicker(ppWrapper.getIsMultiPicker()); wrapper.setChuteId(ppWrapper.getChuteId()); wrapper.startActivityForResult(ChooseServiceActivity.this, PhotosIntentWrapper.ACTIVITY_FOR_RESULT_STREAM_KEY); } } private final class OnCameraRollListener implements OnClickListener { @Override public void onClick(View v) { final PhotosIntentWrapper wrapper = new PhotosIntentWrapper(ChooseServiceActivity.this); wrapper.setMultiPicker(ppWrapper.getIsMultiPicker()); wrapper.setFilterType(PhotosIntentWrapper.TYPE_CAMERA_ROLL); wrapper.setChuteId(ppWrapper.getChuteId()); wrapper.startActivityForResult(ChooseServiceActivity.this, PhotosIntentWrapper.ACTIVITY_FOR_RESULT_STREAM_KEY); } } private final class OnLastPhotoClickListener implements OnClickListener { @Override public void onClick(View v) { Uri uri = MediaDAO.getLastPhotoFromCameraPhotos(getApplicationContext()); if (uri != null) { final GCAccountMediaModel model = new GCAccountMediaModel(); 
model.setLargeUrl(uri.toString()); model.setThumbUrl(uri.toString()); model.setUrl(uri.toString()); IntentUtil.deliverDataToInitialActivity(ChooseServiceActivity.this, model, ppWrapper.getChuteId()); } } } @Override protected void onNewIntent(Intent intent) { super.onNewIntent(intent); setResult(Activity.RESULT_OK, new Intent().putExtras(intent.getExtras())); finish(); } @Override protected void onResume() { super.onResume(); if (GCPreferenceUtil.get().hasAccountId(AccountType.PICASA)) { if (GCPreferenceUtil.get().hasAccountName(AccountType.PICASA)) { txtPicasa.setText(GCPreferenceUtil.get().getAccountName(AccountType.PICASA)); } } if (GCPreferenceUtil.get().hasAccountId(AccountType.FACEBOOK)) { if (GCPreferenceUtil.get().hasAccountName(AccountType.FACEBOOK)) { txtFacebook.setText(GCPreferenceUtil.get().getAccountName(AccountType.FACEBOOK)); } } if (GCPreferenceUtil.get().hasAccountId(AccountType.FLICKR)) { if (GCPreferenceUtil.get().hasAccountName(AccountType.FLICKR)) { txtFlickr.setText(GCPreferenceUtil.get().getAccountName(AccountType.FLICKR)); } } if (GCPreferenceUtil.get().hasAccountId(AccountType.INSTAGRAM)) { if (GCPreferenceUtil.get().hasAccountName(AccountType.INSTAGRAM)) { txtInstagram.setText(GCPreferenceUtil.get().getAccountName(AccountType.INSTAGRAM)); } } } }
Android/ChutePhotoPicker+/src/com/chute/android/photopickerplus/app/ChooseServiceActivity.java
/* * Copyright (c) 2012 Chute Corporation Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package com.chute.android.photopickerplus.app; import java.io.File; import java.io.FileNotFoundException; import android.app.Activity; import android.content.Intent; import android.content.pm.PackageManager; import android.database.Cursor; import android.net.Uri; import android.os.Bundle; import android.provider.MediaStore; import android.util.Log; import android.view.View; import android.view.View.OnClickListener; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; import com.chute.android.photopickerplus.R; import com.chute.android.photopickerplus.dao.MediaDAO; import com.chute.android.photopickerplus.util.AppUtil; import com.chute.android.photopickerplus.util.Constants; import com.chute.android.photopickerplus.util.NotificationUtil; import com.chute.android.photopickerplus.util.intent.AlbumsActivityIntentWrapper; import com.chute.android.photopickerplus.util.intent.IntentUtil; import com.chute.android.photopickerplus.util.intent.PhotoPickerPlusIntentWrapper; import com.chute.android.photopickerplus.util.intent.PhotosIntentWrapper; import com.chute.sdk.api.GCHttpCallback; import com.chute.sdk.api.account.GCAccounts; import com.chute.sdk.api.authentication.GCAuthenticationFactory.AccountType; import com.chute.sdk.collections.GCAccountsCollection; import com.chute.sdk.model.GCAccountMediaModel; import com.chute.sdk.model.GCAccountModel; import com.chute.sdk.model.GCAccountStore; import com.chute.sdk.model.GCHttpRequestParameters; import com.chute.sdk.utils.GCPreferenceUtil; import com.darko.imagedownloader.ImageLoader; public class ChooseServiceActivity extends Activity { public static final String TAG = ChooseServiceActivity.class.getSimpleName(); private TextView txtFacebook; private TextView txtPicasa; private TextView txtFlickr; private TextView txtInstagram; private LinearLayout take_photos; private LinearLayout facebook; private LinearLayout picasa; private LinearLayout instagram; private LinearLayout 
flickr; private LinearLayout allPhotos; private LinearLayout cameraPhotos; private LinearLayout lastPhoto; private ImageView img_all_photos; private ImageView img_camera_photos; private ImageView img_last_photo; private AccountType accountType; private ImageLoader loader; private PhotoPickerPlusIntentWrapper ppWrapper; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.service_layout); loader = ImageLoader.getLoader(ChooseServiceActivity.this); ppWrapper = new PhotoPickerPlusIntentWrapper(getIntent()); txtFacebook = (TextView) findViewById(R.id.txt_facebook); txtFacebook.setTag(AccountType.FACEBOOK); txtPicasa = (TextView) findViewById(R.id.txt_picasa); txtPicasa.setTag(AccountType.PICASA); txtFlickr = (TextView) findViewById(R.id.txt_flickr); txtFlickr.setTag(AccountType.FLICKR); txtInstagram = (TextView) findViewById(R.id.txt_instagram); txtInstagram.setTag(AccountType.INSTAGRAM); facebook = (LinearLayout) findViewById(R.id.linear_fb); facebook.setTag(AccountType.FACEBOOK); flickr = (LinearLayout) findViewById(R.id.linear_flickr); flickr.setTag(AccountType.FLICKR); picasa = (LinearLayout) findViewById(R.id.linear_picasa); picasa.setTag(AccountType.PICASA); instagram = (LinearLayout) findViewById(R.id.linear_instagram); instagram.setTag(AccountType.INSTAGRAM); allPhotos = (LinearLayout) findViewById(R.id.all_photos_linear); allPhotos.setOnClickListener(new OnPhotoStreamListener()); cameraPhotos = (LinearLayout) findViewById(R.id.camera_shots_linear); cameraPhotos.setOnClickListener(new OnCameraRollListener()); lastPhoto = (LinearLayout) findViewById(R.id.last_photo_linear); lastPhoto.setOnClickListener(new OnLastPhotoClickListener()); img_all_photos = (ImageView) findViewById(R.id.all_photos_icon); img_camera_photos = (ImageView) findViewById(R.id.camera_shots_icon); img_last_photo = (ImageView) findViewById(R.id.last_photo_icon); 
loader.displayImage(MediaDAO.getLastPhotoFromAllPhotos(getApplicationContext()).toString(), img_all_photos); Uri uri = MediaDAO.getLastPhotoFromCameraPhotos(getApplicationContext()); if (uri != null) { loader.displayImage(uri.toString(), img_camera_photos); loader.displayImage(uri.toString(), img_last_photo); } take_photos = (LinearLayout) findViewById(R.id.album3_linear); take_photos.setOnClickListener(new OnCameraClickListener()); facebook.setOnClickListener(new OnLoginClickListener()); picasa.setOnClickListener(new OnLoginClickListener()); flickr.setOnClickListener(new OnLoginClickListener()); instagram.setOnClickListener(new OnLoginClickListener()); } private final class OnLoginClickListener implements OnClickListener { @Override public void onClick(View v) { accountType = (AccountType) v.getTag(); if (GCPreferenceUtil.get().hasAccountId(accountType)) { accountClicked(GCPreferenceUtil.get().getAccountId(accountType), accountType.getName()); } else { GCAccountStore.getInstance(getApplicationContext()).startAuthenticationActivity( ChooseServiceActivity.this, accountType, Constants.PERMISSIONS_SCOPE, Constants.CALLBACK_URL, Constants.CLIENT_ID, Constants.CLIENT_SECRET); } } } private final class AccountsCallback implements GCHttpCallback<GCAccountsCollection> { @Override public void onSuccess(GCAccountsCollection responseData) { if (accountType == null) { return; } for (GCAccountModel accountModel : responseData) { if (accountModel.getType().equalsIgnoreCase(accountType.getName())) { GCPreferenceUtil.get().setNameForAccount(accountType, accountModel.getUser().getName()); GCPreferenceUtil.get().setIdForAccount(accountType, accountModel.getId()); accountClicked(accountModel.getId(), accountType.getName()); } } } @Override public void onHttpException(GCHttpRequestParameters params, Throwable exception) { } @Override public void onHttpError(int responseCode, String statusMessage) { } @Override public void onParserException(int responseCode, Throwable exception) { } } 
public void accountClicked(String accountId, String accountName) { AlbumsActivityIntentWrapper wrapper = new AlbumsActivityIntentWrapper( ChooseServiceActivity.this); wrapper.setMultiPicker(ppWrapper.getIsMultiPicker()); wrapper.setAccountId(accountId); wrapper.setAccountName(accountName); wrapper.startActivity(ChooseServiceActivity.this); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (resultCode == Activity.RESULT_OK) { if (requestCode == GCAccountStore.AUTHENTICATION_REQUEST_CODE) { GCAccounts.all(getApplicationContext(), new AccountsCallback()).executeAsync(); } if (requestCode == PhotosIntentWrapper.ACTIVITY_FOR_RESULT_STREAM_KEY) { finish(); } else if (requestCode == Constants.CAMERA_PIC_REQUEST) { // Bitmap image = (Bitmap) data.getExtras().get("data"); String path = ""; File tempFile = AppUtil.getTempFile(getApplicationContext()); if (AppUtil.hasImageCaptureBug() == false && tempFile.length() > 0) { Uri u; try { u = Uri.parse(android.provider.MediaStore.Images.Media.insertImage( getContentResolver(), tempFile.getAbsolutePath(), null, null)); tempFile.delete(); path =Uri.fromFile(new File(convertMediaUriToPath(u))).toString(); } catch (FileNotFoundException e) { Log.d(TAG, "", e); } } else { Log.e(TAG, "Bug " + data.getData().getPath()); path = Uri.fromFile( new File(AppUtil.getPath(getApplicationContext(), data.getData()))) .toString(); } Log.d(TAG, path); final GCAccountMediaModel model = new GCAccountMediaModel(); model.setLargeUrl(path); model.setThumbUrl(path); model.setUrl(path); IntentUtil.deliverDataToInitialActivity(this, model, ppWrapper.getChuteId()); } } } protected String convertMediaUriToPath(Uri uri) { String[] proj = { MediaStore.Images.Media.DATA }; Cursor cursor = getContentResolver().query(uri, proj, null, null, null); int column_index = cursor.getColumnIndexOrThrow(MediaStore.Images.Media.DATA); cursor.moveToFirst(); String path = 
cursor.getString(column_index); cursor.close(); return path; } private class OnCameraClickListener implements OnClickListener { @Override public void onClick(View v) { if (!getPackageManager().hasSystemFeature(PackageManager.FEATURE_CAMERA)) { NotificationUtil.makeToast(getApplicationContext(), R.string.toast_feature_camera); return; } final Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE); if (AppUtil.hasImageCaptureBug() == false) { intent.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(AppUtil.getTempFile(ChooseServiceActivity.this))); } else { intent.putExtra(android.provider.MediaStore.EXTRA_OUTPUT, android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI); } startActivityForResult(intent, Constants.CAMERA_PIC_REQUEST); } } private final class OnPhotoStreamListener implements OnClickListener { @Override public void onClick(View v) { final PhotosIntentWrapper wrapper = new PhotosIntentWrapper(ChooseServiceActivity.this); wrapper.setFilterType(PhotosIntentWrapper.TYPE_ALL_PHOTOS); wrapper.setMultiPicker(ppWrapper.getIsMultiPicker()); wrapper.setChuteId(ppWrapper.getChuteId()); wrapper.startActivityForResult(ChooseServiceActivity.this, PhotosIntentWrapper.ACTIVITY_FOR_RESULT_STREAM_KEY); } } private final class OnCameraRollListener implements OnClickListener { @Override public void onClick(View v) { final PhotosIntentWrapper wrapper = new PhotosIntentWrapper(ChooseServiceActivity.this); wrapper.setMultiPicker(ppWrapper.getIsMultiPicker()); wrapper.setFilterType(PhotosIntentWrapper.TYPE_CAMERA_ROLL); wrapper.setChuteId(ppWrapper.getChuteId()); wrapper.startActivityForResult(ChooseServiceActivity.this, PhotosIntentWrapper.ACTIVITY_FOR_RESULT_STREAM_KEY); } } private final class OnLastPhotoClickListener implements OnClickListener { @Override public void onClick(View v) { Uri uri = MediaDAO.getLastPhotoFromCameraPhotos(getApplicationContext()); if (uri != null) { final GCAccountMediaModel model = new GCAccountMediaModel(); 
model.setLargeUrl(uri.toString()); model.setThumbUrl(uri.toString()); model.setUrl(uri.toString()); IntentUtil.deliverDataToInitialActivity(ChooseServiceActivity.this, model, ppWrapper.getChuteId()); } } } @Override protected void onNewIntent(Intent intent) { super.onNewIntent(intent); setResult(Activity.RESULT_OK, new Intent().putExtras(intent.getExtras())); finish(); } @Override protected void onResume() { super.onResume(); if (GCPreferenceUtil.get().hasAccountId(AccountType.PICASA)) { if (GCPreferenceUtil.get().hasAccountName(AccountType.PICASA)) { txtPicasa.setText(GCPreferenceUtil.get().getAccountName(AccountType.PICASA)); } } if (GCPreferenceUtil.get().hasAccountId(AccountType.FACEBOOK)) { if (GCPreferenceUtil.get().hasAccountName(AccountType.FACEBOOK)) { txtFacebook.setText(GCPreferenceUtil.get().getAccountName(AccountType.FACEBOOK)); } } if (GCPreferenceUtil.get().hasAccountId(AccountType.FLICKR)) { if (GCPreferenceUtil.get().hasAccountName(AccountType.FLICKR)) { txtFlickr.setText(GCPreferenceUtil.get().getAccountName(AccountType.FLICKR)); } } if (GCPreferenceUtil.get().hasAccountId(AccountType.INSTAGRAM)) { if (GCPreferenceUtil.get().hasAccountName(AccountType.INSTAGRAM)) { txtInstagram.setText(GCPreferenceUtil.get().getAccountName(AccountType.INSTAGRAM)); } } } }
fixed take photo return url
Android/ChutePhotoPicker+/src/com/chute/android/photopickerplus/app/ChooseServiceActivity.java
fixed take photo return url
<ide><path>ndroid/ChutePhotoPicker+/src/com/chute/android/photopickerplus/app/ChooseServiceActivity.java <ide> u = Uri.parse(android.provider.MediaStore.Images.Media.insertImage( <ide> getContentResolver(), tempFile.getAbsolutePath(), null, null)); <ide> tempFile.delete(); <del> path =Uri.fromFile(new File(convertMediaUriToPath(u))).toString(); <add> path = Uri.fromFile(new File(convertMediaUriToPath(u))).toString(); <ide> } catch (FileNotFoundException e) { <ide> Log.d(TAG, "", e); <ide> }
Java
mit
285dbc161c3f4e00d5355d305901a28788c49814
0
takayanagi2087/dataforms,takayanagi2087/dataforms,takayanagi2087/dataforms
package dataforms.dao.sqlgen.oracle; import java.sql.Connection; import dataforms.annotation.SqlGeneratorImpl; import dataforms.dao.Query; import dataforms.dao.QueryPager; import dataforms.dao.Table; import dataforms.dao.sqldatatype.SqlBigint; import dataforms.dao.sqldatatype.SqlChar; import dataforms.dao.sqldatatype.SqlInteger; import dataforms.dao.sqldatatype.SqlNumeric; import dataforms.dao.sqldatatype.SqlSmallint; import dataforms.dao.sqldatatype.SqlTime; import dataforms.dao.sqldatatype.SqlVarchar; import dataforms.dao.sqlgen.SqlGenerator; import dataforms.dao.sqlgen.SqlParser; import dataforms.field.base.Field; import dataforms.util.StringUtil; /** * Oracle用SQL Generator。 * */ @SqlGeneratorImpl(databaseProductName = OracleSqlGenerator.DATABASE_PRODUCT_NAME) public class OracleSqlGenerator extends SqlGenerator { /** * データベースシステムの名称。 */ public static final String DATABASE_PRODUCT_NAME = "Oracle"; /** * コンストラクタ. * @param conn JDBC接続情報. */ public OracleSqlGenerator(final Connection conn) { super(conn); } @Override public String getDatabaseProductName() { return DATABASE_PRODUCT_NAME; } @Override public SqlParser newSqlParser(final String sql) { return new OracleSqlParser(sql); } /** * {@inheritDoc} * シーケンスをサポートしているのでtrueを返します。 */ @Override public boolean isSequenceSupported() { return true; } /** * {@inheritDoc} * nullを返します。 */ @Override public String generateAdjustSequenceSql(final Table table) throws Exception { return null; } /** * {@inheritDoc} * テーブル情報を取得するときには、DatabaseMetadataに対し、大文字のテーブル名を渡す必要があるので * テーブル名を大文字に変換します。 */ @Override public String convertTableNameForDatabaseMetaData(final String tblname) { return tblname.toUpperCase(); } /** * {@inheritDoc} */ @Override public String converTypeNameForDatabaseMetaData(final String type) { String ret = super.converTypeNameForDatabaseMetaData(type); if ("varchar2".equals(ret)) { return "varchar"; } else if ("char".equals(ret)) { return "char"; } else if ("float".equals(ret)) { return "real"; } else if 
(ret.indexOf("timestamp") == 0) { return "timestamp"; } return ret; } /** * {@inheritDoc} * <pre> * SqlClobの実装クラスはtext型のカラムを作成します。 * SqlBlobの実装クラスはbytea型のカラムを作成します。 * </pre> */ @Override public String getDatabaseType(final Field<?> field) { String type = field.getDbDependentType(DATABASE_PRODUCT_NAME); if (type != null) { return type; } String ret = ""; if (field instanceof SqlVarchar) { ret = "nvarchar2(" + field.getLength() + ")"; } else if (field instanceof SqlChar) { ret = "nchar(" + field.getLength() + ")"; } else if (field instanceof SqlSmallint) { ret = "number(38,0)"; } else if (field instanceof SqlBigint) { ret = "number(38,0)"; } else if (field instanceof SqlInteger) { ret = "number(38,0)"; } else if (field instanceof SqlNumeric) { SqlNumeric nf = (SqlNumeric) field; ret = "number(" + nf.getPrecision() + "," + nf.getScale() + ")"; } else if (field instanceof SqlTime) { ret = "timestamp"; } else { return super.getDatabaseType(field); } if (field.isNotNull()) { ret += " not null"; } else { //ret += " null"; } return ret; } /** * {@inheritDoc} * postgresqlは標準的なcomment文をサポートするので、COMMENTを返します。 * */ @Override protected CommentSyntax getCommentSyntax() { return SqlGenerator.CommentSyntax.COMMENT; } /** * {@inheritDoc} * pg_stat_user_tablesにテーブルが登録されているか確認するSQLを作成します。 */ @Override public String generateTableExistsSql() { String sql = "select count(*) as table_exists from cat where LOWER(table_name) = :table_name"; return sql; } /** * {@inheritDoc} * information_schema.sequencesにシーケンスが登録されているか確認するSQLを作成します。 */ @Override public String generateSequenceExistsSql() { String sql = "select count(*) as SEQUENCE_EXISTS from user_sequences where LOWER(sequence_name)=:sequence_name"; return sql; } @Override public String generateCreateSequenceSql(final String seqname, final Long startValue) throws Exception { String ret = "create sequence " + seqname + " start with " + startValue + " minvalue 0"; return ret; } @Override public String generateGetRecordIdSql(final String 
tablename) throws Exception { return "select " + tablename + "_seq.nextval as seq from dual"; } @Override public String generateSysTimestampSql() { return "current_timestamp"; } @Override public String generateGetPageSql(final QueryPager qp) { String orgsql = this.getOrgSql(qp); // String sql = "select * from (select row_number() over() as row_no, m.* from (" + orgsql + ") as m) as m where (:row_from + 1) <= m.row_no and m.row_no <= (:row_to + 1)"; String sql = "select * from (select rownum as row_no, m.* from (" + orgsql + ") m) m where (:row_from + 1) <= m.row_no and m.row_no <= (:row_to + 1)"; return sql; } @Override protected String getAsAliasSql() { return " "; } /** * レコード数をカウントするsqlを作成します。 * @param query 問い合わせ。 * @return レコード数をカウントするsql。 */ @Override public String generateHitCountSql(final Query query) { String orgsql = this.generateQuerySql(query, true); String sql = "select count(*) as cnt from (" + orgsql + ") m"; return sql; } /** * レコード数をカウントするsqlを作成します。 * @param qp QueryPager・ * @return レコード数をカウントするsql。 */ @Override public String generateHitCountSql(final QueryPager qp) { String orgsql = getOrgSql(qp); String sql = "select count(*) as cnt from (" + orgsql + ")"; return sql; } /** * レコード数をカウントするsqlを作成します。 * @param orgsql SQL。 * @return レコード数をカウントするsql。 */ @Override public String generateHitCountSql(final String orgsql) { String sql = "select count(*) as cnt from (" + orgsql + ") m"; return sql; } /** * BLOB等のファイルフィールドの更新用SQLを作成します。 * <pre> * 既にファイルが登録されており、ファイルが送信されない場合は、そのままの値を保持するSQLを生成します。 * </pre> * @param id フィールドID. * @return SQL. */ @Override protected String generateUpdateFileFieldSql(final String id) { String pid = StringUtil.camelToSnake(id); // String ret = "case when TO_CHAR(:" + pid + "_kf) = TO_CHAR('1') then " + pid + " else :" + pid + " end "; String ret = "decode(:" + pid + "_kf, '1', " + pid + ",:" + pid + ")"; return ret; } }
src/dataforms/dao/sqlgen/oracle/OracleSqlGenerator.java
package dataforms.dao.sqlgen.oracle; import java.sql.Connection; import dataforms.annotation.SqlGeneratorImpl; import dataforms.dao.Query; import dataforms.dao.QueryPager; import dataforms.dao.Table; import dataforms.dao.sqldatatype.SqlBigint; import dataforms.dao.sqldatatype.SqlChar; import dataforms.dao.sqldatatype.SqlInteger; import dataforms.dao.sqldatatype.SqlNumeric; import dataforms.dao.sqldatatype.SqlSmallint; import dataforms.dao.sqldatatype.SqlTime; import dataforms.dao.sqldatatype.SqlVarchar; import dataforms.dao.sqlgen.SqlGenerator; import dataforms.dao.sqlgen.SqlParser; import dataforms.field.base.Field; import dataforms.util.StringUtil; /** * Oracle用SQL Generator。 * */ @SqlGeneratorImpl(databaseProductName = OracleSqlGenerator.DATABASE_PRODUCT_NAME) public class OracleSqlGenerator extends SqlGenerator { /** * データベースシステムの名称。 */ public static final String DATABASE_PRODUCT_NAME = "Oracle"; /** * コンストラクタ. * @param conn JDBC接続情報. */ public OracleSqlGenerator(final Connection conn) { super(conn); } @Override public String getDatabaseProductName() { return DATABASE_PRODUCT_NAME; } @Override public SqlParser newSqlParser(final String sql) { return new OracleSqlParser(sql); } /** * {@inheritDoc} * シーケンスをサポートしているのでtrueを返します。 */ @Override public boolean isSequenceSupported() { return true; } /** * {@inheritDoc} * nullを返します。 */ @Override public String generateAdjustSequenceSql(final Table table) throws Exception { return null; } /** * {@inheritDoc} * テーブル情報を取得するときには、DatabaseMetadataに対し、大文字のテーブル名を渡す必要があるので * テーブル名を大文字に変換します。 */ @Override public String convertTableNameForDatabaseMetaData(final String tblname) { return tblname.toUpperCase(); } /** * {@inheritDoc} */ @Override public String converTypeNameForDatabaseMetaData(final String type) { String ret = super.converTypeNameForDatabaseMetaData(type); if ("varchar2".equals(ret)) { return "varchar"; } else if ("char".equals(ret)) { return "char"; } else if ("float".equals(ret)) { return "real"; } else if 
(ret.indexOf("timestamp") == 0) { return "timestamp"; } return ret; } /** * {@inheritDoc} * <pre> * SqlClobの実装クラスはtext型のカラムを作成します。 * SqlBlobの実装クラスはbytea型のカラムを作成します。 * </pre> */ @Override public String getDatabaseType(final Field<?> field) { String type = field.getDbDependentType(DATABASE_PRODUCT_NAME); if (type != null) { return type; } String ret = ""; if (field instanceof SqlVarchar) { ret = "nvarchar2(" + field.getLength() + ")"; } else if (field instanceof SqlChar) { ret = "nchar(" + field.getLength() + ")"; } else if (field instanceof SqlSmallint) { ret = "number(38,0)"; } else if (field instanceof SqlBigint) { ret = "number(38,0)"; } else if (field instanceof SqlInteger) { ret = "number(38,0)"; } else if (field instanceof SqlNumeric) { SqlNumeric nf = (SqlNumeric) field; ret = "number(" + nf.getPrecision() + "," + nf.getScale() + ")"; } else if (field instanceof SqlTime) { ret = "timestamp"; } else { return super.getDatabaseType(field); } if (field.isNotNull()) { ret += " not null"; } else { //ret += " null"; } return ret; } /** * {@inheritDoc} * postgresqlは標準的なcomment文をサポートするので、COMMENTを返します。 * */ @Override protected CommentSyntax getCommentSyntax() { return SqlGenerator.CommentSyntax.COMMENT; } /** * {@inheritDoc} * pg_stat_user_tablesにテーブルが登録されているか確認するSQLを作成します。 */ @Override public String generateTableExistsSql() { String sql = "select count(*) as table_exists from cat where LOWER(table_name) = :table_name"; return sql; } /** * {@inheritDoc} * information_schema.sequencesにシーケンスが登録されているか確認するSQLを作成します。 */ @Override public String generateSequenceExistsSql() { String sql = "select count(*) as SEQUENCE_EXISTS from user_sequences where LOWER(sequence_name)=:sequence_name"; return sql; } @Override public String generateCreateSequenceSql(final String seqname, final Long startValue) throws Exception { String ret = "create sequence " + seqname + " start with " + startValue + " minvalue 0"; return ret; } @Override public String generateGetRecordIdSql(final String 
tablename) throws Exception { return "select " + tablename + "_seq.nextval as seq from dual"; } @Override public String generateSysTimestampSql() { return "current_timestamp"; } @Override public String generateGetPageSql(final QueryPager qp) { String orgsql = this.getOrgSql(qp); // String sql = "select * from (select row_number() over() as row_no, m.* from (" + orgsql + ") as m) as m where (:row_from + 1) <= m.row_no and m.row_no <= (:row_to + 1)"; String sql = "select * from (select rownum as row_no, m.* from (" + orgsql + ") m) m where (:row_from + 1) <= m.row_no and m.row_no <= (:row_to + 1)"; return sql; } @Override protected String getAsAliasSql() { return " "; } /** * レコード数をカウントするsqlを作成します。 * @param query 問い合わせ。 * @return レコード数をカウントするsql。 */ @Override public String generateHitCountSql(final Query query) { String orgsql = this.generateQuerySql(query, true); String sql = "select count(*) as cnt from (" + orgsql + ") m"; return sql; } /** * レコード数をカウントするsqlを作成します。 * @param qp QueryPager・ * @return レコード数をカウントするsql。 */ @Override public String generateHitCountSql(final QueryPager qp) { String orgsql = getOrgSql(qp); String sql = "select count(*) as cnt from (" + orgsql + ")"; return sql; } /** * BLOB等のファイルフィールドの更新用SQLを作成します。 * <pre> * 既にファイルが登録されており、ファイルが送信されない場合は、そのままの値を保持するSQLを生成します。 * </pre> * @param id フィールドID. * @return SQL. */ @Override protected String generateUpdateFileFieldSql(final String id) { String pid = StringUtil.camelToSnake(id); // String ret = "case when TO_CHAR(:" + pid + "_kf) = TO_CHAR('1') then " + pid + " else :" + pid + " end "; String ret = "decode(:" + pid + "_kf, '1', " + pid + ",:" + pid + ")"; return ret; } }
Oracleで問合せ実行が動作しない不具合を修正。
src/dataforms/dao/sqlgen/oracle/OracleSqlGenerator.java
Oracleで問合せ実行が動作しない不具合を修正。
<ide><path>rc/dataforms/dao/sqlgen/oracle/OracleSqlGenerator.java <ide> String sql = "select count(*) as cnt from (" + orgsql + ")"; <ide> return sql; <ide> } <add> <add> /** <add> * レコード数をカウントするsqlを作成します。 <add> * @param orgsql SQL。 <add> * @return レコード数をカウントするsql。 <add> */ <add> @Override <add> public String generateHitCountSql(final String orgsql) { <add> String sql = "select count(*) as cnt from (" + orgsql + ") m"; <add> return sql; <add> } <ide> <ide> /** <ide> * BLOB等のファイルフィールドの更新用SQLを作成します。
Java
lgpl-2.1
c13cbd150bfd64359b8418e88b5f9e8273dd6de9
0
biojava/biojava,JolantaWojcik/biojavaOwn,zachcp/biojava,biojava/biojava,fionakim/biojava,pwrose/biojava,biojava/biojava,paolopavan/biojava,lafita/biojava,sbliven/biojava-sbliven,lafita/biojava,heuermh/biojava,heuermh/biojava,emckee2006/biojava,pwrose/biojava,sbliven/biojava-sbliven,emckee2006/biojava,JolantaWojcik/biojavaOwn,fionakim/biojava,andreasprlic/biojava,paolopavan/biojava,pwrose/biojava,paolopavan/biojava,fionakim/biojava,andreasprlic/biojava,emckee2006/biojava,sbliven/biojava-sbliven,heuermh/biojava,zachcp/biojava,andreasprlic/biojava,lafita/biojava,zachcp/biojava,andreasprlic/biojava
/* * BioJava development code * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. If you do not have a copy, * see: * * http://www.gnu.org/copyleft/lesser.html * * Copyright for this code is held jointly by the individual * authors. These should be listed in @author doc comments. * * For more information on the BioJava project and its aims, * or to join the biojava-l mailing list, visit the home page * at: * * http://www.biojava.org/ * * Created on Jan 4, 2006 * */ package org.biojava.bio.structure; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.biojava.bio.structure.align.util.AtomCache; import org.biojava.bio.structure.io.PDBFileParser; import org.biojava.bio.structure.io.mmcif.chem.PolymerType; import org.biojava.bio.structure.io.mmcif.chem.ResidueType; import org.biojava.bio.structure.io.mmcif.model.ChemComp; /** * A class that provides some tool methods. * * @author Andreas Prlic, Jules Jacobsen * @since 1.0 * @version %I% %G% */ public class StructureTools { /** The Atom name of C-alpha atoms. * */ public static final String caAtomName = " CA "; public static final String nAtomName = "N"; public static final String oAtomName = "O"; public static final String cbAtomName = "CB"; /** The names of the Atoms that form the backbone. 
* */ public static final String[] backboneAtomNames = {nAtomName,caAtomName,"C",oAtomName, cbAtomName}; public static final Character UNKNOWN_GROUP_LABEL = new Character('x');; //private static final String insertionCodeRegExp = "([0-9]+)([a-zA-Z]*)"; //private static final Pattern insertionCodePattern = Pattern.compile(insertionCodeRegExp); // there is a file format change in PDB 3.0 and nucleotides are being renamed static private Map<String, Integer> nucleotides30 ; static private Map<String, Integer> nucleotides23 ; //amino acid 3 and 1 letter code definitions private static final Map<String, Character> aminoAcids; private static final Set<Element> hBondDonorAcceptors; // // for conversion 3code 1code // private static SymbolTokenization threeLetter ; // private static SymbolTokenization oneLetter ; public static Logger logger = Logger.getLogger("org.biojava.bio.structure"); /** * Pattern to describe subranges. Matches "A", "A:", "A:7-53","A_7-53", etc. * @see #getSubRanges(Structure, String) */ public static final Pattern pdbNumRangeRegex = Pattern.compile( "^\\s*(\\w)" + //chain ID "(?:" + //begin range, this is a "non-capturing group" "(?::|_|:$|_$|$)" + //colon or underscore, could be at the end of a line, another non-capt. group. "(?:"+ // another non capturing group for the residue range "([-+]?[0-9]+[A-Za-z]?)" + // first residue "\\s*-\\s*" + // - "([-+]?[0-9]+[A-Za-z]?)" + // second residue ")?+"+ ")?" 
+ //end range "\\s*"); static { nucleotides30 = new HashMap<String,Integer>(); nucleotides30.put("DA",1); nucleotides30.put("DC",1); nucleotides30.put("DG",1); nucleotides30.put("DT",1); nucleotides30.put("DI",1); nucleotides30.put("A",1); nucleotides30.put("G",1); nucleotides30.put("C",1); nucleotides30.put("U",1); nucleotides30.put("I",1); //TODO: check if they are always HETATMs, in that case this will not be necessary // the DNA linkers - the +C , +G, +A +T +U and +I have been replaced with these: nucleotides30.put("TAF",1); // 2'-DEOXY-2'-FLUORO-ARABINO-FURANOSYL THYMINE-5'-PHOSPHATE nucleotides30.put("TC1",1); // 3-(5-PHOSPHO-2-DEOXY-BETA-D-RIBOFURANOSYL)-2-OXO-1,3-DIAZA-PHENOTHIAZINE nucleotides30.put("TFE",1); // 2'-O-[2-(TRIFLUORO)ETHYL] THYMIDINE-5'-MONOPHOSPHATE nucleotides30.put("TFO",1); // [2-(6-AMINO-9H-PURIN-9-YL)-1-METHYLETHOXY]METHYLPHOSPHONIC ACID" nucleotides30.put("TGP",1); // 5'-THIO-2'-DEOXY-GUANOSINE PHOSPHONIC ACID nucleotides30.put("THX",1); // PHOSPHONIC ACID 6-({6-[6-(6-CARBAMOYL-3,6,7,8-TETRAHYDRO-3,6-DIAZA-AS-INDACENE-2-CARBONYL)-3,6,7,8-TETRAHYDRO-3,6-DIAZA-AS-INDOCENE-2-CARBONYL]-3,6,7,8-TETRAHYDRO-3,6-DIAZA-AS-INDACENE-2-CARBONL}-AMINO)-HEXYL ESTER 5-(5-METHYL-2,4-DIOXO-3,4-DIHYDRO-2H-PYRIMIDIN-1-YL)-TETRAHYDRO-FURAN-2-YLMETHYL ESTER nucleotides30.put("TLC",1); // 2-O,3-ETHDIYL-ARABINOFURANOSYL-THYMINE-5'-MONOPHOSPHATE nucleotides30.put("TLN",1); // [(1R,3R,4R,7S)-7-HYDROXY-3-(THYMIN-1-YL)-2,5-DIOXABICYCLO[2.2.1]HEPT-1-YL]METHYL DIHYDROGEN PHOSPHATE" nucleotides30.put("TP1",1); // 2-(METHYLAMINO)-ETHYLGLYCINE-CARBONYLMETHYLENE-THYMINE nucleotides30.put("TPC",1); // 5'-THIO-2'-DEOXY-CYTOSINE PHOSPHONIC ACID nucleotides30.put("TPN",1); // 2-AMINOETHYLGLYCINE-CARBONYLMETHYLENE-THYMINE // store nucleic acids (C, G, A, T, U, and I), and // the modified versions of nucleic acids (+C, +G, +A, +T, +U, and +I), and nucleotides23 = new HashMap<String,Integer>(); String[] names = {"C","G","A","T","U","I","+C","+G","+A","+T","+U","+I"}; for 
(int i = 0; i < names.length; i++) { String n = names[i]; nucleotides23.put(n,1); } aminoAcids = new HashMap<String, Character>(); aminoAcids.put("GLY", new Character('G')); aminoAcids.put("ALA", new Character('A')); aminoAcids.put("VAL", new Character('V')); aminoAcids.put("LEU", new Character('L')); aminoAcids.put("ILE", new Character('I')); aminoAcids.put("PHE", new Character('F')); aminoAcids.put("TYR", new Character('Y')); aminoAcids.put("TRP", new Character('W')); aminoAcids.put("PRO", new Character('P')); aminoAcids.put("HIS", new Character('H')); aminoAcids.put("LYS", new Character('K')); aminoAcids.put("ARG", new Character('R')); aminoAcids.put("SER", new Character('S')); aminoAcids.put("THR", new Character('T')); aminoAcids.put("GLU", new Character('E')); aminoAcids.put("GLN", new Character('Q')); aminoAcids.put("ASP", new Character('D')); aminoAcids.put("ASN", new Character('N')); aminoAcids.put("CYS", new Character('C')); aminoAcids.put("MET", new Character('M')); //MSE is only found as a molecular replacement for MET aminoAcids.put("MSE", new Character('M')); //'non-standard', genetically encoded //http://www.chem.qmul.ac.uk/iubmb/newsletter/1999/item3.html //IUBMB recommended name is 'SEC' but the wwPDB currently use 'CSE' //likewise 'PYL' (IUBMB) and 'PYH' (PDB) aminoAcids.put("CSE", new Character('U')); aminoAcids.put("SEC", new Character('U')); aminoAcids.put("PYH", new Character('O')); aminoAcids.put("PYL", new Character('O')); hBondDonorAcceptors = new HashSet<Element>(); hBondDonorAcceptors.add(Element.N); hBondDonorAcceptors.add(Element.O); hBondDonorAcceptors.add(Element.S); } /** Count how many number of Atoms are contained within a Structure object. 
* * @param s the structure object * @return the number of Atoms in this Structure */ public static final int getNrAtoms(Structure s){ int nrAtoms = 0; Iterator<Group> iter = new GroupIterator(s); while ( iter.hasNext()){ Group g = (Group) iter.next(); nrAtoms += g.size(); } return nrAtoms; } /** Count how many groups are contained within a structure object. * * @param s the structure object * @return the number of groups in the structure */ public static final int getNrGroups(Structure s){ int nrGroups = 0; List<Chain> chains = s.getChains(0); Iterator<Chain> iter = chains.iterator(); while (iter.hasNext()){ Chain c = (Chain) iter.next(); nrGroups += c.getAtomLength(); } return nrGroups; } /** Returns an array of the requested Atoms from the Structure object. Iterates over all groups * and checks if the requested atoms are in this group, no matter if this is a {@link AminoAcid} or {@link HetatomImpl} group. * For structures with more than one model, only model 0 will be used. * * @param s the structure to get the atoms from * * @param atomNames contains the atom names to be used. * @return an Atom[] array */ public static final Atom[] getAtomArray(Structure s, String[] atomNames){ List<Chain> chains = s.getModel(0); List<Atom> atoms = new ArrayList<Atom>(); extractCAatoms(atomNames, chains, atoms); return (Atom[]) atoms.toArray(new Atom[atoms.size()]); } /** Returns an array of the requested Atoms from the Structure object. * In contrast to {@link #getAtomArray(Structure, String[])} this method iterates over all chains. * Iterates over all chains and groups * and checks if the requested atoms are in this group, no matter if this is a {@link AminoAcid} or {@link HetatomImpl} group. * For structures with more than one model, only model 0 will be used. * * @param s the structure to get the atoms from * * @param atomNames contains the atom names to be used. 
* @return an Atom[] array */ public static final Atom[] getAtomArrayAllModels(Structure s, String[] atomNames){ List<Atom> atoms = new ArrayList<Atom>(); for (int i =0 ; i < s.nrModels(); i++ ) { List<Chain> chains = s.getModel(i); extractCAatoms(atomNames, chains, atoms); } return (Atom[]) atoms.toArray(new Atom[atoms.size()]); } /** Convert all atoms of the structure (first model) into an Atom array * * @param s input structure * @return all atom array */ public static final Atom[] getAllAtomArray(Structure s) { List<Atom> atoms = new ArrayList<Atom>(); AtomIterator iter = new AtomIterator(s); while (iter.hasNext()){ Atom a = iter.next(); atoms.add(a); } return (Atom[]) atoms.toArray(new Atom[atoms.size()]); } private static void extractCAatoms(String[] atomNames, List<Chain> chains, List<Atom> atoms) { for ( Chain c : chains) { for ( Group g : c.getAtomGroups()) { // a temp container for the atoms of this group List<Atom> thisGroupAtoms = new ArrayList<Atom>(); // flag to check if this group contains all the requested atoms. boolean thisGroupAllAtoms = true; for ( int i = 0 ; i < atomNames.length; i++){ String atomName = atomNames[i]; try { Atom a = g.getAtom(atomName); thisGroupAtoms.add(a); } catch (StructureException e){ // this group does not have a required atom, skip it... thisGroupAllAtoms = false; break; } } if ( thisGroupAllAtoms){ // add the atoms of this group to the array. Iterator<Atom> aIter = thisGroupAtoms.iterator(); while(aIter.hasNext()){ Atom a = (Atom) aIter.next(); atoms.add(a); } } } } } /** Returns an array of the requested Atoms from the Structure object. Iterates over all groups * and checks if the requested atoms are in this group, no matter if this is a AminoAcid or Hetatom group. * * * @param c the Chain to get the atoms from * * @param atomNames contains the atom names to be used. 
* @return an Atom[] array */ public static final Atom[] getAtomArray(Chain c, String[] atomNames){ List<Group> groups = c.getAtomGroups(); List<Atom> atoms = new ArrayList<Atom>(); for (Group g : groups){ // a temp container for the atoms of this group List<Atom> thisGroupAtoms = new ArrayList<Atom>(); // flag to check if this group contains all the requested atoms. boolean thisGroupAllAtoms = true; for ( int i = 0 ; i < atomNames.length; i++){ String atomName = atomNames[i]; try { Atom a = g.getAtom(atomName); thisGroupAtoms.add(a); } catch (StructureException e){ // this group does not have a required atom, skip it... thisGroupAllAtoms = false; break; } } if ( thisGroupAllAtoms){ // add the atoms of this group to the array. Iterator<Atom> aIter = thisGroupAtoms.iterator(); while(aIter.hasNext()){ Atom a = (Atom) aIter.next(); atoms.add(a); } } } return (Atom[]) atoms.toArray(new Atom[atoms.size()]); } /** Returns an Atom array of the CA atoms. * @param c the structure object * @return an Atom[] array */ public static final Atom[] getAtomCAArray(Chain c){ String[] atomNames = {" CA " }; return getAtomArray(c,atomNames); } /** Provides an equivalent copy of Atoms in a new array. Clones everything, starting with parent * groups and chains. The chain will only contain groups that are part of the CA array. 
* * @param ca array of CA atoms * @return Atom array */ public static final Atom[] cloneCAArray(Atom[] ca) throws StructureException{ Atom[] newCA = new Atom[ca.length]; List<Chain> model = new ArrayList<Chain>(); int apos = -1; for(Atom a: ca){ apos++; Group parentG = a.getGroup(); Chain parentC = parentG.getChain(); Chain newChain = null; for ( Chain c : model){ if ( c.getChainID().equals(parentC.getChainID())){ newChain = c; break; } } if ( newChain == null){ newChain = new ChainImpl(); newChain.setChainID(parentC.getChainID()); model.add(newChain); } Group parentN = (Group)parentG.clone(); newCA[apos] = parentN.getAtom(" CA "); newChain.addGroup(parentN); } return newCA; } /** Clone a set of CA Atoms, but returns the parent groups * * @param ca Atom array * @return Group array */ public static Group[] cloneGroups(Atom[] ca) { Group[] newGroup = new Group[ca.length]; List<Chain> model = new ArrayList<Chain>(); int apos = -1; for(Atom a: ca){ apos++; Group parentG = a.getGroup(); Chain parentC = parentG.getChain(); Chain newChain = null; for ( Chain c : model){ if ( c.getChainID().equals(parentC.getChainID())){ newChain = c; break; } } if ( newChain == null){ newChain = new ChainImpl(); newChain.setChainID(parentC.getChainID()); model.add(newChain); } Group ng = (Group)parentG.clone(); newGroup[apos] = ng; newChain.addGroup(ng); } return newGroup; } /** Utility method for working with circular permutations. Creates a duplicated and cloned set of Calpha atoms from the input array. * * @param ca2 atom array * @return cloned and duplicated set of input array * @throws StructureException */ public static Atom[] duplicateCA2(Atom[] ca2) throws StructureException{ // we don't want to rotate input atoms, do we? 
Atom[] ca2clone = new Atom[ca2.length*2]; int pos = 0; Chain c = null; String prevChainId = ""; for (Atom a : ca2){ Group g = (Group) a.getGroup().clone(); // works because each group has only a CA atom if (c == null ) { c = new ChainImpl(); Chain orig= a.getGroup().getChain(); c.setChainID(orig.getChainID()); } else { Chain orig= a.getGroup().getChain(); if ( ! orig.getChainID().equals(prevChainId)){ c = new ChainImpl(); c.setChainID(orig.getChainID()); } } c.addGroup(g); ca2clone[pos] = g.getAtom(StructureTools.caAtomName); pos++; } // Duplicate ca2! c = null; prevChainId = ""; for (Atom a : ca2){ Group g = (Group)a.getGroup().clone(); if (c == null ) { c = new ChainImpl(); Chain orig= a.getGroup().getChain(); c.setChainID(orig.getChainID()); } else { Chain orig= a.getGroup().getChain(); if ( ! orig.getChainID().equals(prevChainId)){ c = new ChainImpl(); c.setChainID(orig.getChainID()); } } c.addGroup(g); ca2clone[pos] = g.getAtom(StructureTools.caAtomName); pos++; } return ca2clone; } /** Returns an Atom array of the CA atoms. * @param s the structure object * @return an Atom[] array */ public static Atom[] getAtomCAArray(Structure s){ String[] atomNames = {" CA "}; return getAtomArray(s,atomNames); } /** Returns an Atom array of the MainChain atoms. * @param s the structure object * @return an Atom[] array */ public static Atom[] getBackboneAtomArray(Structure s){ String[] atomNames = backboneAtomNames; return getAtomArray(s,atomNames); } /** convert three character amino acid codes into single character * e.g. 
convert CYS to C * @return a character * @param code3 a three character amino acid representation String * @throws IllegalSymbolException */ public static final Character convert_3code_1code(String code3) throws UnknownPdbAminoAcidException { // { // Symbol sym = threeLetter.parseToken(code3) ; // String code1 = oneLetter.tokenizeSymbol(sym); // // return new Character(code1.charAt(0)) ; Character code1 = null; code1 = aminoAcids.get(code3); if (code1 == null) { throw new UnknownPdbAminoAcidException(code3 + " not a standard amino acid"); } else { return code1; } } /** convert a three letter code into single character. * catches for unusual characters * * @param groupCode3 three letter representation * @return null if group is a nucleotide code */ public static final Character get1LetterCode(String groupCode3){ Character aminoCode1 = null; try { // is it a standard amino acid ? aminoCode1 = convert_3code_1code(groupCode3); } catch (UnknownPdbAminoAcidException e){ // hm groupCode3 is not standard // perhaps it is an nucleotide? if ( isNucleotide(groupCode3) ) { //System.out.println("nucleotide, aminoCode1:"+aminoCode1); aminoCode1= null; } else { // does not seem to be so let's assume it is // nonstandard aminoacid and label it "X" //logger.warning("unknown group name "+groupCode3 ); aminoCode1 = UNKNOWN_GROUP_LABEL; } } return aminoCode1; } /* Test if the threelettercode of an ATOM entry corresponds to a * nucleotide or to an aminoacid. * @param a 3-character code for a group. * */ public static final boolean isNucleotide(String groupCode3){ String code = groupCode3.trim(); if ( nucleotides30.containsKey(code)){ return true; } if ( nucleotides23.containsKey(code)){ return true; } return false ; } /** Reduce a structure to provide a smaller representation . Only takes the first model of the structure. If chainId is provided only return a structure containing that Chain ID. 
* Converts lower case chain IDs to upper case if structure does not contain a chain with that ID. * * @param s * @param chainId * @return Structure * @since 3.0 */ @SuppressWarnings("deprecation") public static final Structure getReducedStructure(Structure s, String chainId) throws StructureException{ // since we deal here with structure alignments, // only use Model 1... Structure newS = new StructureImpl(); newS.setHeader(s.getHeader()); newS.setPDBCode(s.getPDBCode()); newS.setPDBHeader(s.getPDBHeader()); newS.setName(s.getName()); newS.setSSBonds(s.getSSBonds()); newS.setDBRefs(s.getDBRefs()); newS.setSites(s.getSites()); newS.setNmr(s.isNmr()); newS.setBiologicalAssembly(s.isBiologicalAssembly()); newS.setCompounds(s.getCompounds()); newS.setConnections(s.getConnections()); newS.setSSBonds(s.getSSBonds()); newS.setSites(s.getSites()); if ( chainId != null) chainId = chainId.trim(); if ( chainId == null || chainId.equals("")){ // only get model 0 List<Chain> model0 = s.getModel(0); for (Chain c : model0){ newS.addChain(c); } return newS; } Chain c = null; try { c = s.getChainByPDB(chainId); } catch (StructureException e){ System.err.println(e.getMessage() + " trying upper case Chain id..."); c = s.getChainByPDB(chainId.toUpperCase()); } if ( c != null) { newS.addChain(c); for ( Compound comp : s.getCompounds()){ if ( comp.getChainId().contains(c.getChainID())){ // found matching compound. set description... newS.getPDBHeader().setDescription("Chain " + c.getChainID() + " of " + s.getPDBCode() + " " + comp.getMolName()); } } } return newS; } /** Reduce a structure to provide a smaller representation. * Only takes the first model of the structure. If chainNr >=0 only takes * the chain at that position into account. 
* * @param s * @param chainNr can be -1 to request all chains of model 0, otherwise will only add chain at this position * @return Structure object * @since 3.0 */ @SuppressWarnings("deprecation") public static final Structure getReducedStructure(Structure s, int chainNr) throws StructureException{ // since we deal here with structure alignments, // only use Model 1... Structure newS = new StructureImpl(); newS.setHeader(s.getHeader()); newS.setPDBCode(s.getPDBCode()); newS.setPDBHeader(s.getPDBHeader()); newS.setName(s.getName()); newS.setSSBonds(s.getSSBonds()); newS.setDBRefs(s.getDBRefs()); newS.setSites(s.getSites()); newS.setNmr(s.isNmr()); newS.setBiologicalAssembly(s.isBiologicalAssembly()); newS.setCompounds(s.getCompounds()); newS.setConnections(s.getConnections()); newS.setSSBonds(s.getSSBonds()); newS.setSites(s.getSites()); newS.setCrystallographicInfo(s.getCrystallographicInfo()); newS.getPDBHeader().setDescription("subset of " + s.getPDBCode() + " " + s.getPDBHeader().getDescription() ); if ( chainNr < 0 ) { // only get model 0 List<Chain> model0 = s.getModel(0); for (Chain c : model0){ newS.addChain(c); } return newS; } Chain c = null; c = s.getChain(0, chainNr); newS.addChain(c); return newS; } /** In addition to the functionality provided by getReducedStructure also provides a way to specify sub-regions of a structure with the following * specification: * * * ranges can be surrounded by ( and ). (but will be removed). 
* ranges are specified as * PDBresnum1 : PDBresnum2 * * a list of ranges is separated by , * * Example * 4GCR (A:1-83) * 1CDG (A:407-495,A:582-686) * 1CDG (A_407-495,A_582-686) * * @param s The full structure * @param ranges A comma-seperated list of ranges, optionally surrounded by parentheses * @return Substructure of s specified by ranges */ @SuppressWarnings("deprecation") public static final Structure getSubRanges(Structure s, String ranges ) throws StructureException { Structure struc = getReducedStructure(s, null); if ( ranges == null || ranges.equals("")) throw new IllegalArgumentException("ranges can't be null or empty"); ranges = ranges.trim(); if ( ranges.startsWith("(")) ranges = ranges.substring(1); if ( ranges.endsWith(")")) { ranges = ranges.substring(0,ranges.length()-1); } //special case: '-' means 'everything' if ( ranges.equals("-") ) { return s; } Structure newS = new StructureImpl(); newS.setHeader(s.getHeader()); newS.setPDBCode(s.getPDBCode()); newS.setPDBHeader(s.getPDBHeader()); newS.setName(s.getName()); newS.setDBRefs(s.getDBRefs()); newS.setNmr(s.isNmr()); newS.setBiologicalAssembly(s.isBiologicalAssembly()); newS.getPDBHeader().setDescription("sub-range " + ranges + " of " + newS.getPDBCode() + " " + s.getPDBHeader().getDescription()); newS.setCrystallographicInfo(s.getCrystallographicInfo()); // TODO The following should be only copied for atoms which are present in the range. //newS.setCompounds(s.getCompounds()); //newS.setConnections(s.getConnections()); //newS.setSSBonds(s.getSSBonds()); //newS.setSites(s.getSites()); String[] rangS =ranges.split(","); StringWriter name = new StringWriter(); name.append(s.getName()); boolean firstRange = true; String prevChainId = null; // parse the ranges, adding the specified residues to newS for ( String r: rangS){ // Match a single range, eg "A_4-27" Matcher matcher = pdbNumRangeRegex.matcher(r); if( ! 
matcher.matches() ){ throw new StructureException("wrong range specification, should be provided as chainID_pdbResnum1-pdbRensum2: "+ranges); } String chainId = matcher.group(1); Chain chain; if(chainId.equals("_") && struc.size() == 1) { // Handle special case of "_" chain for single-chain proteins chain = struc.getChain(0); } else { // Explicit chain chain = struc.getChainByPDB(chainId); } Group[] groups; String pdbresnumStart = matcher.group(2); String pdbresnumEnd = matcher.group(3); if ( ! firstRange){ name.append( ","); } else { name.append(AtomCache.CHAIN_SPLIT_SYMBOL); } if( pdbresnumStart != null && pdbresnumEnd != null) { // not a full chain //since Java doesn't allow '+' before integers, fix this up. if(pdbresnumStart.charAt(0) == '+') pdbresnumStart = pdbresnumStart.substring(1); if(pdbresnumEnd.charAt(0) == '+') pdbresnumEnd = pdbresnumEnd.substring(1); groups = chain.getGroupsByPDB(pdbresnumStart, pdbresnumEnd); name.append( chainId + AtomCache.UNDERSCORE + pdbresnumStart+"-" + pdbresnumEnd); } else { // full chain groups = chain.getAtomGroups().toArray(new Group[chain.getAtomGroups().size()]); name.append(chainId); } firstRange = true; // Create new chain, if needed Chain c = null; if ( prevChainId == null) { // first chain... c = new ChainImpl(); c.setChainID(chain.getChainID()); newS.addChain(c); } else if ( prevChainId.equals(chain.getChainID())) { c = newS.getChainByPDB(prevChainId); } else { try { c = newS.getChainByPDB(chain.getChainID()); } catch (StructureException e){ // chain not in structure yet... 
c = new ChainImpl(); c.setChainID(chain.getChainID()); newS.addChain(c); } } // add the groups to the chain: for ( Group g: groups) { c.addGroup(g); } prevChainId = c.getChainID(); } newS.setName(name.toString()); return newS; } public static final String convertAtomsToSeq(Atom[] atoms) { StringBuffer buf = new StringBuffer(); Group prevGroup = null; for (Atom a : atoms){ Group g = a.getGroup(); if ( prevGroup != null) { if ( prevGroup.equals(g)) { // we add each group only once. continue; } } String code3 = g.getPDBName(); try { buf.append(convert_3code_1code(code3) ); } catch (UnknownPdbAminoAcidException e){ buf.append('X'); } prevGroup = g; } return buf.toString(); } /** get a PDB residue number object for this group * * @param g Group object * @return a ResidueNumber object * @deprecated replaced by Group.getResidueNumber() */ public static final ResidueNumber getPDBResidueNumber(Group g){ return g.getResidueNumber(); } /** Get a group represented by a ResidueNumber. * * @param struc a {@link Structure} * @param pdbResNum a {@link ResidueNumber} * @return a group in the structure that is represented by the pdbResNum. * @throws StructureException if the group cannot be found. */ public static final Group getGroupByPDBResidueNumber(Structure struc, ResidueNumber pdbResNum) throws StructureException { if (struc == null || pdbResNum==null) { throw new IllegalArgumentException("Null argument(s)."); } Chain chain = struc.findChain(pdbResNum.getChainId()); // String numIns = "" + pdbResNum.getSeqNum(); // if (pdbResNum.getInsCode() != null) { // numIns += pdbResNum.getInsCode(); // } return chain.getGroupByPDB(pdbResNum); } /** * Finds Groups in {@code structure} that contain at least one Atom that is within {@code radius} Angstroms of {@code centroid}. 
* @param structure The structure from which to find Groups * @param centroid The centroid of the shell * @param excludeResidues A list of ResidueNumbers to exclude * @param radius The radius from {@code centroid}, in Angstroms * @param includeWater Whether to include Groups whose <em>only</em> atoms are water * @param useAverageDistance When set to true, distances are the arithmetic mean (1-norm) of the distances of atoms that belong to the group and that are within the shell; otherwise, distances are the minimum of these values * @return A map of Groups within (or partially within) the shell, to their distances in Angstroms */ public static Map<Group,Double> getGroupDistancesWithinShell(Structure structure, Atom centroid, Set<ResidueNumber> excludeResidues, double radius, boolean includeWater, boolean useAverageDistance) { // for speed, we avoid calculating square roots radius = radius * radius; Map<Group,Double> distances = new HashMap<Group,Double>(); // we only need this if we're averaging distances // note that we can't use group.getAtoms().size() because some the group's atoms be outside the shell Map<Group,Integer> atomCounts = new HashMap<Group,Integer>(); for (Chain chain : structure.getChains()) { groupLoop: for (Group chainGroup : chain.getAtomGroups()) { // exclude water if (!includeWater && chainGroup.getPDBName().equals("HOH")) continue; // check blacklist of residue numbers for (ResidueNumber rn : excludeResidues) { if (rn.equals(chainGroup.getResidueNumber())) continue groupLoop; } for (Atom testAtom : chainGroup.getAtoms()) { try { // use getDistanceFast as we are doing a lot of comparisons double dist = Calc.getDistanceFast(centroid, testAtom); // if we're the shell if (dist <= radius) { if (!distances.containsKey(chainGroup)) distances.put(chainGroup, Double.POSITIVE_INFINITY); if (useAverageDistance) { // sum the distance; we'll divide by the total number later // here, we CANNOT use fastDistance (distance squared) because we want the arithmetic 
mean distances.put(chainGroup, distances.get(chainGroup) + Math.sqrt(dist)); if (!atomCounts.containsKey(chainGroup)) atomCounts.put(chainGroup, 0); atomCounts.put(chainGroup, atomCounts.get(chainGroup) + 1); } else { // take the minimum distance among all atoms of chainGroup // note that we can't break here because we might find a smaller distance if (dist < distances.get(chainGroup)) { distances.put(chainGroup, dist); } } } } catch (StructureException ex) { Logger.getLogger(StructureTools.class.getName()).log(Level.SEVERE, null, ex); } } } } if (useAverageDistance) { for (Map.Entry<Group,Double> entry : distances.entrySet()) { int count = atomCounts.get(entry.getKey()); distances.put(entry.getKey(), entry.getValue() / count); } } else { // in this case we used getDistanceFast for (Map.Entry<Group,Double> entry : distances.entrySet()) { distances.put(entry.getKey(), Math.sqrt(entry.getValue())); } } return distances; } public static Set<Group> getGroupsWithinShell(Structure structure, Atom atom, Set<ResidueNumber> excludeResidues, double distance, boolean includeWater) { //square the distance to use as a comparison against getDistanceFast which returns the square of a distance. 
distance = distance * distance; Set<Group> returnSet = new LinkedHashSet<Group>(); for (Chain chain : structure.getChains()) { groupLoop: for (Group chainGroup : chain.getAtomGroups()) { if (!includeWater && chainGroup.getPDBName().equals("HOH")) continue; for (ResidueNumber rn : excludeResidues) { if (rn.equals(chainGroup.getResidueNumber())) continue groupLoop; } for (Atom atomB : chainGroup.getAtoms()) { try { //use getDistanceFast as we are doing a lot of comparisons double dist = Calc.getDistanceFast(atom, atomB); if (dist <= distance) { returnSet.add(chainGroup); break; } } catch (StructureException ex) { Logger.getLogger(StructureTools.class.getName()).log(Level.SEVERE, null, ex); } } } } return returnSet; } /* * Returns a List of Groups in a structure within the distance specified of a given group. */ public static List<Group> getGroupsWithinShell(Structure structure, Group group, double distance, boolean includeWater) { List<Group> returnList = new ArrayList<Group>(); Set<ResidueNumber> excludeGroups = new HashSet<ResidueNumber>(); excludeGroups.add(group.getResidueNumber()); for (Atom atom : group.getAtoms()) { Set<Group> set = getGroupsWithinShell(structure, atom, excludeGroups, distance, includeWater); returnList.addAll(set); } return returnList; } // This code relies on an old version of the Bond class. // // /* // * Very simple distance-based bond calculator. Will give approximations, // * but do not rely on this to be chemically correct. 
// */ // public static List<Bond> findBonds(Group group, List<Group> groups) { // List<Bond> bondList = new ArrayList<Bond>(); // for (Atom atomA : group.getAtoms()) { // for (Group groupB : groups) { // if (groupB.getType().equals(GroupType.HETATM)) { // continue; // } // for (Atom atomB : groupB.getAtoms()) { // try { // double dist = Calc.getDistance(atomA, atomB); // BondType bondType = BondType.UNDEFINED; // if (dist <= 2) { // bondType = BondType.COVALENT; // Bond bond = new Bond(dist, bondType, group, atomA, groupB, atomB); // bondList.add(bond); // // System.out.println(String.format("%s within %s of %s", atomB, dist, atomA)); // } // else if (dist <= 3.25) { // // if (isHbondDonorAcceptor(atomA) && isHbondDonorAcceptor(atomB)) { // bondType = BondType.HBOND; // } // else if (atomA.getElement().isMetal() && isHbondDonorAcceptor(atomB)) { // bondType = BondType.METAL; // } // else if (atomA.getElement().equals(Element.C) && atomB.getElement().equals(Element.C)) { // bondType = BondType.HYDROPHOBIC; // } // //not really interested in 'undefined' types // if (bondType != BondType.UNDEFINED) { // Bond bond = new Bond(dist, bondType, group, atomA, groupB, atomB); // bondList.add(bond); // } // // System.out.println(String.format("%s within %s of %s", atomB, dist, atomA)); // } else if (dist <= 3.9) { // if (atomA.getElement().equals(Element.C) && atomB.getElement().equals(Element.C)) { // bondType = BondType.HYDROPHOBIC; // } // //not really interested in 'undefined' types // if (bondType != BondType.UNDEFINED) { // Bond bond = new Bond(dist, bondType, group, atomA, groupB, atomB); // bondList.add(bond); // } // } // // } catch (StructureException ex) { // Logger.getLogger(StructureTools.class.getName()).log(Level.SEVERE, null, ex); // } // // } // } // } // // // return bondList; // } // // private static boolean isHbondDonorAcceptor(Atom atom) { // if (hBondDonorAcceptors.contains(atom.getElement())) { // return true; // } // return false; // } /** Remove all 
models from a Structure and keep only the first
	 *
	 * @param s original Structure
	 * @return a structure that contains only the first model
	 * @since 3.0.5
	 */
	@SuppressWarnings("deprecation")
	public static Structure removeModels(Structure s){
		if ( ! s.isNmr())
			return s;

		Structure n = new StructureImpl();
		// go through whole substructure and clone ...

		// copy structure data
		n.setNmr(true);

		n.setPDBCode(s.getPDBCode());
		n.setName(s.getName());

		// we are calling this legacy method for backwards compatibility
		n.setHeader(s.getHeader());

		//TODO: do deep copying of data!
		n.setPDBHeader(s.getPDBHeader());
		n.setDBRefs(s.getDBRefs());

		n.setConnections(s.getConnections());
		n.setSites(s.getSites());
		n.setCrystallographicInfo(s.getCrystallographicInfo());

		// keep only model 0
		n.setChains(s.getModel(0));

		return n;
	}

	/** Removes all polymeric and solvent groups from a list of groups
	 *
	 * @param allGroups the groups to filter
	 * @return only the ligand groups (neither polymer components nor water)
	 */
	public static List<Group> filterLigands(List<Group> allGroups){

		List<Group> groups = new ArrayList<Group>();
		for ( Group g: allGroups) {

			ChemComp cc = g.getChemComp();

			// skip anything that belongs to a protein or nucleotide polymer
			if ( ResidueType.lPeptideLinking.equals(cc.getResidueType()) ||
					PolymerType.PROTEIN_ONLY.contains(cc.getPolymerType()) ||
					PolymerType.POLYNUCLEOTIDE_ONLY.contains(cc.getPolymerType()) ){
				continue;
			}
			// skip the solvent
			if ( ! g.isWater()) {
				groups.add(g);
			}
		}

		return groups;
	}

	/**
	 * Short version of {@link #getStructure(String, PDBFileParser, AtomCache)}
	 * which creates new parsers when needed
	 * @param name a file name or a reference to a PDB structure
	 * @return the parsed or fetched Structure
	 * @throws IOException
	 * @throws StructureException
	 */
	public static Structure getStructure(String name) throws IOException, StructureException {
		return StructureTools.getStructure(name,null,null);
	}

	/**
	 * Flexibly get a structure from an input String. The intent of this method
	 * is to allow any reasonable string which could refer to a structure to be
	 * correctly parsed. The following are currently supported:
	 * <ol>
	 * <li>Filename (if name refers to an existing file)
	 * <li>PDB ID
	 * <li>SCOP domains
	 * <li>PDP domains
	 * <li>Residue ranges
	 * <li>Other formats supported by AtomCache
	 * </ol>
	 * @param name Some reference to the protein structure
	 * @param parser A clean PDBFileParser to use if it is a file. If null,
	 *  a PDBFileParser will be instantiated if needed.
	 * @param cache An AtomCache to use if the structure can be fetched from the
	 *  PDB. If null, a AtomCache will be instantiated if needed.
	 * @return A Structure object
	 * @throws IOException if name is an existing file, but doesn't parse correctly
	 * @throws StructureException if the format is unknown, or if AtomCache throws
	 *  an exception.
	 */
	public static Structure getStructure(String name,PDBFileParser parser, AtomCache cache) throws IOException, StructureException {
		File f = new File(name);
		if(f.exists()) {
			if(parser == null) {
				parser = new PDBFileParser();
			}
			InputStream inStream = new FileInputStream(f);
			try {
				return parser.parsePDBFile(inStream);
			} finally {
				// BUGFIX: the stream was never closed in the original code and
				// leaked whenever parsing threw an exception.
				inStream.close();
			}
		} else {
			if( cache == null) {
				cache = new AtomCache();
			}
			return cache.getStructure(name);
		}
	}
}
biojava3-structure/src/main/java/org/biojava/bio/structure/StructureTools.java
/* * BioJava development code * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. If you do not have a copy, * see: * * http://www.gnu.org/copyleft/lesser.html * * Copyright for this code is held jointly by the individual * authors. These should be listed in @author doc comments. * * For more information on the BioJava project and its aims, * or to join the biojava-l mailing list, visit the home page * at: * * http://www.biojava.org/ * * Created on Jan 4, 2006 * */ package org.biojava.bio.structure; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.StringWriter; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.biojava.bio.structure.align.util.AtomCache; import org.biojava.bio.structure.io.PDBFileParser; import org.biojava.bio.structure.io.mmcif.chem.PolymerType; import org.biojava.bio.structure.io.mmcif.chem.ResidueType; import org.biojava.bio.structure.io.mmcif.model.ChemComp; /** * A class that provides some tool methods. * * @author Andreas Prlic, Jules Jacobsen * @since 1.0 * @version %I% %G% */ public class StructureTools { /** The Atom name of C-alpha atoms. * */ public static final String caAtomName = " CA "; public static final String nAtomName = "N"; public static final String oAtomName = "O"; public static final String cbAtomName = "CB"; /** The names of the Atoms that form the backbone. 
* */ public static final String[] backboneAtomNames = {nAtomName,caAtomName,"C",oAtomName, cbAtomName}; public static final Character UNKNOWN_GROUP_LABEL = new Character('x');; //private static final String insertionCodeRegExp = "([0-9]+)([a-zA-Z]*)"; //private static final Pattern insertionCodePattern = Pattern.compile(insertionCodeRegExp); // there is a file format change in PDB 3.0 and nucleotides are being renamed static private Map<String, Integer> nucleotides30 ; static private Map<String, Integer> nucleotides23 ; //amino acid 3 and 1 letter code definitions private static final Map<String, Character> aminoAcids; private static final Set<Element> hBondDonorAcceptors; // // for conversion 3code 1code // private static SymbolTokenization threeLetter ; // private static SymbolTokenization oneLetter ; public static Logger logger = Logger.getLogger("org.biojava.bio.structure"); /** * Pattern to describe subranges. Matches "A", "A:", "A:7-53","A_7-53", etc. * @see #getSubRanges(Structure, String) */ public static final Pattern pdbNumRangeRegex = Pattern.compile( "^\\s*(\\w)" + //chain ID "(?:" + //begin range, this is a "non-capturing group" "(?::|_|:$|_$|$)" + //colon or underscore, could be at the end of a line, another non-capt. group. "(?:"+ // another non capturing group for the residue range "([-+]?[0-9]+[A-Za-z]?)" + // first residue "\\s*-\\s*" + // - "([-+]?[0-9]+[A-Za-z]?)" + // second residue ")?+"+ ")?" 
+ //end range "\\s*"); static { nucleotides30 = new HashMap<String,Integer>(); nucleotides30.put("DA",1); nucleotides30.put("DC",1); nucleotides30.put("DG",1); nucleotides30.put("DT",1); nucleotides30.put("DI",1); nucleotides30.put("A",1); nucleotides30.put("G",1); nucleotides30.put("C",1); nucleotides30.put("U",1); nucleotides30.put("I",1); //TODO: check if they are always HETATMs, in that case this will not be necessary // the DNA linkers - the +C , +G, +A +T +U and +I have been replaced with these: nucleotides30.put("TAF",1); // 2'-DEOXY-2'-FLUORO-ARABINO-FURANOSYL THYMINE-5'-PHOSPHATE nucleotides30.put("TC1",1); // 3-(5-PHOSPHO-2-DEOXY-BETA-D-RIBOFURANOSYL)-2-OXO-1,3-DIAZA-PHENOTHIAZINE nucleotides30.put("TFE",1); // 2'-O-[2-(TRIFLUORO)ETHYL] THYMIDINE-5'-MONOPHOSPHATE nucleotides30.put("TFO",1); // [2-(6-AMINO-9H-PURIN-9-YL)-1-METHYLETHOXY]METHYLPHOSPHONIC ACID" nucleotides30.put("TGP",1); // 5'-THIO-2'-DEOXY-GUANOSINE PHOSPHONIC ACID nucleotides30.put("THX",1); // PHOSPHONIC ACID 6-({6-[6-(6-CARBAMOYL-3,6,7,8-TETRAHYDRO-3,6-DIAZA-AS-INDACENE-2-CARBONYL)-3,6,7,8-TETRAHYDRO-3,6-DIAZA-AS-INDOCENE-2-CARBONYL]-3,6,7,8-TETRAHYDRO-3,6-DIAZA-AS-INDACENE-2-CARBONL}-AMINO)-HEXYL ESTER 5-(5-METHYL-2,4-DIOXO-3,4-DIHYDRO-2H-PYRIMIDIN-1-YL)-TETRAHYDRO-FURAN-2-YLMETHYL ESTER nucleotides30.put("TLC",1); // 2-O,3-ETHDIYL-ARABINOFURANOSYL-THYMINE-5'-MONOPHOSPHATE nucleotides30.put("TLN",1); // [(1R,3R,4R,7S)-7-HYDROXY-3-(THYMIN-1-YL)-2,5-DIOXABICYCLO[2.2.1]HEPT-1-YL]METHYL DIHYDROGEN PHOSPHATE" nucleotides30.put("TP1",1); // 2-(METHYLAMINO)-ETHYLGLYCINE-CARBONYLMETHYLENE-THYMINE nucleotides30.put("TPC",1); // 5'-THIO-2'-DEOXY-CYTOSINE PHOSPHONIC ACID nucleotides30.put("TPN",1); // 2-AMINOETHYLGLYCINE-CARBONYLMETHYLENE-THYMINE // store nucleic acids (C, G, A, T, U, and I), and // the modified versions of nucleic acids (+C, +G, +A, +T, +U, and +I), and nucleotides23 = new HashMap<String,Integer>(); String[] names = {"C","G","A","T","U","I","+C","+G","+A","+T","+U","+I"}; for 
(int i = 0; i < names.length; i++) { String n = names[i]; nucleotides23.put(n,1); } aminoAcids = new HashMap<String, Character>(); aminoAcids.put("GLY", new Character('G')); aminoAcids.put("ALA", new Character('A')); aminoAcids.put("VAL", new Character('V')); aminoAcids.put("LEU", new Character('L')); aminoAcids.put("ILE", new Character('I')); aminoAcids.put("PHE", new Character('F')); aminoAcids.put("TYR", new Character('Y')); aminoAcids.put("TRP", new Character('W')); aminoAcids.put("PRO", new Character('P')); aminoAcids.put("HIS", new Character('H')); aminoAcids.put("LYS", new Character('K')); aminoAcids.put("ARG", new Character('R')); aminoAcids.put("SER", new Character('S')); aminoAcids.put("THR", new Character('T')); aminoAcids.put("GLU", new Character('E')); aminoAcids.put("GLN", new Character('Q')); aminoAcids.put("ASP", new Character('D')); aminoAcids.put("ASN", new Character('N')); aminoAcids.put("CYS", new Character('C')); aminoAcids.put("MET", new Character('M')); //MSE is only found as a molecular replacement for MET aminoAcids.put("MSE", new Character('M')); //'non-standard', genetically encoded //http://www.chem.qmul.ac.uk/iubmb/newsletter/1999/item3.html //IUBMB recommended name is 'SEC' but the wwPDB currently use 'CSE' //likewise 'PYL' (IUBMB) and 'PYH' (PDB) aminoAcids.put("CSE", new Character('U')); aminoAcids.put("SEC", new Character('U')); aminoAcids.put("PYH", new Character('O')); aminoAcids.put("PYL", new Character('O')); hBondDonorAcceptors = new HashSet<Element>(); hBondDonorAcceptors.add(Element.N); hBondDonorAcceptors.add(Element.O); hBondDonorAcceptors.add(Element.S); } /** Count how many number of Atoms are contained within a Structure object. 
* * @param s the structure object * @return the number of Atoms in this Structure */ public static final int getNrAtoms(Structure s){ int nrAtoms = 0; Iterator<Group> iter = new GroupIterator(s); while ( iter.hasNext()){ Group g = (Group) iter.next(); nrAtoms += g.size(); } return nrAtoms; } /** Count how many groups are contained within a structure object. * * @param s the structure object * @return the number of groups in the structure */ public static final int getNrGroups(Structure s){ int nrGroups = 0; List<Chain> chains = s.getChains(0); Iterator<Chain> iter = chains.iterator(); while (iter.hasNext()){ Chain c = (Chain) iter.next(); nrGroups += c.getAtomLength(); } return nrGroups; } /** Returns an array of the requested Atoms from the Structure object. Iterates over all groups * and checks if the requested atoms are in this group, no matter if this is a {@link AminoAcid} or {@link HetatomImpl} group. * For structures with more than one model, only model 0 will be used. * * @param s the structure to get the atoms from * * @param atomNames contains the atom names to be used. * @return an Atom[] array */ public static final Atom[] getAtomArray(Structure s, String[] atomNames){ List<Chain> chains = s.getModel(0); List<Atom> atoms = new ArrayList<Atom>(); extractCAatoms(atomNames, chains, atoms); return (Atom[]) atoms.toArray(new Atom[atoms.size()]); } /** Returns an array of the requested Atoms from the Structure object. * In contrast to {@link #getAtomArray(Structure, String[])} this method iterates over all chains. * Iterates over all chains and groups * and checks if the requested atoms are in this group, no matter if this is a {@link AminoAcid} or {@link HetatomImpl} group. * For structures with more than one model, only model 0 will be used. * * @param s the structure to get the atoms from * * @param atomNames contains the atom names to be used. 
* @return an Atom[] array */ public static final Atom[] getAtomArrayAllModels(Structure s, String[] atomNames){ List<Atom> atoms = new ArrayList<Atom>(); for (int i =0 ; i < s.nrModels(); i++ ) { List<Chain> chains = s.getModel(i); extractCAatoms(atomNames, chains, atoms); } return (Atom[]) atoms.toArray(new Atom[atoms.size()]); } /** Convert all atoms of the structure (first model) into an Atom array * * @param s input structure * @return all atom array */ public static final Atom[] getAllAtomArray(Structure s) { List<Atom> atoms = new ArrayList<Atom>(); AtomIterator iter = new AtomIterator(s); while (iter.hasNext()){ Atom a = iter.next(); atoms.add(a); } return (Atom[]) atoms.toArray(new Atom[atoms.size()]); } private static void extractCAatoms(String[] atomNames, List<Chain> chains, List<Atom> atoms) { for ( Chain c : chains) { for ( Group g : c.getAtomGroups()) { // a temp container for the atoms of this group List<Atom> thisGroupAtoms = new ArrayList<Atom>(); // flag to check if this group contains all the requested atoms. boolean thisGroupAllAtoms = true; for ( int i = 0 ; i < atomNames.length; i++){ String atomName = atomNames[i]; try { Atom a = g.getAtom(atomName); thisGroupAtoms.add(a); } catch (StructureException e){ // this group does not have a required atom, skip it... thisGroupAllAtoms = false; break; } } if ( thisGroupAllAtoms){ // add the atoms of this group to the array. Iterator<Atom> aIter = thisGroupAtoms.iterator(); while(aIter.hasNext()){ Atom a = (Atom) aIter.next(); atoms.add(a); } } } } } /** Returns an array of the requested Atoms from the Structure object. Iterates over all groups * and checks if the requested atoms are in this group, no matter if this is a AminoAcid or Hetatom group. * * * @param c the Chain to get the atoms from * * @param atomNames contains the atom names to be used. 
* @return an Atom[] array */ public static final Atom[] getAtomArray(Chain c, String[] atomNames){ List<Group> groups = c.getAtomGroups(); List<Atom> atoms = new ArrayList<Atom>(); for (Group g : groups){ // a temp container for the atoms of this group List<Atom> thisGroupAtoms = new ArrayList<Atom>(); // flag to check if this group contains all the requested atoms. boolean thisGroupAllAtoms = true; for ( int i = 0 ; i < atomNames.length; i++){ String atomName = atomNames[i]; try { Atom a = g.getAtom(atomName); thisGroupAtoms.add(a); } catch (StructureException e){ // this group does not have a required atom, skip it... thisGroupAllAtoms = false; break; } } if ( thisGroupAllAtoms){ // add the atoms of this group to the array. Iterator<Atom> aIter = thisGroupAtoms.iterator(); while(aIter.hasNext()){ Atom a = (Atom) aIter.next(); atoms.add(a); } } } return (Atom[]) atoms.toArray(new Atom[atoms.size()]); } /** Returns an Atom array of the CA atoms. * @param c the structure object * @return an Atom[] array */ public static final Atom[] getAtomCAArray(Chain c){ String[] atomNames = {" CA " }; return getAtomArray(c,atomNames); } /** Provides an equivalent copy of Atoms in a new array. Clones everything, starting with parent * groups and chains. The chain will only contain groups that are part of the CA array. 
* * @param ca array of CA atoms * @return Atom array */ public static final Atom[] cloneCAArray(Atom[] ca) throws StructureException{ Atom[] newCA = new Atom[ca.length]; List<Chain> model = new ArrayList<Chain>(); int apos = -1; for(Atom a: ca){ apos++; Group parentG = a.getGroup(); Chain parentC = parentG.getChain(); Chain newChain = null; for ( Chain c : model){ if ( c.getChainID().equals(parentC.getChainID())){ newChain = c; break; } } if ( newChain == null){ newChain = new ChainImpl(); newChain.setChainID(parentC.getChainID()); model.add(newChain); } Group parentN = (Group)parentG.clone(); newCA[apos] = parentN.getAtom(" CA "); newChain.addGroup(parentN); } return newCA; } /** Clone a set of CA Atoms, but returns the parent groups * * @param ca Atom array * @return Group array */ public static Group[] cloneGroups(Atom[] ca) { Group[] newGroup = new Group[ca.length]; List<Chain> model = new ArrayList<Chain>(); int apos = -1; for(Atom a: ca){ apos++; Group parentG = a.getGroup(); Chain parentC = parentG.getChain(); Chain newChain = null; for ( Chain c : model){ if ( c.getChainID().equals(parentC.getChainID())){ newChain = c; break; } } if ( newChain == null){ newChain = new ChainImpl(); newChain.setChainID(parentC.getChainID()); model.add(newChain); } Group ng = (Group)parentG.clone(); newGroup[apos] = ng; newChain.addGroup(ng); } return newGroup; } /** Utility method for working with circular permutations. Creates a duplicated and cloned set of Calpha atoms from the input array. * * @param ca2 atom array * @return cloned and duplicated set of input array * @throws StructureException */ public static Atom[] duplicateCA2(Atom[] ca2) throws StructureException{ // we don't want to rotate input atoms, do we? 
Atom[] ca2clone = new Atom[ca2.length*2]; int pos = 0; Chain c = null; String prevChainId = ""; for (Atom a : ca2){ Group g = (Group) a.getGroup().clone(); // works because each group has only a CA atom if (c == null ) { c = new ChainImpl(); Chain orig= a.getGroup().getChain(); c.setChainID(orig.getChainID()); } else { Chain orig= a.getGroup().getChain(); if ( ! orig.getChainID().equals(prevChainId)){ c = new ChainImpl(); c.setChainID(orig.getChainID()); } } c.addGroup(g); ca2clone[pos] = g.getAtom(StructureTools.caAtomName); pos++; } // Duplicate ca2! c = null; prevChainId = ""; for (Atom a : ca2){ Group g = (Group)a.getGroup().clone(); if (c == null ) { c = new ChainImpl(); Chain orig= a.getGroup().getChain(); c.setChainID(orig.getChainID()); } else { Chain orig= a.getGroup().getChain(); if ( ! orig.getChainID().equals(prevChainId)){ c = new ChainImpl(); c.setChainID(orig.getChainID()); } } c.addGroup(g); ca2clone[pos] = g.getAtom(StructureTools.caAtomName); pos++; } return ca2clone; } /** Returns an Atom array of the CA atoms. * @param s the structure object * @return an Atom[] array */ public static Atom[] getAtomCAArray(Structure s){ String[] atomNames = {" CA "}; return getAtomArray(s,atomNames); } /** Returns an Atom array of the MainChain atoms. * @param s the structure object * @return an Atom[] array */ public static Atom[] getBackboneAtomArray(Structure s){ String[] atomNames = backboneAtomNames; return getAtomArray(s,atomNames); } /** convert three character amino acid codes into single character * e.g. 
convert CYS to C * @return a character * @param code3 a three character amino acid representation String * @throws IllegalSymbolException */ public static final Character convert_3code_1code(String code3) throws UnknownPdbAminoAcidException { // { // Symbol sym = threeLetter.parseToken(code3) ; // String code1 = oneLetter.tokenizeSymbol(sym); // // return new Character(code1.charAt(0)) ; Character code1 = null; code1 = aminoAcids.get(code3); if (code1 == null) { throw new UnknownPdbAminoAcidException(code3 + " not a standard amino acid"); } else { return code1; } } /** convert a three letter code into single character. * catches for unusual characters * * @param groupCode3 three letter representation * @return null if group is a nucleotide code */ public static final Character get1LetterCode(String groupCode3){ Character aminoCode1 = null; try { // is it a standard amino acid ? aminoCode1 = convert_3code_1code(groupCode3); } catch (UnknownPdbAminoAcidException e){ // hm groupCode3 is not standard // perhaps it is an nucleotide? if ( isNucleotide(groupCode3) ) { //System.out.println("nucleotide, aminoCode1:"+aminoCode1); aminoCode1= null; } else { // does not seem to be so let's assume it is // nonstandard aminoacid and label it "X" //logger.warning("unknown group name "+groupCode3 ); aminoCode1 = UNKNOWN_GROUP_LABEL; } } return aminoCode1; } /* Test if the threelettercode of an ATOM entry corresponds to a * nucleotide or to an aminoacid. * @param a 3-character code for a group. * */ public static final boolean isNucleotide(String groupCode3){ String code = groupCode3.trim(); if ( nucleotides30.containsKey(code)){ return true; } if ( nucleotides23.containsKey(code)){ return true; } return false ; } /** Reduce a structure to provide a smaller representation . Only takes the first model of the structure. If chainId is provided only return a structure containing that Chain ID. 
* Converts lower case chain IDs to upper case if structure does not contain a chain with that ID. * * @param s * @param chainId * @return Structure * @since 3.0 */ @SuppressWarnings("deprecation") public static final Structure getReducedStructure(Structure s, String chainId) throws StructureException{ // since we deal here with structure alignments, // only use Model 1... Structure newS = new StructureImpl(); newS.setHeader(s.getHeader()); newS.setPDBCode(s.getPDBCode()); newS.setPDBHeader(s.getPDBHeader()); newS.setName(s.getName()); newS.setSSBonds(s.getSSBonds()); newS.setDBRefs(s.getDBRefs()); newS.setSites(s.getSites()); newS.setNmr(s.isNmr()); newS.setBiologicalAssembly(s.isBiologicalAssembly()); newS.setCompounds(s.getCompounds()); newS.setConnections(s.getConnections()); newS.setSSBonds(s.getSSBonds()); newS.setSites(s.getSites()); if ( chainId != null) chainId = chainId.trim(); if ( chainId == null || chainId.equals("")){ // only get model 0 List<Chain> model0 = s.getModel(0); for (Chain c : model0){ newS.addChain(c); } return newS; } Chain c = null; try { c = s.getChainByPDB(chainId); } catch (StructureException e){ System.err.println(e.getMessage() + " trying upper case Chain id..."); c = s.getChainByPDB(chainId.toUpperCase()); } if ( c != null) { newS.addChain(c); for ( Compound comp : s.getCompounds()){ if ( comp.getChainId().contains(c.getChainID())){ // found matching compound. set description... newS.getPDBHeader().setDescription("Chain " + c.getChainID() + " of " + s.getPDBCode() + " " + comp.getMolName()); } } } return newS; } /** Reduce a structure to provide a smaller representation. * Only takes the first model of the structure. If chainNr >=0 only takes * the chain at that position into account. 
* * @param s * @param chainNr can be -1 to request all chains of model 0, otherwise will only add chain at this position * @return Structure object * @since 3.0 */ @SuppressWarnings("deprecation") public static final Structure getReducedStructure(Structure s, int chainNr) throws StructureException{ // since we deal here with structure alignments, // only use Model 1... Structure newS = new StructureImpl(); newS.setHeader(s.getHeader()); newS.setPDBCode(s.getPDBCode()); newS.setPDBHeader(s.getPDBHeader()); newS.setName(s.getName()); newS.setSSBonds(s.getSSBonds()); newS.setDBRefs(s.getDBRefs()); newS.setSites(s.getSites()); newS.setNmr(s.isNmr()); newS.setBiologicalAssembly(s.isBiologicalAssembly()); newS.setCompounds(s.getCompounds()); newS.setConnections(s.getConnections()); newS.setSSBonds(s.getSSBonds()); newS.setSites(s.getSites()); newS.setCrystallographicInfo(s.getCrystallographicInfo()); newS.getPDBHeader().setDescription("subset of " + s.getPDBCode() + " " + s.getPDBHeader().getDescription() ); if ( chainNr < 0 ) { // only get model 0 List<Chain> model0 = s.getModel(0); for (Chain c : model0){ newS.addChain(c); } return newS; } Chain c = null; c = s.getChain(0, chainNr); newS.addChain(c); return newS; } /** In addition to the functionality provided by getReducedStructure also provides a way to specify sub-regions of a structure with the following * specification: * * * ranges can be surrounded by ( and ). (but will be removed). 
* ranges are specified as * PDBresnum1 : PDBresnum2 * * a list of ranges is separated by , * * Example * 4GCR (A:1-83) * 1CDG (A:407-495,A:582-686) * 1CDG (A_407-495,A_582-686) * * @param s The full structure * @param ranges A comma-seperated list of ranges, optionally surrounded by parentheses * @return Substructure of s specified by ranges */ @SuppressWarnings("deprecation") public static final Structure getSubRanges(Structure s, String ranges ) throws StructureException { Structure struc = getReducedStructure(s, null); if ( ranges == null || ranges.equals("")) throw new IllegalArgumentException("ranges can't be null or empty"); ranges = ranges.trim(); if ( ranges.startsWith("(")) ranges = ranges.substring(1); if ( ranges.endsWith(")")) { ranges = ranges.substring(0,ranges.length()-1); } //special case: '-' means 'everything' if ( ranges.equals("-") ) { return s; } Structure newS = new StructureImpl(); newS.setHeader(s.getHeader()); newS.setPDBCode(s.getPDBCode()); newS.setPDBHeader(s.getPDBHeader()); newS.setName(s.getName()); newS.setDBRefs(s.getDBRefs()); newS.setNmr(s.isNmr()); newS.setBiologicalAssembly(s.isBiologicalAssembly()); newS.getPDBHeader().setDescription("sub-range " + ranges + " of " + newS.getPDBCode() + " " + s.getPDBHeader().getDescription()); newS.setCrystallographicInfo(s.getCrystallographicInfo()); // TODO The following should be only copied for atoms which are present in the range. //newS.setCompounds(s.getCompounds()); //newS.setConnections(s.getConnections()); //newS.setSSBonds(s.getSSBonds()); //newS.setSites(s.getSites()); String[] rangS =ranges.split(","); StringWriter name = new StringWriter(); name.append(s.getName()); boolean firstRange = true; String prevChainId = null; // parse the ranges, adding the specified residues to newS for ( String r: rangS){ // Match a single range, eg "A_4-27" Matcher matcher = pdbNumRangeRegex.matcher(r); if( ! 
matcher.matches() ){ throw new StructureException("wrong range specification, should be provided as chainID_pdbResnum1-pdbRensum2: "+ranges); } String chainId = matcher.group(1); Chain chain; if(chainId.equals("_") && struc.size() == 1) { // Handle special case of "_" chain for single-chain proteins chain = struc.getChain(0); } else { // Explicit chain chain = struc.getChainByPDB(chainId); } Group[] groups; String pdbresnumStart = matcher.group(2); String pdbresnumEnd = matcher.group(3); if ( ! firstRange){ name.append( ","); } else { name.append(AtomCache.CHAIN_SPLIT_SYMBOL); } if( pdbresnumStart != null && pdbresnumEnd != null) { // not a full chain //since Java doesn't allow '+' before integers, fix this up. if(pdbresnumStart.charAt(0) == '+') pdbresnumStart = pdbresnumStart.substring(1); if(pdbresnumEnd.charAt(0) == '+') pdbresnumEnd = pdbresnumEnd.substring(1); groups = chain.getGroupsByPDB(pdbresnumStart, pdbresnumEnd); name.append( chainId + AtomCache.UNDERSCORE + pdbresnumStart+"-" + pdbresnumEnd); } else { // full chain groups = chain.getAtomGroups().toArray(new Group[chain.getAtomGroups().size()]); name.append(chainId); } firstRange = true; // Create new chain, if needed Chain c = null; if ( prevChainId == null) { // first chain... c = new ChainImpl(); c.setChainID(chain.getChainID()); newS.addChain(c); } else if ( prevChainId.equals(chain.getChainID())) { c = newS.getChainByPDB(prevChainId); } else { try { c = newS.getChainByPDB(chain.getChainID()); } catch (StructureException e){ // chain not in structure yet... 
c = new ChainImpl(); c.setChainID(chain.getChainID()); newS.addChain(c); } } // add the groups to the chain: for ( Group g: groups) { c.addGroup(g); } prevChainId = c.getChainID(); } newS.setName(name.toString()); return newS; } public static final String convertAtomsToSeq(Atom[] atoms) { StringBuffer buf = new StringBuffer(); Group prevGroup = null; for (Atom a : atoms){ Group g = a.getGroup(); if ( prevGroup != null) { if ( prevGroup.equals(g)) { // we add each group only once. continue; } } String code3 = g.getPDBName(); try { buf.append(convert_3code_1code(code3) ); } catch (UnknownPdbAminoAcidException e){ buf.append('X'); } prevGroup = g; } return buf.toString(); } /** get a PDB residue number object for this group * * @param g Group object * @return a ResidueNumber object * @deprecated replaced by Group.getResidueNumber() */ public static final ResidueNumber getPDBResidueNumber(Group g){ return g.getResidueNumber(); } /** Get a group represented by a ResidueNumber. * * @param struc a {@link Structure} * @param pdbResNum a {@link ResidueNumber} * @return a group in the structure that is represented by the pdbResNum. * @throws StructureException if the group cannot be found. */ public static final Group getGroupByPDBResidueNumber(Structure struc, ResidueNumber pdbResNum) throws StructureException { if (struc == null || pdbResNum==null) { throw new IllegalArgumentException("Null argument(s)."); } Chain chain = struc.findChain(pdbResNum.getChainId()); // String numIns = "" + pdbResNum.getSeqNum(); // if (pdbResNum.getInsCode() != null) { // numIns += pdbResNum.getInsCode(); // } return chain.getGroupByPDB(pdbResNum); } public static Set<Group> getGroupsWithinShell(Structure structure, Atom atom, Set<ResidueNumber> excludeResidues, double distance, boolean includeWater) { //square the distance to use as a comparison against getDistanceFast which returns the square of a distance. 
distance = distance * distance; Set<Group> returnSet = new LinkedHashSet<Group>(); for (Chain chain : structure.getChains()) { groupLoop: for (Group chainGroup : chain.getAtomGroups()) { for (ResidueNumber rn : excludeResidues) { if (rn.equals(chainGroup.getResidueNumber())) continue groupLoop; } // if (excludeResidues.contains(chainGroup.getResidueNumber())) continue; if (!includeWater && chainGroup.getPDBName().equals("HOH")) continue; for (Atom atomB : chainGroup.getAtoms()) { try { //use getDistanceFast as we are doing a lot of comparisons double dist = Calc.getDistanceFast(atom, atomB); if (dist <= distance) { returnSet.add(chainGroup); break; } } catch (StructureException ex) { Logger.getLogger(StructureTools.class.getName()).log(Level.SEVERE, null, ex); } } } } return returnSet; } /* * Returns a List of Groups in a structure within the distance specified of a given group. */ public static List<Group> getGroupsWithinShell(Structure structure, Group group, double distance, boolean includeWater) { List<Group> returnList = new ArrayList<Group>(); Set<ResidueNumber> excludeGroups = new HashSet<ResidueNumber>(); excludeGroups.add(group.getResidueNumber()); for (Atom atom : group.getAtoms()) { Set<Group> set = getGroupsWithinShell(structure, atom, excludeGroups, distance, includeWater); returnList.addAll(set); } return returnList; } // This code relies on an old version of the Bond class. // // /* // * Very simple distance-based bond calculator. Will give approximations, // * but do not rely on this to be chemically correct. 
// */ // public static List<Bond> findBonds(Group group, List<Group> groups) { // List<Bond> bondList = new ArrayList<Bond>(); // for (Atom atomA : group.getAtoms()) { // for (Group groupB : groups) { // if (groupB.getType().equals(GroupType.HETATM)) { // continue; // } // for (Atom atomB : groupB.getAtoms()) { // try { // double dist = Calc.getDistance(atomA, atomB); // BondType bondType = BondType.UNDEFINED; // if (dist <= 2) { // bondType = BondType.COVALENT; // Bond bond = new Bond(dist, bondType, group, atomA, groupB, atomB); // bondList.add(bond); // // System.out.println(String.format("%s within %s of %s", atomB, dist, atomA)); // } // else if (dist <= 3.25) { // // if (isHbondDonorAcceptor(atomA) && isHbondDonorAcceptor(atomB)) { // bondType = BondType.HBOND; // } // else if (atomA.getElement().isMetal() && isHbondDonorAcceptor(atomB)) { // bondType = BondType.METAL; // } // else if (atomA.getElement().equals(Element.C) && atomB.getElement().equals(Element.C)) { // bondType = BondType.HYDROPHOBIC; // } // //not really interested in 'undefined' types // if (bondType != BondType.UNDEFINED) { // Bond bond = new Bond(dist, bondType, group, atomA, groupB, atomB); // bondList.add(bond); // } // // System.out.println(String.format("%s within %s of %s", atomB, dist, atomA)); // } else if (dist <= 3.9) { // if (atomA.getElement().equals(Element.C) && atomB.getElement().equals(Element.C)) { // bondType = BondType.HYDROPHOBIC; // } // //not really interested in 'undefined' types // if (bondType != BondType.UNDEFINED) { // Bond bond = new Bond(dist, bondType, group, atomA, groupB, atomB); // bondList.add(bond); // } // } // // } catch (StructureException ex) { // Logger.getLogger(StructureTools.class.getName()).log(Level.SEVERE, null, ex); // } // // } // } // } // // // return bondList; // } // // private static boolean isHbondDonorAcceptor(Atom atom) { // if (hBondDonorAcceptors.contains(atom.getElement())) { // return true; // } // return false; // } /** Remove all 
models from a Structure and keep only the first * * @param s original Structure * @return a structure that contains only the first model * @since 3.0.5 */ @SuppressWarnings("deprecation") public static Structure removeModels(Structure s){ if ( ! s.isNmr()) return s; Structure n = new StructureImpl(); // go through whole substructure and clone ... // copy structure data n.setNmr(true); n.setPDBCode(s.getPDBCode()); n.setName(s.getName()); // we are calling this legacy menthod for backwards compatibility n.setHeader(s.getHeader()); //TODO: do deep copying of data! n.setPDBHeader(s.getPDBHeader()); n.setDBRefs(s.getDBRefs()); n.setConnections(s.getConnections()); n.setSites(s.getSites()); n.setCrystallographicInfo(s.getCrystallographicInfo()); n.setChains(s.getModel(0)); return n; } /** Removes all polymeric and solvent groups from a list of groups * */ public static List<Group> filterLigands(List<Group> allGroups){ //String prop = System.getProperty(PDBFileReader.LOAD_CHEM_COMP_PROPERTY); // if ( prop == null || ( ! prop.equalsIgnoreCase("true"))){ // System.err.println("You did not specify PDBFileReader.setLoadChemCompInfo, need to fetch Chemical Components anyways."); // } List<Group> groups = new ArrayList<Group>(); for ( Group g: allGroups) { ChemComp cc = g.getChemComp(); if ( ResidueType.lPeptideLinking.equals(cc.getResidueType()) || PolymerType.PROTEIN_ONLY.contains(cc.getPolymerType()) || PolymerType.POLYNUCLEOTIDE_ONLY.contains(cc.getPolymerType()) ){ continue; } if ( ! 
g.isWater()) { //System.out.println("not a prot, nuc or solvent : " + g.getChemComp()); groups.add(g); } } return groups; } /** * Short version of {@link #getStructure(String, PDBFileParser, AtomCache)} * which creates new parsers when needed * @param name * @return * @throws IOException * @throws StructureException */ public static Structure getStructure(String name) throws IOException, StructureException { return StructureTools.getStructure(name,null,null); } /** * Flexibly get a structure from an input String. The intent of this method * is to allow any reasonable string which could refer to a structure to be * correctly parsed. The following are currently supported: * <ol> * <li>Filename (if name refers to an existing file) * <li>PDB ID * <li>SCOP domains * <li>PDP domains * <li>Residue ranges * <li>Other formats supported by AtomCache * </ol> * @param name Some reference to the protein structure * @param parser A clean PDBFileParser to use if it is a file. If null, * a PDBFileParser will be instantiated if needed. * @param cache An AtomCache to use if the structure can be fetched from the * PDB. If null, a AtomCache will be instantiated if needed. * @return A Structure object * @throws IOException if name is an existing file, but doesn't parse correctly * @throws StructureException if the format is unknown, or if AtomCache throws * an exception. */ public static Structure getStructure(String name,PDBFileParser parser, AtomCache cache) throws IOException, StructureException { File f = new File(name); if(f.exists()) { if(parser == null) { parser = new PDBFileParser(); } InputStream inStream = new FileInputStream(f); return parser.parsePDBFile(inStream); } else { if( cache == null) { cache = new AtomCache(); } return cache.getStructure(name); } } }
Wrote a method getGroupDistancesWithinShell.
biojava3-structure/src/main/java/org/biojava/bio/structure/StructureTools.java
Wrote a method getGroupDistancesWithinShell.
<ide><path>iojava3-structure/src/main/java/org/biojava/bio/structure/StructureTools.java <ide> return chain.getGroupByPDB(pdbResNum); <ide> } <ide> <add> <add> /** <add> * Finds Groups in {@code structure} that contain at least one Atom that is within {@code radius} Angstroms of {@code centroid}. <add> * @param structure The structure from which to find Groups <add> * @param centroid The centroid of the shell <add> * @param excludeResidues A list of ResidueNumbers to exclude <add> * @param radius The radius from {@code centroid}, in Angstroms <add> * @param includeWater Whether to include Groups whose <em>only</em> atoms are water <add> * @param useAverageDistance When set to true, distances are the arithmetic mean (1-norm) of the distances of atoms that belong to the group and that are within the shell; otherwise, distances are the minimum of these values <add> * @return A map of Groups within (or partially within) the shell, to their distances in Angstroms <add> */ <add> public static Map<Group,Double> getGroupDistancesWithinShell(Structure structure, Atom centroid, Set<ResidueNumber> excludeResidues, double radius, boolean includeWater, boolean useAverageDistance) { <add> <add> // for speed, we avoid calculating square roots <add> radius = radius * radius; <add> <add> Map<Group,Double> distances = new HashMap<Group,Double>(); <add> <add> // we only need this if we're averaging distances <add> // note that we can't use group.getAtoms().size() because some the group's atoms be outside the shell <add> Map<Group,Integer> atomCounts = new HashMap<Group,Integer>(); <add> <add> for (Chain chain : structure.getChains()) { <add> groupLoop: for (Group chainGroup : chain.getAtomGroups()) { <add> <add> // exclude water <add> if (!includeWater && chainGroup.getPDBName().equals("HOH")) continue; <add> <add> // check blacklist of residue numbers <add> for (ResidueNumber rn : excludeResidues) { <add> if (rn.equals(chainGroup.getResidueNumber())) continue groupLoop; <add> } 
<add> <add> for (Atom testAtom : chainGroup.getAtoms()) { <add> <add> try { <add> <add> // use getDistanceFast as we are doing a lot of comparisons <add> double dist = Calc.getDistanceFast(centroid, testAtom); <add> <add> // if we're the shell <add> if (dist <= radius) { <add> if (!distances.containsKey(chainGroup)) distances.put(chainGroup, Double.POSITIVE_INFINITY); <add> if (useAverageDistance) { <add> // sum the distance; we'll divide by the total number later <add> // here, we CANNOT use fastDistance (distance squared) because we want the arithmetic mean <add> distances.put(chainGroup, distances.get(chainGroup) + Math.sqrt(dist)); <add> if (!atomCounts.containsKey(chainGroup)) atomCounts.put(chainGroup, 0); <add> atomCounts.put(chainGroup, atomCounts.get(chainGroup) + 1); <add> } else { <add> // take the minimum distance among all atoms of chainGroup <add> // note that we can't break here because we might find a smaller distance <add> if (dist < distances.get(chainGroup)) { <add> distances.put(chainGroup, dist); <add> } <add> } <add> } <add> <add> } catch (StructureException ex) { <add> Logger.getLogger(StructureTools.class.getName()).log(Level.SEVERE, null, ex); <add> } <add> <add> } <add> } <add> } <add> <add> if (useAverageDistance) { <add> for (Map.Entry<Group,Double> entry : distances.entrySet()) { <add> int count = atomCounts.get(entry.getKey()); <add> distances.put(entry.getKey(), entry.getValue() / count); <add> } <add> } else { <add> // in this case we used getDistanceFast <add> for (Map.Entry<Group,Double> entry : distances.entrySet()) { <add> distances.put(entry.getKey(), Math.sqrt(entry.getValue())); <add> } <add> } <add> <add> return distances; <add> <add> } <add> <ide> public static Set<Group> getGroupsWithinShell(Structure structure, Atom atom, Set<ResidueNumber> excludeResidues, double distance, boolean includeWater) { <ide> <ide> //square the distance to use as a comparison against getDistanceFast which returns the square of a distance. 
<ide> Set<Group> returnSet = new LinkedHashSet<Group>(); <ide> for (Chain chain : structure.getChains()) { <ide> groupLoop: for (Group chainGroup : chain.getAtomGroups()) { <add> if (!includeWater && chainGroup.getPDBName().equals("HOH")) continue; <ide> for (ResidueNumber rn : excludeResidues) { <ide> if (rn.equals(chainGroup.getResidueNumber())) continue groupLoop; <ide> } <del>// if (excludeResidues.contains(chainGroup.getResidueNumber())) continue; <del> if (!includeWater && chainGroup.getPDBName().equals("HOH")) continue; <ide> for (Atom atomB : chainGroup.getAtoms()) { <ide> try { <ide> //use getDistanceFast as we are doing a lot of comparisons <ide> public static List<Group> getGroupsWithinShell(Structure structure, Group group, double distance, boolean includeWater) { <ide> <ide> List<Group> returnList = new ArrayList<Group>(); <del> <add> <ide> Set<ResidueNumber> excludeGroups = new HashSet<ResidueNumber>(); <ide> excludeGroups.add(group.getResidueNumber()); <ide> for (Atom atom : group.getAtoms()) { <ide> Set<Group> set = getGroupsWithinShell(structure, atom, excludeGroups, distance, includeWater); <ide> returnList.addAll(set); <ide> } <del> <add> <ide> return returnList; <ide> } <ide>
Java
mit
d76585d431f07763db5167b9223cf7b7b08d53a7
0
sniffy/sniffy,sniffy/sniffy,bedrin/jdbc-sniffer,sniffy/sniffy
package io.sniffy.nio; import io.sniffy.util.ExceptionUtil; import io.sniffy.util.OSUtil; import io.sniffy.util.ReflectionCopier; import io.sniffy.util.StackTraceExtractor; import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement; import sun.nio.ch.SelChImpl; import sun.nio.ch.SelectionKeyImpl; import java.io.FileDescriptor; import java.io.IOException; import java.net.ServerSocket; import java.net.SocketAddress; import java.net.SocketOption; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.nio.channels.spi.AbstractSelectableChannel; import java.nio.channels.spi.SelectorProvider; import java.util.Set; import static io.sniffy.util.ReflectionUtil.invokeMethod; public class SniffyServerSocketChannel extends ServerSocketChannel implements SelChImpl { private static final ReflectionCopier<ServerSocketChannel> socketChannelFieldsCopier = new ReflectionCopier<ServerSocketChannel>(ServerSocketChannel.class, "provider"); private final ServerSocketChannel delegate; private final SelChImpl selChImplDelegate; public SniffyServerSocketChannel(SelectorProvider provider, ServerSocketChannel delegate) { super(provider); this.delegate = delegate; this.selChImplDelegate = (SelChImpl) delegate; } private void copyToDelegate() { socketChannelFieldsCopier.copy(this, delegate); } private void copyFromDelegate() { socketChannelFieldsCopier.copy(delegate, this); } @Override @IgnoreJRERequirement public ServerSocketChannel bind(SocketAddress local, int backlog) throws IOException { try { copyToDelegate(); delegate.bind(local, backlog); return this; } finally { copyFromDelegate(); } } @Override @IgnoreJRERequirement public <T> ServerSocketChannel setOption(SocketOption<T> name, T value) throws IOException { try { copyToDelegate(); delegate.setOption(name, value); return this; } finally { copyFromDelegate(); } } @Override public ServerSocket socket() { try { copyToDelegate(); return delegate.socket(); // TODO: should we wrap it with 
SniffyServerSocket ?? } finally { copyFromDelegate(); } } @Override public SocketChannel accept() throws IOException { try { copyToDelegate(); return OSUtil.isWindows() && StackTraceExtractor.hasClassInStackTrace("sun.nio.ch.Pipe") ? delegate.accept() : new SniffySocketChannelAdapter(provider(), delegate.accept()); } finally { copyFromDelegate(); } } @Override @IgnoreJRERequirement public SocketAddress getLocalAddress() throws IOException { try { copyToDelegate(); return delegate.getLocalAddress(); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } @Override public void implCloseSelectableChannel() { copyToDelegate(); try { invokeMethod(AbstractSelectableChannel.class, delegate, "implCloseSelectableChannel", Void.class); } catch (Exception e) { ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } @Override public void implConfigureBlocking(boolean block) { copyToDelegate(); try { invokeMethod(AbstractSelectableChannel.class, delegate, "implConfigureBlocking", Boolean.TYPE, block, Void.class); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } @Override @IgnoreJRERequirement public <T> T getOption(SocketOption<T> name) throws IOException { try { copyToDelegate(); return delegate.getOption(name); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } @Override @IgnoreJRERequirement public Set<SocketOption<?>> supportedOptions() { try { copyToDelegate(); return delegate.supportedOptions(); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } // Modern SelChImpl @Override public FileDescriptor getFD() { copyToDelegate(); try { return selChImplDelegate.getFD(); } finally { copyFromDelegate(); } } @Override public int getFDVal() { copyToDelegate(); try { return selChImplDelegate.getFDVal(); } finally { copyFromDelegate(); } } @Override public boolean 
translateAndUpdateReadyOps(int ops, SelectionKeyImpl ski) { copyToDelegate(); try { return selChImplDelegate.translateAndUpdateReadyOps(ops, ski); } finally { copyFromDelegate(); } } @Override public boolean translateAndSetReadyOps(int ops, SelectionKeyImpl ski) { copyToDelegate(); try { return selChImplDelegate.translateAndSetReadyOps(ops, ski); } finally { copyFromDelegate(); } } @Override public void kill() throws IOException { copyToDelegate(); try { selChImplDelegate.kill(); } finally { copyFromDelegate(); } } // Note: this method is absent in newer JDKs so we cannot use @Override annotation // @Override public void translateAndSetInterestOps(int ops, SelectionKeyImpl sk) { try { copyToDelegate(); invokeMethod(SelChImpl.class, selChImplDelegate, "translateAndSetInterestOps", Integer.TYPE, ops, SelectionKeyImpl.class, sk, Void.TYPE); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } // Note: this method was absent in earlier JDKs so we cannot use @Override annotation //@Override public int translateInterestOps(int ops) { try { copyToDelegate(); return invokeMethod(SelChImpl.class, selChImplDelegate, "translateInterestOps", Integer.TYPE, ops, Integer.TYPE); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } // Note: this method was absent in earlier JDKs so we cannot use @Override annotation //@Override public void park(int event, long nanos) throws IOException { try { copyToDelegate(); invokeMethod(SelChImpl.class, selChImplDelegate, "park", Integer.TYPE, event, Long.TYPE, nanos, Void.TYPE); } catch (Exception e) { throw ExceptionUtil.throwException(e); } finally { copyFromDelegate(); } } // Note: this method was absent in earlier JDKs so we cannot use @Override annotation //@Override public void park(int event) throws IOException { try { copyToDelegate(); invokeMethod(SelChImpl.class, selChImplDelegate, "park", Integer.TYPE, event, Void.TYPE); } catch 
(Exception e) { throw ExceptionUtil.throwException(e); } finally { copyFromDelegate(); } } }
sniffy-module-nio/src/main/java/io/sniffy/nio/SniffyServerSocketChannel.java
package io.sniffy.nio; import io.sniffy.util.ExceptionUtil; import io.sniffy.util.ReflectionCopier; import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement; import sun.nio.ch.SelChImpl; import sun.nio.ch.SelectionKeyImpl; import java.io.FileDescriptor; import java.io.IOException; import java.net.ServerSocket; import java.net.SocketAddress; import java.net.SocketOption; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; import java.nio.channels.spi.AbstractSelectableChannel; import java.nio.channels.spi.SelectorProvider; import java.util.Set; import static io.sniffy.util.ReflectionUtil.invokeMethod; public class SniffyServerSocketChannel extends ServerSocketChannel implements SelChImpl { private static final ReflectionCopier<ServerSocketChannel> socketChannelFieldsCopier = new ReflectionCopier<ServerSocketChannel>(ServerSocketChannel.class, "provider"); private final ServerSocketChannel delegate; private final SelChImpl selChImplDelegate; public SniffyServerSocketChannel(SelectorProvider provider, ServerSocketChannel delegate) { super(provider); this.delegate = delegate; this.selChImplDelegate = (SelChImpl) delegate; } private void copyToDelegate() { socketChannelFieldsCopier.copy(this, delegate); } private void copyFromDelegate() { socketChannelFieldsCopier.copy(delegate, this); } @Override @IgnoreJRERequirement public ServerSocketChannel bind(SocketAddress local, int backlog) throws IOException { try { copyToDelegate(); delegate.bind(local, backlog); return this; } finally { copyFromDelegate(); } } @Override @IgnoreJRERequirement public <T> ServerSocketChannel setOption(SocketOption<T> name, T value) throws IOException { try { copyToDelegate(); delegate.setOption(name, value); return this; } finally { copyFromDelegate(); } } @Override public ServerSocket socket() { try { copyToDelegate(); return delegate.socket(); // TODO: should we wrap it with SniffyServerSocket ?? 
} finally { copyFromDelegate(); } } @Override public SocketChannel accept() throws IOException { try { copyToDelegate(); return new SniffySocketChannelAdapter(provider(), delegate.accept()); } finally { copyFromDelegate(); } } @Override @IgnoreJRERequirement public SocketAddress getLocalAddress() throws IOException { try { copyToDelegate(); return delegate.getLocalAddress(); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } @Override public void implCloseSelectableChannel() { copyToDelegate(); try { invokeMethod(AbstractSelectableChannel.class, delegate, "implCloseSelectableChannel", Void.class); } catch (Exception e) { ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } @Override public void implConfigureBlocking(boolean block) { copyToDelegate(); try { invokeMethod(AbstractSelectableChannel.class, delegate, "implConfigureBlocking", Boolean.TYPE, block, Void.class); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } @Override @IgnoreJRERequirement public <T> T getOption(SocketOption<T> name) throws IOException { try { copyToDelegate(); return delegate.getOption(name); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } @Override @IgnoreJRERequirement public Set<SocketOption<?>> supportedOptions() { try { copyToDelegate(); return delegate.supportedOptions(); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } // Modern SelChImpl @Override public FileDescriptor getFD() { copyToDelegate(); try { return selChImplDelegate.getFD(); } finally { copyFromDelegate(); } } @Override public int getFDVal() { copyToDelegate(); try { return selChImplDelegate.getFDVal(); } finally { copyFromDelegate(); } } @Override public boolean translateAndUpdateReadyOps(int ops, SelectionKeyImpl ski) { copyToDelegate(); try { return 
selChImplDelegate.translateAndUpdateReadyOps(ops, ski); } finally { copyFromDelegate(); } } @Override public boolean translateAndSetReadyOps(int ops, SelectionKeyImpl ski) { copyToDelegate(); try { return selChImplDelegate.translateAndSetReadyOps(ops, ski); } finally { copyFromDelegate(); } } @Override public void kill() throws IOException { copyToDelegate(); try { selChImplDelegate.kill(); } finally { copyFromDelegate(); } } // Note: this method is absent in newer JDKs so we cannot use @Override annotation // @Override public void translateAndSetInterestOps(int ops, SelectionKeyImpl sk) { try { copyToDelegate(); invokeMethod(SelChImpl.class, selChImplDelegate, "translateAndSetInterestOps", Integer.TYPE, ops, SelectionKeyImpl.class, sk, Void.TYPE); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } // Note: this method was absent in earlier JDKs so we cannot use @Override annotation //@Override public int translateInterestOps(int ops) { try { copyToDelegate(); return invokeMethod(SelChImpl.class, selChImplDelegate, "translateInterestOps", Integer.TYPE, ops, Integer.TYPE); } catch (Exception e) { throw ExceptionUtil.processException(e); } finally { copyFromDelegate(); } } // Note: this method was absent in earlier JDKs so we cannot use @Override annotation //@Override public void park(int event, long nanos) throws IOException { try { copyToDelegate(); invokeMethod(SelChImpl.class, selChImplDelegate, "park", Integer.TYPE, event, Long.TYPE, nanos, Void.TYPE); } catch (Exception e) { throw ExceptionUtil.throwException(e); } finally { copyFromDelegate(); } } // Note: this method was absent in earlier JDKs so we cannot use @Override annotation //@Override public void park(int event) throws IOException { try { copyToDelegate(); invokeMethod(SelChImpl.class, selChImplDelegate, "park", Integer.TYPE, event, Void.TYPE); } catch (Exception e) { throw ExceptionUtil.throwException(e); } finally { copyFromDelegate(); } } }
Do not wrap channel in Windows Pipe implementation
sniffy-module-nio/src/main/java/io/sniffy/nio/SniffyServerSocketChannel.java
Do not wrap channel in Windows Pipe implementation
<ide><path>niffy-module-nio/src/main/java/io/sniffy/nio/SniffyServerSocketChannel.java <ide> package io.sniffy.nio; <ide> <ide> import io.sniffy.util.ExceptionUtil; <add>import io.sniffy.util.OSUtil; <ide> import io.sniffy.util.ReflectionCopier; <add>import io.sniffy.util.StackTraceExtractor; <ide> import org.codehaus.mojo.animal_sniffer.IgnoreJRERequirement; <ide> import sun.nio.ch.SelChImpl; <ide> import sun.nio.ch.SelectionKeyImpl; <ide> public SocketChannel accept() throws IOException { <ide> try { <ide> copyToDelegate(); <del> return new SniffySocketChannelAdapter(provider(), delegate.accept()); <add> return OSUtil.isWindows() && StackTraceExtractor.hasClassInStackTrace("sun.nio.ch.Pipe") ? <add> delegate.accept() : <add> new SniffySocketChannelAdapter(provider(), delegate.accept()); <ide> } finally { <ide> copyFromDelegate(); <ide> }
Java
apache-2.0
8ad1282b5c493717ab10143bb669827857949e5d
0
poxrucker/collaborative-learning-simulation
package allow.adaptation.test; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.swing.JFrame; import org.jxmapviewer.JXMapViewer; import org.jxmapviewer.OSMTileFactoryInfo; import org.jxmapviewer.painter.CompoundPainter; import org.jxmapviewer.painter.Painter; import org.jxmapviewer.viewer.DefaultTileFactory; import org.jxmapviewer.viewer.DefaultWaypoint; import org.jxmapviewer.viewer.GeoPosition; import org.jxmapviewer.viewer.TileFactoryInfo; import org.jxmapviewer.viewer.Waypoint; import org.jxmapviewer.viewer.WaypointPainter; import allow.simulator.adaptation.AdaptationManager; import allow.simulator.adaptation.Ensemble; import allow.simulator.adaptation.Group; import allow.simulator.adaptation.IEnsembleParticipant; import allow.simulator.adaptation.SelfishAdaptation; import allow.simulator.entity.Gender; import allow.simulator.entity.Person; import allow.simulator.entity.Profile; import allow.simulator.entity.PublicTransportation; import allow.simulator.mobility.data.TType; import allow.simulator.mobility.planner.BikeRentalPlanner; import allow.simulator.mobility.planner.Itinerary; import allow.simulator.mobility.planner.JourneyPlanner; import allow.simulator.mobility.planner.JourneyRequest; import allow.simulator.mobility.planner.OTPPlannerService; import allow.simulator.mobility.planner.RequestId; import allow.simulator.mobility.planner.TaxiPlanner; import allow.simulator.util.Coordinate; public class GroupingTest { public static void main(String[] args) { // Create JourneyPlanner instance - will be available through COntext // during simulation OTPPlannerService otpPlanner = new OTPPlannerService("localhost", 8010); List<OTPPlannerService> otpPlanners = new ArrayList<OTPPlannerService>( 1); otpPlanners.add(otpPlanner); TaxiPlanner taxiPlanner = new 
TaxiPlanner(otpPlanners, new Coordinate( 11.1198448, 46.0719489)); BikeRentalPlanner bikeRentalPlanner = new BikeRentalPlanner( otpPlanners, new Coordinate(11.1248895, 46.0711398)); JourneyPlanner planner = new JourneyPlanner(otpPlanners, taxiPlanner, bikeRentalPlanner, null); // EnsembleManager instance will be part of Context, too AdaptationManager ensembleManager = new AdaptationManager( new SelfishAdaptation(planner)); // Create entities PublicTransportation bus = new PublicTransportation(1, null, null, null, null, 25); bus.setPosition(new Coordinate(11.161968, 46.072994)); // P1: from povo to central station Person passenger1 = new Person(2, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.161968, 46.072994), true, true, true, null, null); passenger1.setCurrentItinerary(new Itinerary()); passenger1.getCurrentItinerary().to = new Coordinate(11.120432, 46.072294); // P2: from povo to central station Person passenger2 = new Person(3, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.161968, 46.072994), true, true, true, null, null); passenger2.setCurrentItinerary(new Itinerary()); passenger2.getCurrentItinerary().to = new Coordinate(11.120432, 46.072294); // from Mesiano to Central Station Person passenger3 = new Person(4, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger3.setCurrentItinerary(new Itinerary()); passenger3.getCurrentItinerary().to = new Coordinate(11.120432, 46.072294); // from Mesiano to Central Station Person passenger4 = new Person(5, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger4.setCurrentItinerary(new Itinerary()); passenger4.getCurrentItinerary().to = new Coordinate(11.120432, 46.072294); // from Mesiano to Central Station Person passenger5 = new Person(6, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); 
passenger5.setCurrentItinerary(new Itinerary()); passenger5.getCurrentItinerary().to = new Coordinate(11.120432, 46.072294); // from Mesiano to Central Station Person passenger6 = new Person(7, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger6.setCurrentItinerary(new Itinerary()); passenger6.getCurrentItinerary().to = new Coordinate(11.154240, 46.066221); // from Mesiano to Central Station Person passenger7 = new Person(8, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger7.setCurrentItinerary(new Itinerary()); passenger7.getCurrentItinerary().to = new Coordinate(11.154240, 46.066221); // from Mesiano to Central Station Person passenger8 = new Person(9, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger8.setCurrentItinerary(new Itinerary()); passenger8.getCurrentItinerary().to = new Coordinate(11.154240, 46.066221); // from Mesiano to Central Station Person passenger9 = new Person(10, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger9.setCurrentItinerary(new Itinerary()); passenger9.getCurrentItinerary().to = new Coordinate(11.154240, 46.066221); // Create Ensemble Ensemble ensemble = ensembleManager.createEnsemble(bus, "current-bus-trip-id-breakdown"); ensemble.addEntity(passenger1); ensemble.addEntity(passenger2); ensemble.addEntity(passenger3); ensemble.addEntity(passenger4); ensemble.addEntity(passenger5); ensemble.addEntity(passenger6); ensemble.addEntity(passenger7); ensemble.addEntity(passenger8); ensemble.addEntity(passenger9); ensemble.addEntity(bus); // FINAL MAP OF GROUPS Map<Object, Group> finalGroups = new HashMap<Object, Group>(); // Retrieve the bus (creator) of the ensemble PublicTransportation creator = (PublicTransportation) ensemble .getCreator(); Coordinate busPos = 
creator.getPosition(); // retrieve passengers already in the bus List<IEnsembleParticipant> personInBus = new ArrayList<IEnsembleParticipant>(); ensemble.getEntities() .forEach( (temp) -> { if (temp.getClass() == allow.simulator.entity.Person.class) { Person p = (allow.simulator.entity.Person) temp; Coordinate cPos = p.getPosition(); double distanceFromCreator = ensembleManager .distance(cPos.x, cPos.y, busPos.x, busPos.y, "K"); if (distanceFromCreator == 0) { personInBus.add(p); } } }); Group InBus = new Group(creator, personInBus); // retrieve entities not in the bus List<IEnsembleParticipant> notAssigned = new ArrayList<IEnsembleParticipant>(); ensemble.getEntities() .forEach( (temp) -> { if (!personInBus.contains(temp) && (temp.getClass() == allow.simulator.entity.Person.class)) { notAssigned.add(temp); } }); int index = 1; // add groups of person already in the bus at the final result of groups finalGroups.put(index, InBus); Map<Object, Group> Groups = ensembleManager.CreateGroups(creator, ensemble, finalGroups, notAssigned, index); System.out.println(" ######## FINAL GROUPS ######## " + finalGroups.size()); for (int i = 1; i <= finalGroups.size(); i++) { System.out.println("Group " + i + ": " + finalGroups.get(i).getParticipants().toString()); System.out.println("- Leader: " + finalGroups.get(i).getLeader().toString()); } // take a group as an example Group g = finalGroups.get(2); // PublicTransportation leader = // (allow.simulator.entity.PublicTransportation) g // .getLeader(); // Coordinate Person leader = (allow.simulator.entity.Person) g.getLeader(); Coordinate from = leader.getPosition(); List<Coordinate> startingPoints = new ArrayList<Coordinate>(); List<Coordinate> destinations = new ArrayList<Coordinate>(); for (int i = 0; i < g.getParticipants().size(); i++) { Person p = (allow.simulator.entity.Person) g.getParticipants().get( i); startingPoints.add(p.getPosition()); destinations.add(p.getCurrentItinerary().to); } RequestId reqId = new RequestId(); 
TType[] mean = new TType[1]; mean[0] = TType.SHARED_TAXI; boolean arriveBy = false; String str = "2016-08-25 12:30"; DateTimeFormatter formatter = DateTimeFormatter .ofPattern("yyyy-MM-dd HH:mm"); LocalDateTime dateTime = LocalDateTime.parse(str, formatter); // For each group derive the journey for each participant JourneyRequest r = JourneyRequest.createSharedRequest(from, startingPoints, destinations, dateTime, arriveBy, mean, reqId); // Planning Instantiation OTPPlannerService otp = new OTPPlannerService("localhost", 8010); List<OTPPlannerService> planners = new ArrayList<OTPPlannerService>(); planners.add(otp); // TaxiPlanner Instantiation Coordinate taxiRank = new Coordinate(11.1198448, 46.0719489); TaxiPlanner tp = new TaxiPlanner(planners, taxiRank); // request taxi journey List<Itinerary> resultItineraries = new ArrayList<Itinerary>(); tp.requestSingleJourney(r, resultItineraries); System.out .println("Number of Itineraries: " + resultItineraries.size()); System.out.println("Trip Type : " + resultItineraries.get(0).itineraryType); for (int i = 0; i < resultItineraries.size(); i++) { System.out.println("Itinerary : " + resultItineraries.get(i).toString()); } for (int i = 0; i < resultItineraries.get(0).subItineraries.size(); i++) { System.out .println("Sub Itinerary : " + resultItineraries.get(0).subItineraries.get(i) .toString()); } // print the result using a map ShowOnMapNew(resultItineraries); } private static void ShowOnMapNew(List<Itinerary> itineraries) { JXMapViewer mapViewer = new JXMapViewer(); // Create a TileFactoryInfo for OpenStreetMap TileFactoryInfo info = new OSMTileFactoryInfo(); DefaultTileFactory tileFactory = new DefaultTileFactory(info); tileFactory.setThreadPoolSize(8); mapViewer.setTileFactory(tileFactory); // Set the focus on Trento GeoPosition trento = new GeoPosition(46.0719489, 11.1198448); mapViewer.setZoom(7); mapViewer.setAddressLocation(trento); GeoPosition[] positions = new GeoPosition[0]; DefaultWaypoint[] waypoints = new 
DefaultWaypoint[0]; for (int i = 0; i < itineraries.get(0).subItineraries.size(); i++) { // System.out.println(i); // System.out.println("worker: " // + itineraries.get(0).subItineraries.get(i).reqId); GeoPosition position = new GeoPosition( itineraries.get(0).subItineraries.get(i).from.y, itineraries.get(0).subItineraries.get(i).from.x); // System.out.println("position: " + position); positions = addElement(positions, position); DefaultWaypoint point = new DefaultWaypoint(position); waypoints = addPoint(waypoints, point); } List<GeoPosition> track = Arrays.asList(positions); RoutePainter routePainter = new RoutePainter(track); // Create waypoints from the geo-positions Set<Waypoint> waypointsSet = new HashSet<Waypoint>( Arrays.asList(waypoints)); // Create a waypoint painter that takes all the waypoints WaypointPainter<Waypoint> waypointPainter = new WaypointPainter<Waypoint>(); waypointPainter.setWaypoints(waypointsSet); // Create a compound painter that uses both the route-painter and the // waypoint-painter List<Painter<JXMapViewer>> painters = new ArrayList<Painter<JXMapViewer>>(); painters.add(routePainter); painters.add(waypointPainter); CompoundPainter<JXMapViewer> painter = new CompoundPainter<JXMapViewer>( painters); mapViewer.setOverlayPainter(painter); // Display the viewer in a JFrame JFrame frame = new JFrame("Collective Adaptation"); frame.getContentPane().add(mapViewer); frame.setSize(800, 600); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.setVisible(true); } private static GeoPosition[] addElement(GeoPosition[] positions, GeoPosition position) { GeoPosition[] result = Arrays.copyOf(positions, positions.length + 1); result[positions.length] = position; return result; } private static DefaultWaypoint[] addPoint(DefaultWaypoint[] points, DefaultWaypoint point) { DefaultWaypoint[] result = Arrays.copyOf(points, points.length + 1); result[points.length] = point; return result; } }
Simulator/src/allow/adaptation/test/GroupingTest.java
package allow.adaptation.test; import java.time.LocalDateTime; import java.time.format.DateTimeFormatter; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import javax.swing.JFrame; import org.jxmapviewer.JXMapViewer; import org.jxmapviewer.OSMTileFactoryInfo; import org.jxmapviewer.painter.CompoundPainter; import org.jxmapviewer.painter.Painter; import org.jxmapviewer.viewer.DefaultTileFactory; import org.jxmapviewer.viewer.DefaultWaypoint; import org.jxmapviewer.viewer.GeoPosition; import org.jxmapviewer.viewer.TileFactoryInfo; import org.jxmapviewer.viewer.Waypoint; import org.jxmapviewer.viewer.WaypointPainter; import allow.simulator.adaptation.AdaptationManager; import allow.simulator.adaptation.Ensemble; import allow.simulator.adaptation.Group; import allow.simulator.adaptation.IEnsembleParticipant; import allow.simulator.adaptation.SelfishAdaptation; import allow.simulator.entity.Gender; import allow.simulator.entity.Person; import allow.simulator.entity.Profile; import allow.simulator.entity.PublicTransportation; import allow.simulator.mobility.data.TType; import allow.simulator.mobility.planner.BikeRentalPlanner; import allow.simulator.mobility.planner.Itinerary; import allow.simulator.mobility.planner.JourneyPlanner; import allow.simulator.mobility.planner.JourneyRequest; import allow.simulator.mobility.planner.OTPPlannerService; import allow.simulator.mobility.planner.RequestId; import allow.simulator.mobility.planner.TaxiPlanner; import allow.simulator.util.Coordinate; public class GroupingTest { public static void main(String[] args) { // Create JourneyPlanner instance - will be available through COntext // during simulation OTPPlannerService otpPlanner = new OTPPlannerService("localhost", 8010); List<OTPPlannerService> otpPlanners = new ArrayList<OTPPlannerService>( 1); otpPlanners.add(otpPlanner); TaxiPlanner taxiPlanner = new 
TaxiPlanner(otpPlanners, new Coordinate( 11.1198448, 46.0719489)); BikeRentalPlanner bikeRentalPlanner = new BikeRentalPlanner( otpPlanners, new Coordinate(11.1248895, 46.0711398)); JourneyPlanner planner = new JourneyPlanner(otpPlanners, taxiPlanner, bikeRentalPlanner, null); // EnsembleManager instance will be part of Context, too AdaptationManager ensembleManager = new AdaptationManager( new SelfishAdaptation(planner)); // Create entities PublicTransportation bus = new PublicTransportation(1, null, null, null, null, 25); bus.setPosition(new Coordinate(11.161968, 46.072994)); // P1: from povo to central station Person passenger1 = new Person(2, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.161968, 46.072994), true, true, true, null, null); passenger1.setCurrentItinerary(new Itinerary()); passenger1.getCurrentItinerary().to = new Coordinate(11.120432, 46.072294); // P2: from povo to central station Person passenger2 = new Person(3, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.161968, 46.072994), true, true, true, null, null); passenger2.setCurrentItinerary(new Itinerary()); passenger2.getCurrentItinerary().to = new Coordinate(11.120432, 46.072294); // from Mesiano to Central Station Person passenger3 = new Person(4, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger3.setCurrentItinerary(new Itinerary()); passenger3.getCurrentItinerary().to = new Coordinate(11.120432, 46.072294); // from Mesiano to Central Station Person passenger4 = new Person(5, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger4.setCurrentItinerary(new Itinerary()); passenger4.getCurrentItinerary().to = new Coordinate(11.120432, 46.072294); // from Mesiano to Central Station Person passenger5 = new Person(6, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); 
passenger5.setCurrentItinerary(new Itinerary()); passenger5.getCurrentItinerary().to = new Coordinate(11.120432, 46.072294); // from Mesiano to Central Station Person passenger6 = new Person(7, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger6.setCurrentItinerary(new Itinerary()); passenger6.getCurrentItinerary().to = new Coordinate(11.154240, 46.066221); // from Mesiano to Central Station Person passenger7 = new Person(8, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger7.setCurrentItinerary(new Itinerary()); passenger7.getCurrentItinerary().to = new Coordinate(11.154240, 46.066221); // from Mesiano to Central Station Person passenger8 = new Person(9, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger8.setCurrentItinerary(new Itinerary()); passenger8.getCurrentItinerary().to = new Coordinate(11.154240, 46.066221); // from Mesiano to Central Station Person passenger9 = new Person(10, Gender.MALE, Profile.WORKER, null, null, new Coordinate(11.142697, 46.065027), true, true, true, null, null); passenger9.setCurrentItinerary(new Itinerary()); passenger9.getCurrentItinerary().to = new Coordinate(11.154240, 46.066221); // Create Ensemble Ensemble ensemble = ensembleManager.createEnsemble(bus, "current-bus-trip-id-breakdown"); ensemble.addEntity(passenger1); ensemble.addEntity(passenger2); ensemble.addEntity(passenger3); ensemble.addEntity(passenger4); ensemble.addEntity(passenger5); ensemble.addEntity(passenger6); ensemble.addEntity(passenger7); ensemble.addEntity(passenger8); ensemble.addEntity(passenger9); ensemble.addEntity(bus); // FINAL MAP OF GROUPS Map<Object, Group> finalGroups = new HashMap<Object, Group>(); // Retrieve the bus (creator) of the ensemble PublicTransportation creator = (PublicTransportation) ensemble .getCreator(); Coordinate busPos = 
creator.getPosition(); // retrieve passengers already in the bus List<IEnsembleParticipant> personInBus = new ArrayList<IEnsembleParticipant>(); ensemble.getEntities() .forEach( (temp) -> { if (temp.getClass() == allow.simulator.entity.Person.class) { Person p = (allow.simulator.entity.Person) temp; Coordinate cPos = p.getPosition(); double distanceFromCreator = ensembleManager .distance(cPos.x, cPos.y, busPos.x, busPos.y, "K"); if (distanceFromCreator == 0) { personInBus.add(p); } } }); Group InBus = new Group(creator, personInBus); // retrieve entities not in the bus List<IEnsembleParticipant> notAssigned = new ArrayList<IEnsembleParticipant>(); ensemble.getEntities() .forEach( (temp) -> { if (!personInBus.contains(temp) && (temp.getClass() == allow.simulator.entity.Person.class)) { notAssigned.add(temp); } }); int index = 1; // add groups of person already in the bus at the final result of groups finalGroups.put(index, InBus); Map<Object, Group> Groups = ensembleManager.CreateGroups(creator, ensemble, finalGroups, notAssigned, index); System.out.println(" ######## FINAL GROUPS ######## " + finalGroups.size()); for (int i = 1; i <= finalGroups.size(); i++) { System.out.println("Group " + i + ": " + finalGroups.get(i).getParticipants().toString()); System.out.println("- Leader: " + finalGroups.get(i).getLeader().toString()); } // take a group as an example Group g = finalGroups.get(2); // PublicTransportation leader = // (allow.simulator.entity.PublicTransportation) g // .getLeader(); // Coordinate Person leader = (allow.simulator.entity.Person) g.getLeader(); Coordinate from = leader.getPosition(); List<Coordinate> startingPoints = new ArrayList<Coordinate>(); List<Coordinate> destinations = new ArrayList<Coordinate>(); for (int i = 0; i < g.getParticipants().size(); i++) { Person p = (allow.simulator.entity.Person) g.getParticipants().get( i); startingPoints.add(p.getPosition()); destinations.add(p.getCurrentItinerary().to); } RequestId reqId = new RequestId(); 
TType[] mean = new TType[1]; mean[0] = TType.SHARED_TAXI; boolean arriveBy = false; String str = "2016-08-25 12:30"; DateTimeFormatter formatter = DateTimeFormatter .ofPattern("yyyy-MM-dd HH:mm"); LocalDateTime dateTime = LocalDateTime.parse(str, formatter); // For each group derive the journey for each participant JourneyRequest r = JourneyRequest.createSharedRequest(from, startingPoints, destinations, dateTime, arriveBy, mean, reqId); // Planning Instantiation OTPPlannerService otp = new OTPPlannerService("localhost", 8010); List<OTPPlannerService> planners = new ArrayList<OTPPlannerService>(); planners.add(otp); // TaxiPlanner Instantiation Coordinate taxiRank = new Coordinate(11.1198448, 46.0719489); TaxiPlanner tp = new TaxiPlanner(planners, taxiRank); // request taxi journey List<Itinerary> resultItineraries = new ArrayList<Itinerary>(); tp.requestSingleJourney(r, resultItineraries); System.out .println("Number of Itineraries: " + resultItineraries.size()); System.out.println("Trip Type : " + resultItineraries.get(0).itineraryType); System.out.println("From : " + resultItineraries.get(0).from); System.out.println("To : " + resultItineraries.get(0).to); System.out.println("Walking Distance : " + resultItineraries.get(0).walkDistance); // print the result using a map ShowOnMapNew(resultItineraries); } private static void ShowOnMapNew(List<Itinerary> itineraries) { JXMapViewer mapViewer = new JXMapViewer(); // Create a TileFactoryInfo for OpenStreetMap TileFactoryInfo info = new OSMTileFactoryInfo(); DefaultTileFactory tileFactory = new DefaultTileFactory(info); tileFactory.setThreadPoolSize(8); mapViewer.setTileFactory(tileFactory); // Set the focus on Trento GeoPosition trento = new GeoPosition(46.0719489, 11.1198448); mapViewer.setZoom(7); mapViewer.setAddressLocation(trento); GeoPosition[] positions = new GeoPosition[0]; DefaultWaypoint[] waypoints = new DefaultWaypoint[0]; for (int i = 0; i < itineraries.get(0).subItineraries.size(); i++) { 
System.out.println(i); System.out.println("worker: " + itineraries.get(0).subItineraries.get(i).reqId); GeoPosition position = new GeoPosition( itineraries.get(0).subItineraries.get(i).from.y, itineraries.get(0).subItineraries.get(i).from.x); System.out.println("position: " + position); positions = addElement(positions, position); DefaultWaypoint point = new DefaultWaypoint(position); waypoints = addPoint(waypoints, point); } List<GeoPosition> track = Arrays.asList(positions); RoutePainter routePainter = new RoutePainter(track); // Create waypoints from the geo-positions Set<Waypoint> waypointsSet = new HashSet<Waypoint>( Arrays.asList(waypoints)); // Create a waypoint painter that takes all the waypoints WaypointPainter<Waypoint> waypointPainter = new WaypointPainter<Waypoint>(); waypointPainter.setWaypoints(waypointsSet); // Create a compound painter that uses both the route-painter and the // waypoint-painter List<Painter<JXMapViewer>> painters = new ArrayList<Painter<JXMapViewer>>(); painters.add(routePainter); painters.add(waypointPainter); CompoundPainter<JXMapViewer> painter = new CompoundPainter<JXMapViewer>( painters); mapViewer.setOverlayPainter(painter); // Display the viewer in a JFrame JFrame frame = new JFrame("Collective Adaptation"); frame.getContentPane().add(mapViewer); frame.setSize(800, 600); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); frame.setVisible(true); } private static GeoPosition[] addElement(GeoPosition[] positions, GeoPosition position) { GeoPosition[] result = Arrays.copyOf(positions, positions.length + 1); result[positions.length] = position; return result; } private static DefaultWaypoint[] addPoint(DefaultWaypoint[] points, DefaultWaypoint point) { DefaultWaypoint[] result = Arrays.copyOf(points, points.length + 1); result[points.length] = point; return result; } }
Map added
Simulator/src/allow/adaptation/test/GroupingTest.java
Map added
<ide><path>imulator/src/allow/adaptation/test/GroupingTest.java <ide> .println("Number of Itineraries: " + resultItineraries.size()); <ide> System.out.println("Trip Type : " <ide> + resultItineraries.get(0).itineraryType); <del> System.out.println("From : " + resultItineraries.get(0).from); <del> System.out.println("To : " + resultItineraries.get(0).to); <del> System.out.println("Walking Distance : " <del> + resultItineraries.get(0).walkDistance); <add> for (int i = 0; i < resultItineraries.size(); i++) { <add> System.out.println("Itinerary : " <add> + resultItineraries.get(i).toString()); <add> <add> } <add> for (int i = 0; i < resultItineraries.get(0).subItineraries.size(); i++) { <add> System.out <add> .println("Sub Itinerary : " <add> + resultItineraries.get(0).subItineraries.get(i) <add> .toString()); <add> } <ide> <ide> // print the result using a map <ide> ShowOnMapNew(resultItineraries); <ide> DefaultWaypoint[] waypoints = new DefaultWaypoint[0]; <ide> <ide> for (int i = 0; i < itineraries.get(0).subItineraries.size(); i++) { <del> System.out.println(i); <del> System.out.println("worker: " <del> + itineraries.get(0).subItineraries.get(i).reqId); <add> // System.out.println(i); <add> // System.out.println("worker: " <add> // + itineraries.get(0).subItineraries.get(i).reqId); <ide> GeoPosition position = new GeoPosition( <ide> itineraries.get(0).subItineraries.get(i).from.y, <ide> itineraries.get(0).subItineraries.get(i).from.x); <del> System.out.println("position: " + position); <add> // System.out.println("position: " + position); <ide> positions = addElement(positions, position); <ide> DefaultWaypoint point = new DefaultWaypoint(position); <ide> waypoints = addPoint(waypoints, point);
Java
apache-2.0
0ff6e508852042cc3c123a817ced4e1ce340eca7
0
OpenXIP/xip-host,OpenXIP/xip-host
/** * Copyright (c) 2008 Washington University in St. Louis. All Rights Reserved. */ package edu.wustl.xipHost.dicom; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Properties; import javax.swing.JFrame; import edu.wustl.xipHost.localFileSystem.HostFileChooser; /** * @author Jaroslaw Krych * */ public class PreloadDICOMtoDICOMDataSource { public static void main(String[] args) { HostFileChooser fileChooser = new HostFileChooser(true, new File("./dicom-dataset-demo")); JFrame frame = new JFrame(); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); fileChooser.showOpenDialog(frame); File[] files = fileChooser.getSelectedFiles(); if(files == null){ return; } PacsLocation loc = new PacsLocation("127.0.0.1", 3001, "WORKSTATION1", "XIPHost embedded database"); //PacsLocation loc = new PacsLocation("127.0.0.1", 3002, "WORKSTATION2", "XIPHost test database"); DicomManager dicomMgr = DicomManagerFactory.getInstance(); Properties workstation1Prop = new Properties(); try { workstation1Prop.load(new FileInputStream("./pixelmed-server-hsqldb/workstation1.properties")); //workstation1Prop.load(new FileInputStream("./src-tests/edu/wustl/xipHost/dicom/server/workstation2.properties")); workstation1Prop.setProperty("Application.SavedImagesFolderName", new File("./dicom-dataset-demo").getCanonicalPath()); //workstation1Prop.setProperty("Application.SavedImagesFolderName", new File("./test-content/WORKSTATION2").getCanonicalPath()); } catch (FileNotFoundException e1) { System.err.println(e1.getMessage()); System.exit(0); } catch (IOException e1) { System.err.println(e1.getMessage()); System.exit(0); } dicomMgr.runDicomStartupSequence("./pixelmed-server-hsqldb/server", workstation1Prop); //dicomMgr.runDicomStartupSequence("./src-tests/edu/wustl/xipHost/dicom/server/serverTest", workstation1Prop); dicomMgr.submit(files, loc); 
dicomMgr.runDicomShutDownSequence("jdbc:hsqldb:./pixelmed-server-hsqldb/hsqldb/data/ws1db", "sa", ""); //dicomMgr.runDicomShutDownSequence("jdbc:hsqldb:./src-tests/edu/wustl/xipHost/dicom/server/hsqldb/data/ws2db", "sa", ""); System.exit(0); } }
src/edu/wustl/xipHost/dicom/PreloadDICOMtoDICOMDataSource.java
/** * Copyright (c) 2008 Washington University in St. Louis. All Rights Reserved. */ package edu.wustl.xipHost.dicom; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.IOException; import java.util.Properties; import javax.swing.JFrame; import edu.wustl.xipHost.localFileSystem.HostFileChooser; /** * @author Jaroslaw Krych * */ public class PreloadDICOMtoDICOMDataSource { public static void main(String[] args) { HostFileChooser fileChooser = new HostFileChooser(true, new File("./dicom-dataset-demo")); JFrame frame = new JFrame(); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); fileChooser.showOpenDialog(frame); File[] files = fileChooser.getSelectedFiles(); if(files == null){ return; } //PacsLocation loc = new PacsLocation("127.0.0.1", 3001, "WORKSTATION1", "XIPHost embedded database"); PacsLocation loc = new PacsLocation("127.0.0.1", 3002, "WORKSTATION2", "XIPHost test database"); DicomManager dicomMgr = DicomManagerFactory.getInstance(); Properties workstation1Prop = new Properties(); try { //workstation1Prop.load(new FileInputStream("./pixelmed-server-hsqldb/workstation1.properties")); workstation1Prop.load(new FileInputStream("./src-tests/edu/wustl/xipHost/dicom/server/workstation2.properties")); workstation1Prop.setProperty("Application.SavedImagesFolderName", new File("./test-content/WORKSTATION2").getCanonicalPath()); } catch (FileNotFoundException e1) { System.err.println(e1.getMessage()); System.exit(0); } catch (IOException e1) { System.err.println(e1.getMessage()); System.exit(0); } //dicomMgr.runDicomStartupSequence("./pixelmed-server-hsqldb/server", workstation1Prop); dicomMgr.runDicomStartupSequence("./src-tests/edu/wustl/xipHost/dicom/server/serverTest", workstation1Prop); dicomMgr.submit(files, loc); //dicomMgr.runDicomShutDownSequence("jdbc:hsqldb:./pixelmed-server-hsqldb/hsqldb/data/ws1db", "sa", ""); 
dicomMgr.runDicomShutDownSequence("jdbc:hsqldb:./src-tests/edu/wustl/xipHost/dicom/server/hsqldb/data/ws2db", "sa", ""); System.exit(0); } }
Changed server settings from JUnit test server to embedded server in PreloadDICOMtoDICOMDataSource. Preloading exceptions on Mac OS X were resolved. SVN-Revision: 1115
src/edu/wustl/xipHost/dicom/PreloadDICOMtoDICOMDataSource.java
Changed server settings from JUnit test server to embedded server in PreloadDICOMtoDICOMDataSource. Preloading exceptions on Mac OS X were resolved.
<ide><path>rc/edu/wustl/xipHost/dicom/PreloadDICOMtoDICOMDataSource.java <ide> if(files == null){ <ide> return; <ide> } <del> //PacsLocation loc = new PacsLocation("127.0.0.1", 3001, "WORKSTATION1", "XIPHost embedded database"); <del> PacsLocation loc = new PacsLocation("127.0.0.1", 3002, "WORKSTATION2", "XIPHost test database"); <add> PacsLocation loc = new PacsLocation("127.0.0.1", 3001, "WORKSTATION1", "XIPHost embedded database"); <add> //PacsLocation loc = new PacsLocation("127.0.0.1", 3002, "WORKSTATION2", "XIPHost test database"); <ide> DicomManager dicomMgr = DicomManagerFactory.getInstance(); <ide> Properties workstation1Prop = new Properties(); <ide> try { <del> //workstation1Prop.load(new FileInputStream("./pixelmed-server-hsqldb/workstation1.properties")); <del> workstation1Prop.load(new FileInputStream("./src-tests/edu/wustl/xipHost/dicom/server/workstation2.properties")); <del> workstation1Prop.setProperty("Application.SavedImagesFolderName", new File("./test-content/WORKSTATION2").getCanonicalPath()); <add> workstation1Prop.load(new FileInputStream("./pixelmed-server-hsqldb/workstation1.properties")); <add> //workstation1Prop.load(new FileInputStream("./src-tests/edu/wustl/xipHost/dicom/server/workstation2.properties")); <add> workstation1Prop.setProperty("Application.SavedImagesFolderName", new File("./dicom-dataset-demo").getCanonicalPath()); <add> //workstation1Prop.setProperty("Application.SavedImagesFolderName", new File("./test-content/WORKSTATION2").getCanonicalPath()); <ide> } catch (FileNotFoundException e1) { <ide> System.err.println(e1.getMessage()); <ide> System.exit(0); <ide> System.err.println(e1.getMessage()); <ide> System.exit(0); <ide> } <del> //dicomMgr.runDicomStartupSequence("./pixelmed-server-hsqldb/server", workstation1Prop); <del> dicomMgr.runDicomStartupSequence("./src-tests/edu/wustl/xipHost/dicom/server/serverTest", workstation1Prop); <add> dicomMgr.runDicomStartupSequence("./pixelmed-server-hsqldb/server", 
workstation1Prop); <add> //dicomMgr.runDicomStartupSequence("./src-tests/edu/wustl/xipHost/dicom/server/serverTest", workstation1Prop); <ide> dicomMgr.submit(files, loc); <del> //dicomMgr.runDicomShutDownSequence("jdbc:hsqldb:./pixelmed-server-hsqldb/hsqldb/data/ws1db", "sa", ""); <del> dicomMgr.runDicomShutDownSequence("jdbc:hsqldb:./src-tests/edu/wustl/xipHost/dicom/server/hsqldb/data/ws2db", "sa", ""); <add> dicomMgr.runDicomShutDownSequence("jdbc:hsqldb:./pixelmed-server-hsqldb/hsqldb/data/ws1db", "sa", ""); <add> //dicomMgr.runDicomShutDownSequence("jdbc:hsqldb:./src-tests/edu/wustl/xipHost/dicom/server/hsqldb/data/ws2db", "sa", ""); <ide> System.exit(0); <ide> } <ide> }
Java
agpl-3.0
9ddf8c997a152ccceb55b642462c9303686ee738
0
zeineb/scheduling,fviale/scheduling,sandrineBeauche/scheduling,yinan-liu/scheduling,ShatalovYaroslav/scheduling,youribonnaffe/scheduling,youribonnaffe/scheduling,marcocast/scheduling,zeineb/scheduling,yinan-liu/scheduling,mbenguig/scheduling,paraita/scheduling,mbenguig/scheduling,sandrineBeauche/scheduling,sandrineBeauche/scheduling,mbenguig/scheduling,jrochas/scheduling,sandrineBeauche/scheduling,laurianed/scheduling,tobwiens/scheduling,youribonnaffe/scheduling,laurianed/scheduling,yinan-liu/scheduling,yinan-liu/scheduling,sgRomaric/scheduling,lpellegr/scheduling,ShatalovYaroslav/scheduling,youribonnaffe/scheduling,sgRomaric/scheduling,fviale/scheduling,mbenguig/scheduling,fviale/scheduling,sgRomaric/scheduling,sgRomaric/scheduling,marcocast/scheduling,fviale/scheduling,tobwiens/scheduling,ShatalovYaroslav/scheduling,tobwiens/scheduling,laurianed/scheduling,sgRomaric/scheduling,yinan-liu/scheduling,jrochas/scheduling,tobwiens/scheduling,tobwiens/scheduling,mbenguig/scheduling,sandrineBeauche/scheduling,zeineb/scheduling,paraita/scheduling,lpellegr/scheduling,zeineb/scheduling,lpellegr/scheduling,youribonnaffe/scheduling,lpellegr/scheduling,mbenguig/scheduling,ow2-proactive/scheduling,ow2-proactive/scheduling,zeineb/scheduling,lpellegr/scheduling,ow2-proactive/scheduling,fviale/scheduling,lpellegr/scheduling,sandrineBeauche/scheduling,youribonnaffe/scheduling,laurianed/scheduling,tobwiens/scheduling,fviale/scheduling,yinan-liu/scheduling,paraita/scheduling,laurianed/scheduling,ShatalovYaroslav/scheduling,zeineb/scheduling,lpellegr/scheduling,zeineb/scheduling,jrochas/scheduling,jrochas/scheduling,tobwiens/scheduling,ow2-proactive/scheduling,sgRomaric/scheduling,yinan-liu/scheduling,paraita/scheduling,ow2-proactive/scheduling,jrochas/scheduling,ow2-proactive/scheduling,sandrineBeauche/scheduling,jrochas/scheduling,marcocast/scheduling,mbenguig/scheduling,ShatalovYaroslav/scheduling,youribonnaffe/scheduling,laurianed/scheduling,marcocast/scheduling,marcocast/schedulin
g,sgRomaric/scheduling,ShatalovYaroslav/scheduling,ShatalovYaroslav/scheduling,marcocast/scheduling,paraita/scheduling,paraita/scheduling,paraita/scheduling,marcocast/scheduling,ow2-proactive/scheduling,fviale/scheduling,jrochas/scheduling,laurianed/scheduling
/* * ################################################################ * * ProActive Parallel Suite(TM): The Java(TM) library for * Parallel, Distributed, Multi-Core Computing for * Enterprise Grids & Clouds * * Copyright (C) 1997-2011 INRIA/University of * Nice-Sophia Antipolis/ActiveEon * Contact: [email protected] or [email protected] * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Affero General Public License * as published by the Free Software Foundation; version 3 of * the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA * * If needed, contact us to obtain a release under GPL Version 2 or 3 * or a different license than the AGPL. 
* * Initial developer(s): The ProActive Team * http://proactive.inria.fr/team_members.htm * Contributor(s): ActiveEon Team - http://www.activeeon.com * * ################################################################ * $$ACTIVEEON_CONTRIBUTOR$$ */ package functionaltests.utils; import java.io.File; import java.io.IOException; import java.net.ServerSocket; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ExecutionException; import org.objectweb.proactive.core.ProActiveTimeoutException; import org.objectweb.proactive.core.config.CentralPAPropertyRepository; import org.objectweb.proactive.core.node.Node; import org.objectweb.proactive.core.node.NodeException; import org.objectweb.proactive.core.node.NodeFactory; import org.objectweb.proactive.core.node.StartNode; import org.objectweb.proactive.core.process.JVMProcess; import org.objectweb.proactive.core.process.JVMProcessImpl; import org.objectweb.proactive.extensions.pnp.PNPConfig; import org.ow2.proactive.resourcemanager.RMFactory; import org.ow2.proactive.resourcemanager.authentication.RMAuthentication; import org.ow2.proactive.resourcemanager.common.event.RMEventType; import org.ow2.proactive.resourcemanager.common.event.RMNodeEvent; import org.ow2.proactive.resourcemanager.core.properties.PAResourceManagerProperties; import org.ow2.proactive.resourcemanager.frontend.ResourceManager; import org.ow2.proactive.resourcemanager.nodesource.NodeSource; import org.ow2.proactive.resourcemanager.nodesource.infrastructure.LocalInfrastructure; import org.ow2.proactive.resourcemanager.nodesource.policy.StaticPolicy; import org.ow2.proactive.utils.FileToBytesConverter; import org.ow2.tests.ProActiveSetup; import functionaltests.monitor.RMMonitorEventReceiver; import functionaltests.monitor.RMMonitorsHandler; /** * * Static helpers for Resource Manager functional tests. 
* It provides waiters methods that check correct event dispatching. * * @author ProActive team * */ public class RMTHelper { /** * Timeout for local infrastructure */ public static final int DEFAULT_NODES_TIMEOUT = 120 * 1000; //120s /** * Number of nodes deployed with default deployment descriptor */ private static final int DEFAULT_NODES_NUMBER = 2; private final static ProActiveSetup setup = new ProActiveSetup(); private static TestRM rm = new TestRM(); static { Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { @Override public void run() { try { rm.kill(); } catch (Exception e) { e.printStackTrace(); } } })); } private static RMTestUser connectedUser = new RMTestUser(TestUsers.TEST); private String currentTestConfiguration; public static void log(String s) { System.out.println("------------------------------ " + s); } /** * Creates a Local node source * @throws Exception */ public void createNodeSource() throws Exception { createNodeSource(this.getClass().getSimpleName()); } /** * Creates a Local node source with specified name * @throws Exception * @return expected number of nodes */ public int createNodeSource(String name) throws Exception { createNodeSource(name, RMTHelper.DEFAULT_NODES_NUMBER); return RMTHelper.DEFAULT_NODES_NUMBER; } public void createNodeSource(String name, int nodeNumber) throws Exception { createNodeSource(name, nodeNumber, getResourceManager(), getMonitorsHandler()); } /** * Creates a Local node source with specified name */ public static void createNodeSource(String name, int nodeNumber, ResourceManager rm, RMMonitorsHandler monitor) throws Exception { RMFactory.setOsJavaProperty(); System.out.println("Creating a node source " + name); //first emtpy im parameter is default rm url byte[] creds = FileToBytesConverter.convertFileToByteArray(new File(PAResourceManagerProperties .getAbsolutePath(PAResourceManagerProperties.RM_CREDS.getValueAsString()))); rm.createNodeSource(name, LocalInfrastructure.class.getName(), new 
Object[] { creds, nodeNumber, RMTHelper.DEFAULT_NODES_TIMEOUT, setup.getJvmParameters() + " " + CentralPAPropertyRepository.PA_COMMUNICATION_PROTOCOL.getCmdLine() + "pnp" }, StaticPolicy.class.getName(), null); rm.setNodeSourcePingFrequency(5000, name); waitForNodeSourceCreation(name, nodeNumber, monitor); } /** Wait for the node source to be created when the node source is empty */ public void waitForNodeSourceCreation(String name) { waitForNodeSourceCreation(name, 0); } public static void waitForNodeSourceCreation(String name, int nodeNumber, RMMonitorsHandler monitor) { waitForNodeSourceEvent(RMEventType.NODESOURCE_CREATED, name, monitor); for (int i = 0; i < nodeNumber; i++) { waitForAnyNodeEvent(RMEventType.NODE_ADDED, monitor); waitForAnyNodeEvent(RMEventType.NODE_REMOVED, monitor); waitForAnyNodeEvent(RMEventType.NODE_ADDED, monitor); waitForAnyNodeEvent(RMEventType.NODE_STATE_CHANGED, monitor); } } /** Wait for the node source to be created and the nodes to be connected */ public void waitForNodeSourceCreation(String name, int nodeNumber) { waitForNodeSourceEvent(RMEventType.NODESOURCE_CREATED, name); for (int i = 0; i < nodeNumber; i++) { waitForAnyNodeEvent(RMEventType.NODE_ADDED); waitForAnyNodeEvent(RMEventType.NODE_REMOVED); waitForAnyNodeEvent(RMEventType.NODE_ADDED); waitForAnyNodeEvent(RMEventType.NODE_STATE_CHANGED); } } /** * Create a ProActive Node in a new JVM on the local host * This method can be used to test adding nodes mechanism * with already deploy ProActive nodes. * @param nodeName node's name to create * @return created node URL * @throws IOException if the external JVM cannot be created * @throws NodeException if lookup of the new node fails. 
*/ public static TestNode createNode(String nodeName) throws IOException, NodeException, InterruptedException { return createNode(nodeName, new HashMap<String, String>()); } public static TestNode createNode(String nodeName, Map<String, String> vmParameters) throws IOException, NodeException, InterruptedException { return createNode(nodeName, vmParameters, null); } public static TestNode createNode(String nodeName, int pnpPort) throws IOException, NodeException, InterruptedException { return createNode(nodeName, new HashMap<String, String>(), new ArrayList<String>(), pnpPort); } public List<TestNode> createNodes(final String nodeName, int number) throws IOException, NodeException, ExecutionException, InterruptedException { ArrayList<TestNode> nodes = new ArrayList<>(number); for (int i = 0; i < number; i++) { nodes.add(createNode(nodeName + i, findFreePort())); } return nodes; } public void createNodeSource(int nodesNumber) throws Exception { createNodeSource(nodesNumber, new ArrayList<String>()); } public void createNodeSource(int nodesNumber, List<String> vmOptions) throws Exception { createNodeSource(nodesNumber, vmOptions, getResourceManager(), getMonitorsHandler()); } public static void createNodeSource(int nodesNumber, List<String> vmOptions, ResourceManager resourceManager, RMMonitorsHandler monitor) throws Exception { Map<String, String> map = new HashMap<>(); map.put(CentralPAPropertyRepository.PA_HOME.getName(), CentralPAPropertyRepository.PA_HOME.getValue()); for (int i = 0; i < nodesNumber; i++) { String nodeName = "node-" + i; TestNode node = createNode(nodeName, map, vmOptions); resourceManager.addNode(node.getNode().getNodeInformation().getURL()); } waitForNodeSourceEvent(RMEventType.NODESOURCE_CREATED, NodeSource.DEFAULT, monitor); for (int i = 0; i < nodesNumber; i++) { waitForAnyNodeEvent(RMEventType.NODE_STATE_CHANGED, monitor); } } static int findFreePort() throws IOException { ServerSocket server = new ServerSocket(0); int port = 
server.getLocalPort(); server.close(); return port; } private static TestNode createNode(String nodeName, Map<String, String> vmParameters, List<String> vmOptions) throws IOException, NodeException, InterruptedException { return createNode(nodeName, vmParameters, vmOptions, 0); } /** * Create a ProActive Node in a new JVM on the local host * with specific java parameters. * This method can be used to test adding nodes mechanism * with already deploy ProActive nodes. * @param nodeName node's name to create * @param vmParameters an HashMap containing key and value String * of type :-Dkey=value * @return created node URL * @throws IOException if the external JVM cannot be created * @throws NodeException if lookup of the new node fails. */ private static TestNode createNode(String nodeName, Map<String, String> vmParameters, List<String> vmOptions, int pnpPort) throws IOException, NodeException, InterruptedException { if (pnpPort <= 0) { pnpPort = findFreePort(); } String nodeUrl = "pnp://localhost:" + pnpPort + "/" + nodeName; vmParameters.put(PNPConfig.PA_PNP_PORT.getName(), Integer.toString(pnpPort)); JVMProcessImpl nodeProcess = createJvmProcess(StartNode.class.getName(), Collections.singletonList(nodeName), vmParameters, vmOptions); return createNode(nodeName, nodeUrl, nodeProcess); } public static TestNode createNode(String nodeName, String expectedUrl, JVMProcess nodeProcess) throws IOException, NodeException, InterruptedException { Node newNode = null; final long NODE_START_TIMEOUT_IN_MS = 120000; long startTimeStamp = System.currentTimeMillis(); NodeException toThrow = null; while ((System.currentTimeMillis() - startTimeStamp) < NODE_START_TIMEOUT_IN_MS) { try { newNode = NodeFactory.getNode(expectedUrl); } catch (NodeException e) { toThrow = e; //nothing, wait another loop } if (newNode != null) { return new TestNode(nodeProcess, newNode); } else { Thread.sleep(100); } } throw toThrow == null ? 
new NodeException("unable to create the node " + nodeName) : toThrow; } public static JVMProcessImpl createJvmProcess(String className, List<String> parameters, Map<String, String> vmParameters, List<String> vmOptions) throws IOException { JVMProcessImpl nodeProcess = new JVMProcessImpl( new org.objectweb.proactive.core.process.AbstractExternalProcess.StandardOutputMessageLogger()); nodeProcess.setClassname(className); ArrayList<String> jvmParameters = new ArrayList<>(); if (vmParameters == null) { vmParameters = new HashMap<>(); } vmParameters.put(CentralPAPropertyRepository.PA_COMMUNICATION_PROTOCOL.getName(), "pnp"); if (!vmParameters.containsKey(CentralPAPropertyRepository.PA_HOME.getName())) { vmParameters.put(CentralPAPropertyRepository.PA_HOME.getName(), CentralPAPropertyRepository.PA_HOME.getValue()); } if (!vmParameters.containsKey(PAResourceManagerProperties.RM_HOME.getKey())) { vmParameters.put(PAResourceManagerProperties.RM_HOME.getKey(), PAResourceManagerProperties.RM_HOME.getValueAsString()); } for (Entry<String, String> entry : vmParameters.entrySet()) { if (!entry.getKey().equals("") && !entry.getValue().equals("")) { jvmParameters.add("-D" + entry.getKey() + "=" + entry.getValue()); } } if (vmOptions != null) { jvmParameters.addAll(vmOptions); } jvmParameters.addAll(setup.getJvmParametersAsList()); nodeProcess.setJvmOptions(jvmParameters); nodeProcess.setParameters(parameters); nodeProcess.startProcess(); return nodeProcess; } /** * Returns the list of alive Nodes * @return list of ProActive Nodes urls */ public Set<String> listAliveNodesUrls() throws Exception { return getResourceManager().listAliveNodeUrls(); } public void killRM() throws Exception { if (connectedUser != null) { connectedUser.disconnect(); } rm.kill(); } /** * Wait for an event regarding node sources: created, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. 
* @param nodeSourceEvent awaited event. * @param nodeSourceName corresponding node source name for which an event is awaited. */ public void waitForNodeSourceEvent(RMEventType nodeSourceEvent, String nodeSourceName) { waitForNodeSourceEvent(nodeSourceEvent, nodeSourceName, getMonitorsHandler()); } public static void waitForNodeSourceEvent(RMEventType nodeSourceEvent, String nodeSourceName, RMMonitorsHandler monitorsHandler) { try { waitForNodeSourceEvent(nodeSourceEvent, nodeSourceName, 0, monitorsHandler); } catch (ProActiveTimeoutException e) { //unreachable block, 0 means infinite, no timeout //log something ? } } /** * Wait for an event regarding node sources: created, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. * @param eventType awaited event. * @param nodeSourceName corresponding node source name for which an event is awaited. * @param timeout in milliseconds * @throws ProActiveTimeoutException if timeout is reached */ public void waitForNodeSourceEvent(RMEventType eventType, String nodeSourceName, long timeout) throws ProActiveTimeoutException { waitForNodeSourceEvent(eventType, nodeSourceName, timeout, getMonitorsHandler()); } public static void waitForNodeSourceEvent(RMEventType eventType, String nodeSourceName, long timeout, RMMonitorsHandler monitorsHandler) throws ProActiveTimeoutException { monitorsHandler.waitForNodesourceEvent(eventType, nodeSourceName, timeout); } /** * Wait for an event on a specific node : created, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. * @param nodeEvent awaited event. * @param nodeUrl Url's of the node for which a new state is awaited. * @return RMNodeEvent object received by event receiver. 
*/ public RMNodeEvent waitForNodeEvent(RMEventType nodeEvent, String nodeUrl) { try { return waitForNodeEvent(nodeEvent, nodeUrl, 0); } catch (ProActiveTimeoutException e) { //unreachable block, 0 means infinite, no timeout //log string ? return null; } } /** * Wait for an event on a specific node : created, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. * @param eventType awaited event. * @param nodeUrl Url's of the node for which a new state is awaited * @param timeout in milliseconds * @return RMNodeEvent object received by event receiver. * @throws ProActiveTimeoutException if timeout is reached */ public RMNodeEvent waitForNodeEvent(RMEventType eventType, String nodeUrl, long timeout) throws ProActiveTimeoutException { return waitForNodeEvent(eventType, nodeUrl, timeout, getMonitorsHandler()); } public static RMNodeEvent waitForNodeEvent(RMEventType eventType, String nodeUrl, long timeout, RMMonitorsHandler monitorsHandler) throws ProActiveTimeoutException { return monitorsHandler.waitForNodeEvent(eventType, nodeUrl, timeout); } /** * Wait for an event on any node: added, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. * @param eventType awaited event. * @return RMNodeEvent object received by event receiver. */ public RMNodeEvent waitForAnyNodeEvent(RMEventType eventType) { return waitForAnyNodeEvent(eventType, getMonitorsHandler()); } public static RMNodeEvent waitForAnyNodeEvent(RMEventType eventType, RMMonitorsHandler monitorsHandler) { try { return waitForAnyNodeEvent(eventType, 0, monitorsHandler); } catch (ProActiveTimeoutException e) { //unreachable block, 0 means infinite, no timeout //log sthing ? 
return null; } } /** * Kills the node with specified url * @param url of the node * @throws NodeException if node cannot be looked up */ public static void killNode(String url) throws NodeException { Node node = NodeFactory.getNode(url); try { node.getProActiveRuntime().killRT(false); } catch (Exception ignored) { } } /** * Wait for an event on any node: added, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. * @param eventType awaited event. * @param timeout in milliseconds * @return RMNodeEvent object received by event receiver. * @throws ProActiveTimeoutException if timeout is reached */ public RMNodeEvent waitForAnyNodeEvent(RMEventType eventType, long timeout) throws ProActiveTimeoutException { return waitForAnyNodeEvent(eventType, timeout, getMonitorsHandler()); } public static RMNodeEvent waitForAnyNodeEvent(RMEventType eventType, long timeout, RMMonitorsHandler monitorsHandler) throws ProActiveTimeoutException { return monitorsHandler.waitForAnyNodeEvent(eventType, timeout); } public void startRM(String configurationFile) throws Exception { startRM(configurationFile, TestRM.PA_PNP_PORT); } /** * Start the RM using a forked JVM * * @param configurationFile the RM's configuration file to use (default is functionalTSchedulerProperties.ini) * null to use the default one. * @throws Exception if an error occurs. */ public String startRM(String configurationFile, int pnpPort, String... 
jvmArgs) throws Exception { if (!rm.isStartedWithSameConfiguration(configurationFile)) { log("Starting RM"); rm.start(configurationFile, pnpPort, jvmArgs); currentTestConfiguration = configurationFile; } return rm.getUrl(); } public ResourceManager getResourceManager() throws Exception { return getResourceManager(TestUsers.TEST); } /** * Idem than getResourceManager but allow to specify a propertyFile * @return the resource manager * @throws Exception */ public ResourceManager getResourceManager(TestUsers user) throws Exception { startRM(currentTestConfiguration, TestRM.PA_PNP_PORT); if (!connectedUser.is(user)) { // changing user on the fly if (connectedUser != null) { connectedUser.disconnect(); } connectedUser = new RMTestUser(user); connectedUser.connect(rm.getAuth()); } if (!connectedUser.isConnected()) { connectedUser.connect(rm.getAuth()); } return connectedUser.getResourceManager(); } public static String getLocalUrl() { return rm.getUrl(); } public RMMonitorsHandler getMonitorsHandler() { return connectedUser.getMonitorsHandler(); } public RMMonitorEventReceiver getEventReceiver() { return connectedUser.getEventReceiver(); } public RMAuthentication getRMAuth() throws Exception { startRM(currentTestConfiguration, TestRM.PA_PNP_PORT); return rm.getAuth(); } public void disconnect() throws Exception { connectedUser.disconnect(); } }
rm/rm-server/src/test/java/functionaltests/utils/RMTHelper.java
/* * ################################################################ * * ProActive Parallel Suite(TM): The Java(TM) library for * Parallel, Distributed, Multi-Core Computing for * Enterprise Grids & Clouds * * Copyright (C) 1997-2011 INRIA/University of * Nice-Sophia Antipolis/ActiveEon * Contact: [email protected] or [email protected] * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Affero General Public License * as published by the Free Software Foundation; version 3 of * the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA * * If needed, contact us to obtain a release under GPL Version 2 or 3 * or a different license than the AGPL. 
* * Initial developer(s): The ProActive Team * http://proactive.inria.fr/team_members.htm * Contributor(s): ActiveEon Team - http://www.activeeon.com * * ################################################################ * $$ACTIVEEON_CONTRIBUTOR$$ */ package functionaltests.utils; import java.io.File; import java.io.IOException; import java.net.ServerSocket; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.ExecutionException; import org.objectweb.proactive.core.ProActiveTimeoutException; import org.objectweb.proactive.core.config.CentralPAPropertyRepository; import org.objectweb.proactive.core.node.Node; import org.objectweb.proactive.core.node.NodeException; import org.objectweb.proactive.core.node.NodeFactory; import org.objectweb.proactive.core.node.StartNode; import org.objectweb.proactive.core.process.JVMProcess; import org.objectweb.proactive.core.process.JVMProcessImpl; import org.objectweb.proactive.extensions.pnp.PNPConfig; import org.ow2.proactive.resourcemanager.RMFactory; import org.ow2.proactive.resourcemanager.authentication.RMAuthentication; import org.ow2.proactive.resourcemanager.common.event.RMEventType; import org.ow2.proactive.resourcemanager.common.event.RMNodeEvent; import org.ow2.proactive.resourcemanager.core.properties.PAResourceManagerProperties; import org.ow2.proactive.resourcemanager.frontend.ResourceManager; import org.ow2.proactive.resourcemanager.nodesource.NodeSource; import org.ow2.proactive.resourcemanager.nodesource.infrastructure.LocalInfrastructure; import org.ow2.proactive.resourcemanager.nodesource.policy.StaticPolicy; import org.ow2.proactive.utils.FileToBytesConverter; import org.ow2.tests.ProActiveSetup; import functionaltests.monitor.RMMonitorEventReceiver; import functionaltests.monitor.RMMonitorsHandler; /** * * Static helpers for Resource Manager functional tests. 
* It provides waiters methods that check correct event dispatching. * * @author ProActive team * */ public class RMTHelper { /** * Timeout for local infrastructure */ public static final int DEFAULT_NODES_TIMEOUT = 60 * 1000; //60s /** * Number of nodes deployed with default deployment descriptor */ private static final int DEFAULT_NODES_NUMBER = 2; private final static ProActiveSetup setup = new ProActiveSetup(); private static TestRM rm = new TestRM(); static { Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() { @Override public void run() { try { rm.kill(); } catch (Exception e) { e.printStackTrace(); } } })); } private static RMTestUser connectedUser = new RMTestUser(TestUsers.TEST); private String currentTestConfiguration; public static void log(String s) { System.out.println("------------------------------ " + s); } /** * Creates a Local node source * @throws Exception */ public void createNodeSource() throws Exception { createNodeSource(this.getClass().getSimpleName()); } /** * Creates a Local node source with specified name * @throws Exception * @return expected number of nodes */ public int createNodeSource(String name) throws Exception { createNodeSource(name, RMTHelper.DEFAULT_NODES_NUMBER); return RMTHelper.DEFAULT_NODES_NUMBER; } public void createNodeSource(String name, int nodeNumber) throws Exception { createNodeSource(name, nodeNumber, getResourceManager(), getMonitorsHandler()); } /** * Creates a Local node source with specified name */ public static void createNodeSource(String name, int nodeNumber, ResourceManager rm, RMMonitorsHandler monitor) throws Exception { RMFactory.setOsJavaProperty(); System.out.println("Creating a node source " + name); //first emtpy im parameter is default rm url byte[] creds = FileToBytesConverter.convertFileToByteArray(new File(PAResourceManagerProperties .getAbsolutePath(PAResourceManagerProperties.RM_CREDS.getValueAsString()))); rm.createNodeSource(name, LocalInfrastructure.class.getName(), new 
Object[] { creds, nodeNumber, RMTHelper.DEFAULT_NODES_TIMEOUT, setup.getJvmParameters() + " " + CentralPAPropertyRepository.PA_COMMUNICATION_PROTOCOL.getCmdLine() + "pnp" }, StaticPolicy.class.getName(), null); rm.setNodeSourcePingFrequency(5000, name); waitForNodeSourceCreation(name, nodeNumber, monitor); } /** Wait for the node source to be created when the node source is empty */ public void waitForNodeSourceCreation(String name) { waitForNodeSourceCreation(name, 0); } public static void waitForNodeSourceCreation(String name, int nodeNumber, RMMonitorsHandler monitor) { waitForNodeSourceEvent(RMEventType.NODESOURCE_CREATED, name, monitor); for (int i = 0; i < nodeNumber; i++) { waitForAnyNodeEvent(RMEventType.NODE_ADDED, monitor); waitForAnyNodeEvent(RMEventType.NODE_REMOVED, monitor); waitForAnyNodeEvent(RMEventType.NODE_ADDED, monitor); waitForAnyNodeEvent(RMEventType.NODE_STATE_CHANGED, monitor); } } /** Wait for the node source to be created and the nodes to be connected */ public void waitForNodeSourceCreation(String name, int nodeNumber) { waitForNodeSourceEvent(RMEventType.NODESOURCE_CREATED, name); for (int i = 0; i < nodeNumber; i++) { waitForAnyNodeEvent(RMEventType.NODE_ADDED); waitForAnyNodeEvent(RMEventType.NODE_REMOVED); waitForAnyNodeEvent(RMEventType.NODE_ADDED); waitForAnyNodeEvent(RMEventType.NODE_STATE_CHANGED); } } /** * Create a ProActive Node in a new JVM on the local host * This method can be used to test adding nodes mechanism * with already deploy ProActive nodes. * @param nodeName node's name to create * @return created node URL * @throws IOException if the external JVM cannot be created * @throws NodeException if lookup of the new node fails. 
*/ public static TestNode createNode(String nodeName) throws IOException, NodeException, InterruptedException { return createNode(nodeName, new HashMap<String, String>()); } public static TestNode createNode(String nodeName, Map<String, String> vmParameters) throws IOException, NodeException, InterruptedException { return createNode(nodeName, vmParameters, null); } public static TestNode createNode(String nodeName, int pnpPort) throws IOException, NodeException, InterruptedException { return createNode(nodeName, new HashMap<String, String>(), new ArrayList<String>(), pnpPort); } public List<TestNode> createNodes(final String nodeName, int number) throws IOException, NodeException, ExecutionException, InterruptedException { ArrayList<TestNode> nodes = new ArrayList<>(number); for (int i = 0; i < number; i++) { nodes.add(createNode(nodeName + i, findFreePort())); } return nodes; } public void createNodeSource(int nodesNumber) throws Exception { createNodeSource(nodesNumber, new ArrayList<String>()); } public void createNodeSource(int nodesNumber, List<String> vmOptions) throws Exception { createNodeSource(nodesNumber, vmOptions, getResourceManager(), getMonitorsHandler()); } public static void createNodeSource(int nodesNumber, List<String> vmOptions, ResourceManager resourceManager, RMMonitorsHandler monitor) throws Exception { Map<String, String> map = new HashMap<>(); map.put(CentralPAPropertyRepository.PA_HOME.getName(), CentralPAPropertyRepository.PA_HOME.getValue()); for (int i = 0; i < nodesNumber; i++) { String nodeName = "node-" + i; TestNode node = createNode(nodeName, map, vmOptions); resourceManager.addNode(node.getNode().getNodeInformation().getURL()); } waitForNodeSourceEvent(RMEventType.NODESOURCE_CREATED, NodeSource.DEFAULT, monitor); for (int i = 0; i < nodesNumber; i++) { waitForAnyNodeEvent(RMEventType.NODE_STATE_CHANGED, monitor); } } static int findFreePort() throws IOException { ServerSocket server = new ServerSocket(0); int port = 
server.getLocalPort(); server.close(); return port; } private static TestNode createNode(String nodeName, Map<String, String> vmParameters, List<String> vmOptions) throws IOException, NodeException, InterruptedException { return createNode(nodeName, vmParameters, vmOptions, 0); } /** * Create a ProActive Node in a new JVM on the local host * with specific java parameters. * This method can be used to test adding nodes mechanism * with already deploy ProActive nodes. * @param nodeName node's name to create * @param vmParameters an HashMap containing key and value String * of type :-Dkey=value * @return created node URL * @throws IOException if the external JVM cannot be created * @throws NodeException if lookup of the new node fails. */ private static TestNode createNode(String nodeName, Map<String, String> vmParameters, List<String> vmOptions, int pnpPort) throws IOException, NodeException, InterruptedException { if (pnpPort <= 0) { pnpPort = findFreePort(); } String nodeUrl = "pnp://localhost:" + pnpPort + "/" + nodeName; vmParameters.put(PNPConfig.PA_PNP_PORT.getName(), Integer.toString(pnpPort)); JVMProcessImpl nodeProcess = createJvmProcess(StartNode.class.getName(), Collections.singletonList(nodeName), vmParameters, vmOptions); return createNode(nodeName, nodeUrl, nodeProcess); } public static TestNode createNode(String nodeName, String expectedUrl, JVMProcess nodeProcess) throws IOException, NodeException, InterruptedException { Node newNode = null; final long NODE_START_TIMEOUT_IN_MS = 60000; long startTimeStamp = System.currentTimeMillis(); NodeException toThrow = null; while ((System.currentTimeMillis() - startTimeStamp) < NODE_START_TIMEOUT_IN_MS) { try { newNode = NodeFactory.getNode(expectedUrl); } catch (NodeException e) { toThrow = e; //nothing, wait another loop } if (newNode != null) { return new TestNode(nodeProcess, newNode); } else { Thread.sleep(100); } } throw toThrow == null ? 
new NodeException("unable to create the node " + nodeName) : toThrow; } public static JVMProcessImpl createJvmProcess(String className, List<String> parameters, Map<String, String> vmParameters, List<String> vmOptions) throws IOException { JVMProcessImpl nodeProcess = new JVMProcessImpl( new org.objectweb.proactive.core.process.AbstractExternalProcess.StandardOutputMessageLogger()); nodeProcess.setClassname(className); ArrayList<String> jvmParameters = new ArrayList<>(); if (vmParameters == null) { vmParameters = new HashMap<>(); } vmParameters.put(CentralPAPropertyRepository.PA_COMMUNICATION_PROTOCOL.getName(), "pnp"); if (!vmParameters.containsKey(CentralPAPropertyRepository.PA_HOME.getName())) { vmParameters.put(CentralPAPropertyRepository.PA_HOME.getName(), CentralPAPropertyRepository.PA_HOME.getValue()); } if (!vmParameters.containsKey(PAResourceManagerProperties.RM_HOME.getKey())) { vmParameters.put(PAResourceManagerProperties.RM_HOME.getKey(), PAResourceManagerProperties.RM_HOME.getValueAsString()); } for (Entry<String, String> entry : vmParameters.entrySet()) { if (!entry.getKey().equals("") && !entry.getValue().equals("")) { jvmParameters.add("-D" + entry.getKey() + "=" + entry.getValue()); } } if (vmOptions != null) { jvmParameters.addAll(vmOptions); } jvmParameters.addAll(setup.getJvmParametersAsList()); nodeProcess.setJvmOptions(jvmParameters); nodeProcess.setParameters(parameters); nodeProcess.startProcess(); return nodeProcess; } /** * Returns the list of alive Nodes * @return list of ProActive Nodes urls */ public Set<String> listAliveNodesUrls() throws Exception { return getResourceManager().listAliveNodeUrls(); } public void killRM() throws Exception { if (connectedUser != null) { connectedUser.disconnect(); } rm.kill(); } /** * Wait for an event regarding node sources: created, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. 
* @param nodeSourceEvent awaited event. * @param nodeSourceName corresponding node source name for which an event is awaited. */ public void waitForNodeSourceEvent(RMEventType nodeSourceEvent, String nodeSourceName) { waitForNodeSourceEvent(nodeSourceEvent, nodeSourceName, getMonitorsHandler()); } public static void waitForNodeSourceEvent(RMEventType nodeSourceEvent, String nodeSourceName, RMMonitorsHandler monitorsHandler) { try { waitForNodeSourceEvent(nodeSourceEvent, nodeSourceName, 0, monitorsHandler); } catch (ProActiveTimeoutException e) { //unreachable block, 0 means infinite, no timeout //log something ? } } /** * Wait for an event regarding node sources: created, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. * @param eventType awaited event. * @param nodeSourceName corresponding node source name for which an event is awaited. * @param timeout in milliseconds * @throws ProActiveTimeoutException if timeout is reached */ public void waitForNodeSourceEvent(RMEventType eventType, String nodeSourceName, long timeout) throws ProActiveTimeoutException { waitForNodeSourceEvent(eventType, nodeSourceName, timeout, getMonitorsHandler()); } public static void waitForNodeSourceEvent(RMEventType eventType, String nodeSourceName, long timeout, RMMonitorsHandler monitorsHandler) throws ProActiveTimeoutException { monitorsHandler.waitForNodesourceEvent(eventType, nodeSourceName, timeout); } /** * Wait for an event on a specific node : created, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. * @param nodeEvent awaited event. * @param nodeUrl Url's of the node for which a new state is awaited. * @return RMNodeEvent object received by event receiver. 
*/ public RMNodeEvent waitForNodeEvent(RMEventType nodeEvent, String nodeUrl) { try { return waitForNodeEvent(nodeEvent, nodeUrl, 0); } catch (ProActiveTimeoutException e) { //unreachable block, 0 means infinite, no timeout //log string ? return null; } } /** * Wait for an event on a specific node : created, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. * @param eventType awaited event. * @param nodeUrl Url's of the node for which a new state is awaited * @param timeout in milliseconds * @return RMNodeEvent object received by event receiver. * @throws ProActiveTimeoutException if timeout is reached */ public RMNodeEvent waitForNodeEvent(RMEventType eventType, String nodeUrl, long timeout) throws ProActiveTimeoutException { return waitForNodeEvent(eventType, nodeUrl, timeout, getMonitorsHandler()); } public static RMNodeEvent waitForNodeEvent(RMEventType eventType, String nodeUrl, long timeout, RMMonitorsHandler monitorsHandler) throws ProActiveTimeoutException { return monitorsHandler.waitForNodeEvent(eventType, nodeUrl, timeout); } /** * Wait for an event on any node: added, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. * @param eventType awaited event. * @return RMNodeEvent object received by event receiver. */ public RMNodeEvent waitForAnyNodeEvent(RMEventType eventType) { return waitForAnyNodeEvent(eventType, getMonitorsHandler()); } public static RMNodeEvent waitForAnyNodeEvent(RMEventType eventType, RMMonitorsHandler monitorsHandler) { try { return waitForAnyNodeEvent(eventType, 0, monitorsHandler); } catch (ProActiveTimeoutException e) { //unreachable block, 0 means infinite, no timeout //log sthing ? 
return null; } } /** * Kills the node with specified url * @param url of the node * @throws NodeException if node cannot be looked up */ public static void killNode(String url) throws NodeException { Node node = NodeFactory.getNode(url); try { node.getProActiveRuntime().killRT(false); } catch (Exception ignored) { } } /** * Wait for an event on any node: added, removed.... * If a corresponding event has been already thrown by RM, returns immediately, * otherwise wait for reception of the corresponding event. * @param eventType awaited event. * @param timeout in milliseconds * @return RMNodeEvent object received by event receiver. * @throws ProActiveTimeoutException if timeout is reached */ public RMNodeEvent waitForAnyNodeEvent(RMEventType eventType, long timeout) throws ProActiveTimeoutException { return waitForAnyNodeEvent(eventType, timeout, getMonitorsHandler()); } public static RMNodeEvent waitForAnyNodeEvent(RMEventType eventType, long timeout, RMMonitorsHandler monitorsHandler) throws ProActiveTimeoutException { return monitorsHandler.waitForAnyNodeEvent(eventType, timeout); } public void startRM(String configurationFile) throws Exception { startRM(configurationFile, TestRM.PA_PNP_PORT); } /** * Start the RM using a forked JVM * * @param configurationFile the RM's configuration file to use (default is functionalTSchedulerProperties.ini) * null to use the default one. * @throws Exception if an error occurs. */ public String startRM(String configurationFile, int pnpPort, String... 
jvmArgs) throws Exception { if (!rm.isStartedWithSameConfiguration(configurationFile)) { log("Starting RM"); rm.start(configurationFile, pnpPort, jvmArgs); currentTestConfiguration = configurationFile; } return rm.getUrl(); } public ResourceManager getResourceManager() throws Exception { return getResourceManager(TestUsers.TEST); } /** * Idem than getResourceManager but allow to specify a propertyFile * @return the resource manager * @throws Exception */ public ResourceManager getResourceManager(TestUsers user) throws Exception { startRM(currentTestConfiguration, TestRM.PA_PNP_PORT); if (!connectedUser.is(user)) { // changing user on the fly if (connectedUser != null) { connectedUser.disconnect(); } connectedUser = new RMTestUser(user); connectedUser.connect(rm.getAuth()); } if (!connectedUser.isConnected()) { connectedUser.connect(rm.getAuth()); } return connectedUser.getResourceManager(); } public static String getLocalUrl() { return rm.getUrl(); } public RMMonitorsHandler getMonitorsHandler() { return connectedUser.getMonitorsHandler(); } public RMMonitorEventReceiver getEventReceiver() { return connectedUser.getEventReceiver(); } public RMAuthentication getRMAuth() throws Exception { startRM(currentTestConfiguration, TestRM.PA_PNP_PORT); return rm.getAuth(); } public void disconnect() throws Exception { connectedUser.disconnect(); } }
Bigger timeouts for nodes deployment
rm/rm-server/src/test/java/functionaltests/utils/RMTHelper.java
Bigger timeouts for nodes deployment
<ide><path>m/rm-server/src/test/java/functionaltests/utils/RMTHelper.java <ide> /** <ide> * Timeout for local infrastructure <ide> */ <del> public static final int DEFAULT_NODES_TIMEOUT = 60 * 1000; //60s <add> public static final int DEFAULT_NODES_TIMEOUT = 120 * 1000; //120s <ide> <ide> /** <ide> * Number of nodes deployed with default deployment descriptor <ide> throws IOException, NodeException, InterruptedException { <ide> Node newNode = null; <ide> <del> final long NODE_START_TIMEOUT_IN_MS = 60000; <add> final long NODE_START_TIMEOUT_IN_MS = 120000; <ide> long startTimeStamp = System.currentTimeMillis(); <ide> <ide> NodeException toThrow = null;
Java
apache-2.0
3311dc6b1d7f1705016c8fe576bd47a5f21e3e22
0
torakiki/fx-progress-circle
/* * Copyright (c) 2014, Andrea Vacondio * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.pdfsam.ui; import javafx.animation.KeyFrame; import javafx.animation.KeyValue; import javafx.animation.Timeline; import javafx.scene.Node; import javafx.scene.control.Label; import javafx.scene.control.Skin; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.Region; import javafx.scene.layout.StackPane; import javafx.scene.shape.Circle; import javafx.scene.shape.Rectangle; import javafx.util.Duration; /** * Skin for the {@link FillProgressIndicator} that represents progress and a circle that fills * * @author Andrea Vacondio * */ public class FillProgressIndicatorSkin implements Skin<FillProgressIndicator> { private final FillProgressIndicator indicator; private final StackPane container = new StackPane(); private final Label percentLabel = new Label(); private final Rectangle cover = new Rectangle(); private final Circle borderCircle = new Circle(); private final Circle fillerCircle = new Circle(); private final Timeline transition = new Timeline(); public FillProgressIndicatorSkin(final FillProgressIndicator indicator) { this.indicator = indicator; initContainer(indicator); updateRadii(); initStyles(); AnchorPane coverPane = new AnchorPane(); cover.getStyleClass().add("fillindicator-filler-cover"); cover.widthProperty().bind(coverPane.widthProperty()); cover.setManaged(false); AnchorPane.setTopAnchor(cover, 0.0); AnchorPane.setLeftAnchor(cover, 0.0); 
AnchorPane.setRightAnchor(cover, 0.0); coverPane.getChildren().addAll(cover); this.indicator.indeterminateProperty().addListener((o, oldVal, newVal) -> { initIndeterminate(newVal); }); this.indicator.progressProperty().addListener((o, oldVal, newVal) -> { setProgressLabel(newVal.intValue()); this.cover.setHeight(coverPane.getHeight() * ((100 - newVal.intValue()) / 100d)); }); this.indicator.innerCircleRadiusProperty().addListener((e) -> { updateRadii(); }); coverPane.heightProperty().addListener((o, oldVal, newVal) -> { this.cover.setHeight(newVal.intValue() * ((100 - indicator.getProgress()) / 100d)); }); initLabel(indicator.getProgress()); indicator.visibleProperty().addListener((o, oldVal, newVal) -> { if (newVal && this.indicator.isIndeterminate()) { transition.play(); } else { transition.pause(); } }); this.container.getChildren().addAll(fillerCircle, coverPane, borderCircle, percentLabel); initTransition(); initIndeterminate(indicator.isIndeterminate()); } private void initContainer(final FillProgressIndicator indicator) { container.getStylesheets().addAll(indicator.getStylesheets()); container.getStyleClass().addAll("circleindicator-container"); container.setMaxHeight(Region.USE_PREF_SIZE); container.setMaxWidth(Region.USE_PREF_SIZE); } private void initTransition() { transition.setCycleCount(Timeline.INDEFINITE); transition.setAutoReverse(true); final KeyValue kv = new KeyValue(this.cover.heightProperty(), 0); transition.getKeyFrames().addAll(new KeyFrame(Duration.millis(1500), kv)); } private void initStyles() { fillerCircle.getStyleClass().add("fillindicator-filler-circle"); borderCircle.getStyleClass().add("fillindicator-border-circle"); } private void updateRadii() { fillerCircle.setRadius(this.indicator.getInnerCircleRadius() + 5); borderCircle.setRadius(this.indicator.getInnerCircleRadius()); } private void initLabel(int value) { setProgressLabel(value); percentLabel.getStyleClass().add("circleindicator-label"); } private void setProgressLabel(int 
value) { if (value >= 0) { percentLabel.setText(String.format("%d%%", value)); } } private void initIndeterminate(boolean newVal) { percentLabel.setVisible(!newVal); if (newVal && indicator.isVisible()) { transition.play(); } else { transition.stop(); } } @Override public FillProgressIndicator getSkinnable() { return indicator; } @Override public Node getNode() { return this.container; } @Override public void dispose() { transition.stop(); } }
src/main/java/org/pdfsam/ui/FillProgressIndicatorSkin.java
/* * Copyright (c) 2014, Andrea Vacondio * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.pdfsam.ui; import javafx.animation.KeyFrame; import javafx.animation.KeyValue; import javafx.animation.Timeline; import javafx.scene.Node; import javafx.scene.control.Label; import javafx.scene.control.Skin; import javafx.scene.layout.AnchorPane; import javafx.scene.layout.Region; import javafx.scene.layout.StackPane; import javafx.scene.shape.Circle; import javafx.scene.shape.Rectangle; import javafx.util.Duration; /** * Skin for the {@link FillProgressIndicator} that represents progress and a circle that fills * * @author Andrea Vacondio * */ public class FillProgressIndicatorSkin implements Skin<FillProgressIndicator> { private final FillProgressIndicator indicator; private final StackPane container = new StackPane(); private final Label percentLabel = new Label(); private final Rectangle cover = new Rectangle(); private final Circle borderCircle = new Circle(); private final Circle fillerCircle = new Circle(); private final Timeline transition = new Timeline(); public FillProgressIndicatorSkin(final FillProgressIndicator indicator) { this.indicator = indicator; initContainer(indicator); updateRadii(); initStyles(); AnchorPane coverPane = new AnchorPane(); cover.getStyleClass().add("fillindicator-filler-cover"); cover.widthProperty().bind(coverPane.widthProperty()); cover.setManaged(false); AnchorPane.setTopAnchor(cover, 0.0); AnchorPane.setLeftAnchor(cover, 0.0); 
AnchorPane.setRightAnchor(cover, 0.0); cover.setHeight(fillerCircle.getRadius() * 2); coverPane.getChildren().addAll(cover); this.indicator.indeterminateProperty().addListener((o, oldVal, newVal) -> { initIndeterminate(newVal); }); this.indicator.progressProperty().addListener((o, oldVal, newVal) -> { setProgressLabel(newVal.intValue()); this.cover.setHeight(coverPane.getHeight() * ((100 - newVal.intValue()) / 100d)); }); this.indicator.innerCircleRadiusProperty().addListener((e) -> { updateRadii(); }); initLabel(indicator.getProgress()); indicator.visibleProperty().addListener((o, oldVal, newVal) -> { if (newVal && this.indicator.isIndeterminate()) { transition.play(); } else { transition.pause(); } }); this.container.getChildren().addAll(fillerCircle, coverPane, borderCircle, percentLabel); initTransition(); initIndeterminate(indicator.isIndeterminate()); } private void initContainer(final FillProgressIndicator indicator) { container.getStylesheets().addAll(indicator.getStylesheets()); container.getStyleClass().addAll("circleindicator-container"); container.setMaxHeight(Region.USE_PREF_SIZE); container.setMaxWidth(Region.USE_PREF_SIZE); } private void initTransition() { transition.setCycleCount(Timeline.INDEFINITE); transition.setAutoReverse(true); final KeyValue kv = new KeyValue(this.cover.heightProperty(), 0); transition.getKeyFrames().addAll(new KeyFrame(Duration.millis(1500), kv)); } private void initStyles() { fillerCircle.getStyleClass().add("fillindicator-filler-circle"); borderCircle.getStyleClass().add("fillindicator-border-circle"); } private void updateRadii() { fillerCircle.setRadius(this.indicator.getInnerCircleRadius() + 5); borderCircle.setRadius(this.indicator.getInnerCircleRadius()); } private void initLabel(int value) { setProgressLabel(value); percentLabel.getStyleClass().add("circleindicator-label"); } private void setProgressLabel(int value) { if (value >= 0) { percentLabel.setText(String.format("%d%%", value)); } } private void 
initIndeterminate(boolean newVal) { percentLabel.setVisible(!newVal); if (newVal && indicator.isVisible()) { transition.play(); } else { transition.stop(); } } @Override public FillProgressIndicator getSkinnable() { return indicator; } @Override public Node getNode() { return this.container; } @Override public void dispose() { transition.stop(); } }
cover height was set to 0 when the skin was created, no matter what the progress value was
src/main/java/org/pdfsam/ui/FillProgressIndicatorSkin.java
cover height was set to 0 when the skin was created, no matter what the progress value was
<ide><path>rc/main/java/org/pdfsam/ui/FillProgressIndicatorSkin.java <ide> AnchorPane.setTopAnchor(cover, 0.0); <ide> AnchorPane.setLeftAnchor(cover, 0.0); <ide> AnchorPane.setRightAnchor(cover, 0.0); <del> cover.setHeight(fillerCircle.getRadius() * 2); <ide> coverPane.getChildren().addAll(cover); <ide> <ide> this.indicator.indeterminateProperty().addListener((o, oldVal, newVal) -> { <ide> this.indicator.innerCircleRadiusProperty().addListener((e) -> { <ide> updateRadii(); <ide> }); <add> coverPane.heightProperty().addListener((o, oldVal, newVal) -> { <add> this.cover.setHeight(newVal.intValue() * ((100 - indicator.getProgress()) / 100d)); <add> }); <ide> initLabel(indicator.getProgress()); <ide> indicator.visibleProperty().addListener((o, oldVal, newVal) -> { <ide> if (newVal && this.indicator.isIndeterminate()) { <ide> }); <ide> <ide> this.container.getChildren().addAll(fillerCircle, coverPane, borderCircle, percentLabel); <del> <ide> initTransition(); <ide> initIndeterminate(indicator.isIndeterminate()); <ide> }
Java
apache-2.0
a538b76f6f1bb486aff903dce5f1511de868c5bb
0
HonzaKral/elasticsearch,gfyoung/elasticsearch,nknize/elasticsearch,scorpionvicky/elasticsearch,gfyoung/elasticsearch,gingerwizard/elasticsearch,HonzaKral/elasticsearch,coding0011/elasticsearch,robin13/elasticsearch,robin13/elasticsearch,nknize/elasticsearch,gingerwizard/elasticsearch,scorpionvicky/elasticsearch,robin13/elasticsearch,HonzaKral/elasticsearch,gingerwizard/elasticsearch,gfyoung/elasticsearch,coding0011/elasticsearch,coding0011/elasticsearch,uschindler/elasticsearch,uschindler/elasticsearch,GlenRSmith/elasticsearch,gingerwizard/elasticsearch,uschindler/elasticsearch,gfyoung/elasticsearch,nknize/elasticsearch,GlenRSmith/elasticsearch,coding0011/elasticsearch,uschindler/elasticsearch,scorpionvicky/elasticsearch,robin13/elasticsearch,HonzaKral/elasticsearch,gingerwizard/elasticsearch,scorpionvicky/elasticsearch,scorpionvicky/elasticsearch,GlenRSmith/elasticsearch,GlenRSmith/elasticsearch,coding0011/elasticsearch,robin13/elasticsearch,uschindler/elasticsearch,nknize/elasticsearch,gingerwizard/elasticsearch,nknize/elasticsearch,GlenRSmith/elasticsearch,gfyoung/elasticsearch,gingerwizard/elasticsearch
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.shard; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.index.IndexRequest; import 
org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexSettings; 
import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.fielddata.FieldDataStats; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.index.translog.TestTranslog; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogTests; import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.indices.recovery.RecoveryTarget; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.Repository; import 
org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotShardFailure; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.ElasticsearchException; import java.io.IOException; import java.nio.charset.Charset; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.LongFunction; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; import static org.elasticsearch.common.lucene.Lucene.cleanLuceneIndex; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; import static 
org.elasticsearch.test.hamcrest.RegexMatcher.matches;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;

/**
 * Simple unit-test IndexShard related operations.
 */
public class IndexShardTests extends IndexShardTestCase {

    /**
     * Loads the latest {@link ShardStateMetaData} found under any of the given shard paths.
     */
    public static ShardStateMetaData load(Logger logger, Path... shardPaths) throws IOException {
        return ShardStateMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, shardPaths);
    }

    /**
     * Persists the given {@link ShardStateMetaData} to the given shard paths.
     */
    public static void write(ShardStateMetaData shardStateMetaData, Path... shardPaths) throws IOException {
        ShardStateMetaData.FORMAT.write(shardStateMetaData, shardPaths);
    }

    /**
     * Test-only accessor for a shard's engine; delegates to {@code getEngineOrNull}, so the
     * return value may be null (e.g. before recovery has created an engine or after close).
     */
    public static Engine getEngineFromShard(IndexShard shard) {
        return shard.getEngineOrNull();
    }

    // Round-trips shard state metadata through disk: whatever state was written last is what
    // load() must return, including the index UUID.
    public void testWriteShardState() throws Exception {
        try (NodeEnvironment env = newNodeEnvironment()) {
            ShardId id = new ShardId("foo", "fooUUID", 1);
            boolean primary = randomBoolean();
            // allocation id may legitimately be absent
            AllocationId allocationId = randomBoolean() ?
null : randomAllocationId();
            ShardStateMetaData state1 = new ShardStateMetaData(primary, "fooUUID", allocationId);
            write(state1, env.availableShardPaths(id));
            ShardStateMetaData shardStateMetaData = load(logger, env.availableShardPaths(id));
            assertEquals(shardStateMetaData, state1);

            // writing an equal state again must still load as the original state
            ShardStateMetaData state2 = new ShardStateMetaData(primary, "fooUUID", allocationId);
            write(state2, env.availableShardPaths(id));
            shardStateMetaData = load(logger, env.availableShardPaths(id));
            assertEquals(shardStateMetaData, state1);

            ShardStateMetaData state3 = new ShardStateMetaData(primary, "fooUUID", allocationId);
            write(state3, env.availableShardPaths(id));
            shardStateMetaData = load(logger, env.availableShardPaths(id));
            assertEquals(shardStateMetaData, state3);
            assertEquals("fooUUID", state3.indexUUID);
        }
    }

    // The on-disk shard state file must track routing changes applied via updateRoutingEntry,
    // both for a plain re-apply of the current routing and for a relocation.
    public void testPersistenceStateMetadataPersistence() throws Exception {
        IndexShard shard = newStartedShard();
        final Path shardStatePath = shard.shardPath().getShardStatePath();
        ShardStateMetaData shardStateMetaData = load(logger, shardStatePath);
        assertEquals(getShardStateMetadata(shard), shardStateMetaData);
        ShardRouting routing = shard.shardRouting;
        IndexShardTestCase.updateRoutingEntry(shard, routing);

        shardStateMetaData = load(logger, shardStatePath);
        assertEquals(shardStateMetaData, getShardStateMetadata(shard));
        assertEquals(shardStateMetaData,
            new ShardStateMetaData(routing.primary(), shard.indexSettings().getUUID(), routing.allocationId()));

        // relocating the shard must be reflected in the persisted state as well
        routing = TestShardRouting.relocate(shard.shardRouting, "some node", 42L);
        IndexShardTestCase.updateRoutingEntry(shard, routing);
        shardStateMetaData = load(logger, shardStatePath);
        assertEquals(shardStateMetaData, getShardStateMetadata(shard));
        assertEquals(shardStateMetaData,
            new ShardStateMetaData(routing.primary(), shard.indexSettings().getUUID(), routing.allocationId()));
        closeShards(shard);
    }

    // A failed shard must keep its state file on disk, but its Lucene index must no longer be openable.
    public void testFailShard() throws Exception {
        allowShardFailures();
        IndexShard shard = newStartedShard();
        final ShardPath shardPath =
shard.shardPath(); assertNotNull(shardPath); // fail shard shard.failShard("test shard fail", new CorruptIndexException("", "")); shard.close("do not assert history", false); shard.store().close(); // check state file still exists ShardStateMetaData shardStateMetaData = load(logger, shardPath.getShardStatePath()); assertEquals(shardStateMetaData, getShardStateMetadata(shard)); // but index can't be opened for a failed shard assertThat("store index should be corrupted", Store.canOpenIndex(logger, shardPath.resolveIndex(), shard.shardId(), (shardId, lockTimeoutMS) -> new DummyShardLock(shardId)), equalTo(false)); } ShardStateMetaData getShardStateMetadata(IndexShard shard) { ShardRouting shardRouting = shard.routingEntry(); if (shardRouting == null) { return null; } else { return new ShardStateMetaData(shardRouting.primary(), shard.indexSettings().getUUID(), shardRouting.allocationId()); } } private AllocationId randomAllocationId() { AllocationId allocationId = AllocationId.newInitializing(); if (randomBoolean()) { allocationId = AllocationId.newRelocation(allocationId); } return allocationId; } public void testShardStateMetaHashCodeEquals() { AllocationId allocationId = randomBoolean() ? 
null : randomAllocationId(); ShardStateMetaData meta = new ShardStateMetaData(randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), allocationId); assertEquals(meta, new ShardStateMetaData(meta.primary, meta.indexUUID, meta.allocationId)); assertEquals(meta.hashCode(), new ShardStateMetaData(meta.primary, meta.indexUUID, meta.allocationId).hashCode()); assertFalse(meta.equals(new ShardStateMetaData(!meta.primary, meta.indexUUID, meta.allocationId))); assertFalse(meta.equals(new ShardStateMetaData(!meta.primary, meta.indexUUID + "foo", meta.allocationId))); assertFalse(meta.equals(new ShardStateMetaData(!meta.primary, meta.indexUUID + "foo", randomAllocationId()))); Set<Integer> hashCodes = new HashSet<>(); for (int i = 0; i < 30; i++) { // just a sanity check that we impl hashcode allocationId = randomBoolean() ? null : randomAllocationId(); meta = new ShardStateMetaData(randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), allocationId); hashCodes.add(meta.hashCode()); } assertTrue("more than one unique hashcode expected but got: " + hashCodes.size(), hashCodes.size() > 1); } public void testClosesPreventsNewOperations() throws InterruptedException, ExecutionException, IOException { IndexShard indexShard = newStartedShard(); closeShards(indexShard); assertThat(indexShard.getActiveOperationsCount(), equalTo(0)); try { indexShard.acquirePrimaryOperationPermit(null, ThreadPool.Names.WRITE, ""); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected } try { indexShard.acquireReplicaOperationPermit(indexShard.getPrimaryTerm(), SequenceNumbers.UNASSIGNED_SEQ_NO, null, ThreadPool.Names.WRITE, ""); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected } } public void testRejectOperationPermitWithHigherTermWhenNotStarted() throws IOException { IndexShard indexShard = newShard(false); expectThrows(IndexShardNotStartedException.class, () -> 
indexShard.acquireReplicaOperationPermit(indexShard.getPrimaryTerm() + randomIntBetween(1, 100), SequenceNumbers.UNASSIGNED_SEQ_NO, null, ThreadPool.Names.WRITE, "")); closeShards(indexShard); } public void testPrimaryPromotionDelaysOperations() throws IOException, BrokenBarrierException, InterruptedException { final IndexShard indexShard = newShard(false); recoveryEmptyReplica(indexShard, randomBoolean()); final int operations = scaledRandomIntBetween(1, 64); final CyclicBarrier barrier = new CyclicBarrier(1 + operations); final CountDownLatch latch = new CountDownLatch(operations); final CountDownLatch operationLatch = new CountDownLatch(1); final List<Thread> threads = new ArrayList<>(); for (int i = 0; i < operations; i++) { final String id = "t_" + i; final Thread thread = new Thread(() -> { try { barrier.await(); } catch (final BrokenBarrierException | InterruptedException e) { throw new RuntimeException(e); } indexShard.acquireReplicaOperationPermit( indexShard.getPrimaryTerm(), indexShard.getGlobalCheckpoint(), new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { latch.countDown(); try { operationLatch.await(); } catch (final InterruptedException e) { throw new RuntimeException(e); } releasable.close(); } @Override public void onFailure(Exception e) { throw new RuntimeException(e); } }, ThreadPool.Names.WRITE, id); }); thread.start(); threads.add(thread); } barrier.await(); latch.await(); final ShardRouting replicaRouting = indexShard.routingEntry(); promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()), new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); final int delayedOperations = scaledRandomIntBetween(1, 64); final CyclicBarrier delayedOperationsBarrier = new CyclicBarrier(1 + delayedOperations); final CountDownLatch delayedOperationsLatch = new CountDownLatch(delayedOperations); final AtomicLong counter = new AtomicLong(); final 
List<Thread> delayedThreads = new ArrayList<>(); for (int i = 0; i < delayedOperations; i++) { final String id = "d_" + i; final Thread thread = new Thread(() -> { try { delayedOperationsBarrier.await(); } catch (final BrokenBarrierException | InterruptedException e) { throw new RuntimeException(e); } indexShard.acquirePrimaryOperationPermit( new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { counter.incrementAndGet(); releasable.close(); delayedOperationsLatch.countDown(); } @Override public void onFailure(Exception e) { throw new RuntimeException(e); } }, ThreadPool.Names.WRITE, id); }); thread.start(); delayedThreads.add(thread); } delayedOperationsBarrier.await(); assertThat(counter.get(), equalTo(0L)); operationLatch.countDown(); for (final Thread thread : threads) { thread.join(); } delayedOperationsLatch.await(); assertThat(counter.get(), equalTo((long) delayedOperations)); for (final Thread thread : delayedThreads) { thread.join(); } closeShards(indexShard); } /** * This test makes sure that people can use the shard routing entry to check whether a shard was already promoted to * a primary. 
Concretely this means, that when we publish the routing entry via {@link IndexShard#routingEntry()} the following * should have happened * 1) Internal state (ala ReplicationTracker) have been updated * 2) Primary term is set to the new term */ public void testPublishingOrderOnPromotion() throws IOException, InterruptedException, BrokenBarrierException { final IndexShard indexShard = newShard(false); recoveryEmptyReplica(indexShard, randomBoolean()); final long promotedTerm = indexShard.getPrimaryTerm() + 1; final CyclicBarrier barrier = new CyclicBarrier(2); final AtomicBoolean stop = new AtomicBoolean(); final Thread thread = new Thread(() -> { try { barrier.await(); } catch (final BrokenBarrierException | InterruptedException e) { throw new RuntimeException(e); } while(stop.get() == false) { if (indexShard.routingEntry().primary()) { assertThat(indexShard.getPrimaryTerm(), equalTo(promotedTerm)); assertThat(indexShard.getReplicationGroup(), notNullValue()); } } }); thread.start(); barrier.await(); final ShardRouting replicaRouting = indexShard.routingEntry(); promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()), new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); stop.set(true); thread.join(); closeShards(indexShard); } public void testPrimaryFillsSeqNoGapsOnPromotion() throws Exception { final IndexShard indexShard = newShard(false); recoveryEmptyReplica(indexShard, randomBoolean()); // most of the time this is large enough that most of the time there will be at least one gap final int operations = 1024 - scaledRandomIntBetween(0, 1024); final Result result = indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED)); final int maxSeqNo = result.maxSeqNo; final boolean gap = result.gap; // promote the replica final ShardRouting replicaRouting = indexShard.routingEntry(); promoteReplica(indexShard, 
Collections.singleton(replicaRouting.allocationId().getId()), new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); /* * This operation completing means that the delay operation executed as part of increasing the primary term has completed and the * gaps are filled. */ final CountDownLatch latch = new CountDownLatch(1); indexShard.acquirePrimaryOperationPermit( new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { releasable.close(); latch.countDown(); } @Override public void onFailure(Exception e) { throw new AssertionError(e); } }, ThreadPool.Names.GENERIC, ""); latch.await(); assertThat(indexShard.getLocalCheckpoint(), equalTo((long) maxSeqNo)); closeShards(indexShard); } public void testPrimaryPromotionRollsGeneration() throws Exception { final IndexShard indexShard = newStartedShard(false); final long currentTranslogGeneration = getTranslog(indexShard).getGeneration().translogFileGeneration; // promote the replica final ShardRouting replicaRouting = indexShard.routingEntry(); final long newPrimaryTerm = indexShard.getPrimaryTerm() + between(1, 10000); final ShardRouting primaryRouting = newShardRouting( replicaRouting.shardId(), replicaRouting.currentNodeId(), null, true, ShardRoutingState.STARTED, replicaRouting.allocationId()); indexShard.updateShardState(primaryRouting, newPrimaryTerm, (shard, listener) -> {}, 0L, Collections.singleton(primaryRouting.allocationId().getId()), new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(), Collections.emptySet()); /* * This operation completing means that the delay operation executed as part of increasing the primary term has completed and the * translog generation has rolled. 
*/ final CountDownLatch latch = new CountDownLatch(1); indexShard.acquirePrimaryOperationPermit( new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { releasable.close(); latch.countDown(); } @Override public void onFailure(Exception e) { throw new RuntimeException(e); } }, ThreadPool.Names.GENERIC, ""); latch.await(); assertThat(getTranslog(indexShard).getGeneration().translogFileGeneration, equalTo(currentTranslogGeneration + 1)); assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(newPrimaryTerm)); closeShards(indexShard); } public void testOperationPermitsOnPrimaryShards() throws InterruptedException, ExecutionException, IOException { final ShardId shardId = new ShardId("test", "_na_", 0); final IndexShard indexShard; if (randomBoolean()) { // relocation target indexShard = newShard(newShardRouting(shardId, "local_node", "other node", true, ShardRoutingState.INITIALIZING, AllocationId.newRelocation(AllocationId.newInitializing()))); } else if (randomBoolean()) { // simulate promotion indexShard = newStartedShard(false); ShardRouting replicaRouting = indexShard.routingEntry(); ShardRouting primaryRouting = newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), null, true, ShardRoutingState.STARTED, replicaRouting.allocationId()); final long newPrimaryTerm = indexShard.getPrimaryTerm() + between(1, 1000); indexShard.updateShardState(primaryRouting, newPrimaryTerm, (shard, listener) -> { assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(newPrimaryTerm)); }, 0L, Collections.singleton(indexShard.routingEntry().allocationId().getId()), new IndexShardRoutingTable.Builder(indexShard.shardId()).addShard(primaryRouting).build(), Collections.emptySet()); } else { indexShard = newStartedShard(true); } final long primaryTerm = indexShard.getPrimaryTerm(); assertEquals(0, indexShard.getActiveOperationsCount()); if (indexShard.routingEntry().isRelocationTarget() == false) { 
try { indexShard.acquireReplicaOperationPermit(primaryTerm, indexShard.getGlobalCheckpoint(), null, ThreadPool.Names.WRITE, ""); fail("shard shouldn't accept operations as replica"); } catch (IllegalStateException ignored) { } } Releasable operation1 = acquirePrimaryOperationPermitBlockingly(indexShard); assertEquals(1, indexShard.getActiveOperationsCount()); Releasable operation2 = acquirePrimaryOperationPermitBlockingly(indexShard); assertEquals(2, indexShard.getActiveOperationsCount()); Releasables.close(operation1, operation2); assertEquals(0, indexShard.getActiveOperationsCount()); closeShards(indexShard); } private Releasable acquirePrimaryOperationPermitBlockingly(IndexShard indexShard) throws ExecutionException, InterruptedException { PlainActionFuture<Releasable> fut = new PlainActionFuture<>(); indexShard.acquirePrimaryOperationPermit(fut, ThreadPool.Names.WRITE, ""); return fut.get(); } private Releasable acquireReplicaOperationPermitBlockingly(IndexShard indexShard, long opPrimaryTerm) throws ExecutionException, InterruptedException { PlainActionFuture<Releasable> fut = new PlainActionFuture<>(); indexShard.acquireReplicaOperationPermit(opPrimaryTerm, indexShard.getGlobalCheckpoint(), fut, ThreadPool.Names.WRITE, ""); return fut.get(); } public void testOperationPermitOnReplicaShards() throws Exception { final ShardId shardId = new ShardId("test", "_na_", 0); final IndexShard indexShard; final boolean engineClosed; switch (randomInt(2)) { case 0: // started replica indexShard = newStartedShard(false); engineClosed = false; break; case 1: { // initializing replica / primary final boolean relocating = randomBoolean(); ShardRouting routing = newShardRouting(shardId, "local_node", relocating ? "sourceNode" : null, relocating ? randomBoolean() : false, ShardRoutingState.INITIALIZING, relocating ? 
AllocationId.newRelocation(AllocationId.newInitializing()) : AllocationId.newInitializing()); indexShard = newShard(routing); engineClosed = true; break; } case 2: { // relocation source indexShard = newStartedShard(true); ShardRouting routing = indexShard.routingEntry(); routing = newShardRouting(routing.shardId(), routing.currentNodeId(), "otherNode", true, ShardRoutingState.RELOCATING, AllocationId.newRelocation(routing.allocationId())); IndexShardTestCase.updateRoutingEntry(indexShard, routing); indexShard.relocated(primaryContext -> {}); engineClosed = false; break; } default: throw new UnsupportedOperationException("get your numbers straight"); } final ShardRouting shardRouting = indexShard.routingEntry(); logger.info("shard routing to {}", shardRouting); assertEquals(0, indexShard.getActiveOperationsCount()); if (shardRouting.primary() == false) { final IllegalStateException e = expectThrows(IllegalStateException.class, () -> indexShard.acquirePrimaryOperationPermit(null, ThreadPool.Names.WRITE, "")); assertThat(e, hasToString(containsString("shard " + shardRouting + " is not a primary"))); } final long primaryTerm = indexShard.getPrimaryTerm(); final long translogGen = engineClosed ? 
-1 : getTranslog(indexShard).getGeneration().translogFileGeneration; final Releasable operation1; final Releasable operation2; if (engineClosed == false) { operation1 = acquireReplicaOperationPermitBlockingly(indexShard, primaryTerm); assertEquals(1, indexShard.getActiveOperationsCount()); operation2 = acquireReplicaOperationPermitBlockingly(indexShard, primaryTerm); assertEquals(2, indexShard.getActiveOperationsCount()); } else { operation1 = null; operation2 = null; } { final AtomicBoolean onResponse = new AtomicBoolean(); final AtomicBoolean onFailure = new AtomicBoolean(); final AtomicReference<Exception> onFailureException = new AtomicReference<>(); ActionListener<Releasable> onLockAcquired = new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { onResponse.set(true); } @Override public void onFailure(Exception e) { onFailure.set(true); onFailureException.set(e); } }; indexShard.acquireReplicaOperationPermit(primaryTerm - 1, SequenceNumbers.UNASSIGNED_SEQ_NO, onLockAcquired, ThreadPool.Names.WRITE, ""); assertFalse(onResponse.get()); assertTrue(onFailure.get()); assertThat(onFailureException.get(), instanceOf(IllegalStateException.class)); assertThat( onFailureException.get(), hasToString(containsString("operation primary term [" + (primaryTerm - 1) + "] is too old"))); } { final AtomicBoolean onResponse = new AtomicBoolean(); final AtomicReference<Exception> onFailure = new AtomicReference<>(); final CyclicBarrier barrier = new CyclicBarrier(2); final long newPrimaryTerm = primaryTerm + 1 + randomInt(20); if (engineClosed == false) { assertThat(indexShard.getLocalCheckpoint(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); assertThat(indexShard.getGlobalCheckpoint(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); } final long newGlobalCheckPoint; if (engineClosed || randomBoolean()) { newGlobalCheckPoint = SequenceNumbers.NO_OPS_PERFORMED; } else { long localCheckPoint = indexShard.getGlobalCheckpoint() + randomInt(100); // 
advance local checkpoint for (int i = 0; i <= localCheckPoint; i++) { indexShard.markSeqNoAsNoop(i, "dummy doc"); } newGlobalCheckPoint = randomIntBetween((int) indexShard.getGlobalCheckpoint(), (int) localCheckPoint); } final long expectedLocalCheckpoint; if (newGlobalCheckPoint == SequenceNumbers.UNASSIGNED_SEQ_NO) { expectedLocalCheckpoint = SequenceNumbers.NO_OPS_PERFORMED; } else { expectedLocalCheckpoint = newGlobalCheckPoint; } // but you can not increment with a new primary term until the operations on the older primary term complete final Thread thread = new Thread(() -> { try { barrier.await(); } catch (final BrokenBarrierException | InterruptedException e) { throw new RuntimeException(e); } ActionListener<Releasable> listener = new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { assertThat(indexShard.getPrimaryTerm(), equalTo(newPrimaryTerm)); assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(newPrimaryTerm)); assertThat(indexShard.getLocalCheckpoint(), equalTo(expectedLocalCheckpoint)); assertThat(indexShard.getGlobalCheckpoint(), equalTo(newGlobalCheckPoint)); onResponse.set(true); releasable.close(); finish(); } @Override public void onFailure(Exception e) { onFailure.set(e); finish(); } private void finish() { try { barrier.await(); } catch (final BrokenBarrierException | InterruptedException e) { throw new RuntimeException(e); } } }; try { indexShard.acquireReplicaOperationPermit( newPrimaryTerm, newGlobalCheckPoint, listener, ThreadPool.Names.SAME, ""); } catch (Exception e) { listener.onFailure(e); } }); thread.start(); barrier.await(); if (indexShard.state() == IndexShardState.CREATED || indexShard.state() == IndexShardState.RECOVERING) { barrier.await(); assertThat(indexShard.getPrimaryTerm(), equalTo(primaryTerm)); assertFalse(onResponse.get()); assertThat(onFailure.get(), instanceOf(IndexShardNotStartedException.class)); Releasables.close(operation1); 
Releasables.close(operation2); } else { // our operation should be blocked until the previous operations complete assertFalse(onResponse.get()); assertNull(onFailure.get()); assertThat(indexShard.getPrimaryTerm(), equalTo(primaryTerm)); assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(primaryTerm)); Releasables.close(operation1); // our operation should still be blocked assertFalse(onResponse.get()); assertNull(onFailure.get()); assertThat(indexShard.getPrimaryTerm(), equalTo(primaryTerm)); assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(primaryTerm)); Releasables.close(operation2); barrier.await(); // now lock acquisition should have succeeded assertThat(indexShard.getPrimaryTerm(), equalTo(newPrimaryTerm)); assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(newPrimaryTerm)); if (engineClosed) { assertFalse(onResponse.get()); assertThat(onFailure.get(), instanceOf(AlreadyClosedException.class)); } else { assertTrue(onResponse.get()); assertNull(onFailure.get()); assertThat(getTranslog(indexShard).getGeneration().translogFileGeneration, equalTo(translogGen + 1)); assertThat(indexShard.getLocalCheckpoint(), equalTo(expectedLocalCheckpoint)); assertThat(indexShard.getGlobalCheckpoint(), equalTo(newGlobalCheckPoint)); } } thread.join(); assertEquals(0, indexShard.getActiveOperationsCount()); } closeShards(indexShard); } public void testGlobalCheckpointSync() throws IOException { // create the primary shard with a callback that sets a boolean when the global checkpoint sync is invoked final ShardId shardId = new ShardId("index", "_na_", 0); final ShardRouting shardRouting = TestShardRouting.newShardRouting( shardId, randomAlphaOfLength(8), true, ShardRoutingState.INITIALIZING, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE); final Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2) 
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); final IndexMetaData.Builder indexMetadata = IndexMetaData.builder(shardRouting.getIndexName()).settings(settings).primaryTerm(0, 1); final AtomicBoolean synced = new AtomicBoolean(); final IndexShard primaryShard = newShard(shardRouting, indexMetadata.build(), null, new InternalEngineFactory(), () -> synced.set(true)); // add a replica recoverShardFromStore(primaryShard); final IndexShard replicaShard = newShard(shardId, false); recoverReplica(replicaShard, primaryShard, true); final int maxSeqNo = randomIntBetween(0, 128); for (int i = 0; i <= maxSeqNo; i++) { EngineTestCase.generateNewSeqNo(primaryShard.getEngine()); } final long checkpoint = rarely() ? maxSeqNo - scaledRandomIntBetween(0, maxSeqNo) : maxSeqNo; // set up local checkpoints on the shard copies primaryShard.updateLocalCheckpointForShard(shardRouting.allocationId().getId(), checkpoint); final int replicaLocalCheckpoint = randomIntBetween(0, Math.toIntExact(checkpoint)); final String replicaAllocationId = replicaShard.routingEntry().allocationId().getId(); primaryShard.updateLocalCheckpointForShard(replicaAllocationId, replicaLocalCheckpoint); // initialize the local knowledge on the primary of the global checkpoint on the replica shard final int replicaGlobalCheckpoint = randomIntBetween(Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED), Math.toIntExact(primaryShard.getGlobalCheckpoint())); primaryShard.updateGlobalCheckpointForShard(replicaAllocationId, replicaGlobalCheckpoint); // simulate a background maybe sync; it should only run if the knowledge on the replica of the global checkpoint lags the primary primaryShard.maybeSyncGlobalCheckpoint("test"); assertThat( synced.get(), equalTo(maxSeqNo == primaryShard.getGlobalCheckpoint() && (replicaGlobalCheckpoint < checkpoint))); // simulate that the background sync advanced the global checkpoint on the replica primaryShard.updateGlobalCheckpointForShard(replicaAllocationId, 
primaryShard.getGlobalCheckpoint()); // reset our boolean so that we can assert after another simulated maybe sync synced.set(false); primaryShard.maybeSyncGlobalCheckpoint("test"); // this time there should not be a sync since all the replica copies are caught up with the primary assertFalse(synced.get()); closeShards(replicaShard, primaryShard); } public void testRestoreLocalCheckpointTrackerFromTranslogOnPromotion() throws IOException, InterruptedException { final IndexShard indexShard = newStartedShard(false); final int operations = 1024 - scaledRandomIntBetween(0, 1024); indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED)); final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); final long globalCheckpointOnReplica = SequenceNumbers.UNASSIGNED_SEQ_NO; randomIntBetween( Math.toIntExact(SequenceNumbers.UNASSIGNED_SEQ_NO), Math.toIntExact(indexShard.getLocalCheckpoint())); indexShard.updateGlobalCheckpointOnReplica(globalCheckpointOnReplica, "test"); final int globalCheckpoint = randomIntBetween( Math.toIntExact(SequenceNumbers.UNASSIGNED_SEQ_NO), Math.toIntExact(indexShard.getLocalCheckpoint())); final CountDownLatch latch = new CountDownLatch(1); indexShard.acquireReplicaOperationPermit( indexShard.getPrimaryTerm() + 1, globalCheckpoint, new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { releasable.close(); latch.countDown(); } @Override public void onFailure(Exception e) { } }, ThreadPool.Names.SAME, ""); latch.await(); final ShardRouting newRouting = indexShard.routingEntry().moveActiveReplicaToPrimary(); final CountDownLatch resyncLatch = new CountDownLatch(1); indexShard.updateShardState( newRouting, indexShard.getPrimaryTerm() + 1, (s, r) -> resyncLatch.countDown(), 1L, Collections.singleton(newRouting.allocationId().getId()), new IndexShardRoutingTable.Builder(newRouting.shardId()).addShard(newRouting).build(), Collections.emptySet()); resyncLatch.await(); 
assertThat(indexShard.getLocalCheckpoint(), equalTo(maxSeqNo));
    assertThat(indexShard.seqNoStats().getMaxSeqNo(), equalTo(maxSeqNo));
    closeShards(indexShard);
}

// Acquiring a replica operation permit under a newer term may throw the local checkpoint
// back to max(globalCheckpoint, globalCheckpointOnReplica); indexing afterwards must
// advance it again (asserted at the end).
public void testThrowBackLocalCheckpointOnReplica() throws IOException, InterruptedException {
    final IndexShard indexShard = newStartedShard(false);

    // most of the time this is large enough that most of the time there will be at least one gap
    final int operations = 1024 - scaledRandomIntBetween(0, 1024);
    indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED));

    final long globalCheckpointOnReplica = randomIntBetween(
        Math.toIntExact(SequenceNumbers.UNASSIGNED_SEQ_NO),
        Math.toIntExact(indexShard.getLocalCheckpoint()));
    indexShard.updateGlobalCheckpointOnReplica(globalCheckpointOnReplica, "test");

    final int globalCheckpoint = randomIntBetween(
        Math.toIntExact(SequenceNumbers.UNASSIGNED_SEQ_NO),
        Math.toIntExact(indexShard.getLocalCheckpoint()));
    final CountDownLatch latch = new CountDownLatch(1);
    indexShard.acquireReplicaOperationPermit(
        indexShard.primaryTerm + 1,
        globalCheckpoint,
        new ActionListener<Releasable>() {
            @Override
            public void onResponse(final Releasable releasable) {
                releasable.close();
                latch.countDown();
            }

            @Override
            public void onFailure(final Exception e) {
                // permit acquisition is not expected to fail in this test
            }
        },
        ThreadPool.Names.SAME, "");
    latch.await();
    if (globalCheckpointOnReplica == SequenceNumbers.UNASSIGNED_SEQ_NO
        && globalCheckpoint == SequenceNumbers.UNASSIGNED_SEQ_NO) {
        assertThat(indexShard.getLocalCheckpoint(), equalTo(SequenceNumbers.NO_OPS_PERFORMED));
    } else {
        assertThat(indexShard.getLocalCheckpoint(), equalTo(Math.max(globalCheckpoint, globalCheckpointOnReplica)));
    }

    // ensure that after the local checkpoint throw back and indexing again, the local checkpoint advances
    final Result result = indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(indexShard.getLocalCheckpoint()));
    assertThat(indexShard.getLocalCheckpoint(), equalTo((long) result.localCheckpoint));

    closeShards(indexShard);
}

// Two threads concurrently bump the replica's primary term via operation permits; if the
// terms differ and the higher term wins the race, the loser must observe a failure.
public void testConcurrentTermIncreaseOnReplicaShard() throws BrokenBarrierException, InterruptedException, IOException {
    final IndexShard indexShard = newStartedShard(false);

    final CyclicBarrier barrier = new CyclicBarrier(3);
    final CountDownLatch latch = new CountDownLatch(2);

    final long primaryTerm = indexShard.getPrimaryTerm();
    final AtomicLong counter = new AtomicLong();
    final AtomicReference<Exception> onFailure = new AtomicReference<>();

    final LongFunction<Runnable> function = increment -> () -> {
        assert increment > 0;
        try {
            barrier.await();
        } catch (final BrokenBarrierException | InterruptedException e) {
            throw new RuntimeException(e);
        }
        indexShard.acquireReplicaOperationPermit(
            primaryTerm + increment,
            indexShard.getGlobalCheckpoint(),
            new ActionListener<Releasable>() {
                @Override
                public void onResponse(Releasable releasable) {
                    counter.incrementAndGet();
                    assertThat(indexShard.getPrimaryTerm(), equalTo(primaryTerm + increment));
                    latch.countDown();
                    releasable.close();
                }

                @Override
                public void onFailure(Exception e) {
                    onFailure.set(e);
                    latch.countDown();
                }
            },
            ThreadPool.Names.WRITE, "");
    };

    final long firstIncrement = 1 + (randomBoolean() ? 0 : 1);
    final long secondIncrement = 1 + (randomBoolean() ? 0 : 1);
    final Thread first = new Thread(function.apply(firstIncrement));
    final Thread second = new Thread(function.apply(secondIncrement));

    first.start();
    second.start();

    // the two threads synchronize attempting to acquire an operation permit
    barrier.await();

    // we wait for both operations to complete
    latch.await();

    first.join();
    second.join();

    final Exception e;
    if ((e = onFailure.get()) != null) {
        /*
         * If one thread tried to set the primary term to a higher value than the other thread and the thread with the higher term won
         * the race, then the other thread lost the race and only one operation should have been executed.
*/
        assertThat(e, instanceOf(IllegalStateException.class));
        assertThat(e, hasToString(matches("operation primary term \\[\\d+\\] is too old")));
        assertThat(counter.get(), equalTo(1L));
    } else {
        assertThat(counter.get(), equalTo(2L));
    }

    assertThat(indexShard.getPrimaryTerm(), equalTo(primaryTerm + Math.max(firstIncrement, secondIncrement)));

    closeShards(indexShard);
}

/***
 * test one can snapshot the store at various lifecycle stages
 */
public void testSnapshotStore() throws IOException {
    final IndexShard shard = newStartedShard(true);
    indexDoc(shard, "_doc", "0");
    flushShard(shard);

    final IndexShard newShard = reinitShard(shard);
    DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);

    // snapshot before recovery is started
    Store.MetadataSnapshot snapshot = newShard.snapshotStoreMetadata();
    assertThat(snapshot.getSegmentsFile().name(), equalTo("segments_3"));

    // snapshot while marked as recovering
    newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));

    snapshot = newShard.snapshotStoreMetadata();
    assertThat(snapshot.getSegmentsFile().name(), equalTo("segments_3"));

    // snapshot after recovery from store
    assertTrue(newShard.recoverFromStore());

    snapshot = newShard.snapshotStoreMetadata();
    assertThat(snapshot.getSegmentsFile().name(), equalTo("segments_3"));

    // snapshot once started
    IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted());

    snapshot = newShard.snapshotStoreMetadata();
    assertThat(snapshot.getSegmentsFile().name(), equalTo("segments_3"));

    // snapshot after the shard is closed
    newShard.close("test", false);

    snapshot = newShard.snapshotStoreMetadata();
    assertThat(snapshot.getSegmentsFile().name(), equalTo("segments_3"));

    closeShards(newShard);
}

// Concurrent fsync requests: every sync callback must release its semaphore permit, so
// all Integer.MAX_VALUE permits must be reacquirable once the threads finish.
public void testAsyncFsync() throws InterruptedException, IOException {
    IndexShard shard = newStartedShard();
    Semaphore semaphore = new Semaphore(Integer.MAX_VALUE);
    Thread[] thread = new Thread[randomIntBetween(3, 5)];
    CountDownLatch latch = new CountDownLatch(thread.length);
    for (int i = 0; i < thread.length; i++) {
        thread[i] = new Thread() {
            @Override
            public void run() {
                try {
                    // all threads start syncing roughly together
                    latch.countDown();
                    latch.await();
                    for (int i = 0; i < 10000; i++) {
                        semaphore.acquire();
                        shard.sync(TranslogTests.randomTranslogLocation(), (ex) -> semaphore.release());
                    }
                } catch (Exception ex) {
                    throw new RuntimeException(ex);
                }
            }
        };
        thread[i].start();
    }

    for (int i = 0; i < thread.length; i++) {
        thread[i].join();
    }
    assertTrue(semaphore.tryAcquire(Integer.MAX_VALUE, 10, TimeUnit.SECONDS));

    closeShards(shard);
}

public void testMinimumCompatVersion() throws IOException {
    Version versionCreated = VersionUtils.randomVersion(random());
    Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, versionCreated.id)
        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
        .build();
    IndexMetaData metaData = IndexMetaData.builder("test")
        .settings(settings)
        .primaryTerm(0, 1).build();
    IndexShard test = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
    recoverShardFromStore(test);

    indexDoc(test, "_doc", "test");
    assertEquals(versionCreated.luceneVersion, test.minimumCompatibleVersion());
    indexDoc(test, "_doc", "test");
    assertEquals(versionCreated.luceneVersion, test.minimumCompatibleVersion());
    test.getEngine().flush();
    // after a flush on the current version the minimum compatible version advances
    assertEquals(Version.CURRENT.luceneVersion, test.minimumCompatibleVersion());

    closeShards(test);
}

// ShardStats must expose the shard paths and survive a stream serialization round trip
// and XContent rendering.
public void testShardStats() throws IOException {
    IndexShard shard = newStartedShard();
    ShardStats stats = new ShardStats(shard.routingEntry(), shard.shardPath(),
        new CommonStats(new IndicesQueryCache(Settings.EMPTY), shard, new CommonStatsFlags()),
        shard.commitStats(), shard.seqNoStats());
    assertEquals(shard.shardPath().getRootDataPath().toString(), stats.getDataPath());
    assertEquals(shard.shardPath().getRootStatePath().toString(), stats.getStatePath());
    assertEquals(shard.shardPath().isCustomDataPath(), stats.isCustomDataPath());

    // try to serialize it to ensure values survive the serialization
    BytesStreamOutput out = new
BytesStreamOutput();
    stats.writeTo(out);
    StreamInput in = out.bytes().streamInput();
    stats = ShardStats.readShardStats(in);

    // render to JSON and verify the shard_path section
    XContentBuilder builder = jsonBuilder();
    builder.startObject();
    stats.toXContent(builder, EMPTY_PARAMS);
    builder.endObject();
    String xContent = Strings.toString(builder);
    StringBuilder expectedSubSequence = new StringBuilder("\"shard_path\":{\"state_path\":\"");
    expectedSubSequence.append(shard.shardPath().getRootStatePath().toString());
    expectedSubSequence.append("\",\"data_path\":\"");
    expectedSubSequence.append(shard.shardPath().getRootDataPath().toString());
    expectedSubSequence.append("\",\"is_custom_data_path\":").append(shard.shardPath().isCustomDataPath()).append("}");
    if (Constants.WINDOWS) {
        // Some path weirdness on windows
    } else {
        assertTrue(xContent.contains(expectedSubSequence));
    }
    closeShards(shard);
}

// storeStats() failures must trigger the shard failure callback; a CorruptIndexException
// additionally marks the store corrupted, unless marking the store itself fails too.
public void testShardStatsWithFailures() throws IOException {
    allowShardFailures();
    final ShardId shardId = new ShardId("index", "_na_", 0);
    final ShardRouting shardRouting =
        newShardRouting(shardId, "node", true, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE,
            ShardRoutingState.INITIALIZING);
    final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir());

    ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId);

    Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
        .build();
    IndexMetaData metaData = IndexMetaData.builder(shardRouting.getIndexName())
        .settings(settings)
        .primaryTerm(0, 1)
        .build();

    // Override two Directory methods to make them fail at our will
    // We use AtomicReference here to inject failure in the middle of the test not immediately
    // We use Supplier<IOException> instead of IOException to produce meaningful stacktrace
    // (remember stack trace is filled when exception is instantiated)
    AtomicReference<Supplier<IOException>> exceptionToThrow = new AtomicReference<>();
    AtomicBoolean throwWhenMarkingStoreCorrupted = new AtomicBoolean(false);
    Directory directory = new FilterDirectory(newFSDirectory(shardPath.resolveIndex())) {
        // fileLength method is called during storeStats try block
        // it's not called when store is marked as corrupted
        @Override
        public long fileLength(String name) throws IOException {
            Supplier<IOException> ex = exceptionToThrow.get();
            if (ex == null) {
                return super.fileLength(name);
            } else {
                throw ex.get();
            }
        }

        // listAll method is called when marking store as corrupted
        @Override
        public String[] listAll() throws IOException {
            Supplier<IOException> ex = exceptionToThrow.get();
            if (throwWhenMarkingStoreCorrupted.get() && ex != null) {
                throw ex.get();
            } else {
                return super.listAll();
            }
        }
    };

    try (Store store = createStore(shardId, new IndexSettings(metaData, Settings.EMPTY), directory)) {
        IndexShard shard = newShard(shardRouting, shardPath, metaData, store,
            null, new InternalEngineFactory(), () -> {
            }, EMPTY_EVENT_LISTENER);
        AtomicBoolean failureCallbackTriggered = new AtomicBoolean(false);
        shard.addShardFailureCallback((ig) -> failureCallbackTriggered.set(true));

        recoverShardFromStore(shard);

        final boolean corruptIndexException = randomBoolean();

        if (corruptIndexException) {
            exceptionToThrow.set(() -> new CorruptIndexException("Test CorruptIndexException", "Test resource"));
            throwWhenMarkingStoreCorrupted.set(randomBoolean());
        } else {
            exceptionToThrow.set(() -> new IOException("Test IOException"));
        }
        ElasticsearchException e = expectThrows(ElasticsearchException.class, shard::storeStats);
        assertTrue(failureCallbackTriggered.get());

        if (corruptIndexException && !throwWhenMarkingStoreCorrupted.get()) {
            assertTrue(store.isMarkedCorrupted());
        }
    }
}

// Refresh stats accounting: two refreshes happen during recovery; subsequent refreshes,
// realtime gets and writeIndexingBuffer calls increment the counters as asserted.
public void testRefreshMetric() throws IOException {
    IndexShard shard = newStartedShard();
    assertThat(shard.refreshStats().getTotal(), equalTo(2L)); // refresh on: finalize and end of recovery
    long
initialTotalTime = shard.refreshStats().getTotalTimeInMillis();
    // check time advances
    for (int i = 1; shard.refreshStats().getTotalTimeInMillis() == initialTotalTime; i++) {
        indexDoc(shard, "_doc", "test");
        assertThat(shard.refreshStats().getTotal(), equalTo(2L + i - 1));
        shard.refresh("test");
        assertThat(shard.refreshStats().getTotal(), equalTo(2L + i));
        assertThat(shard.refreshStats().getTotalTimeInMillis(), greaterThanOrEqualTo(initialTotalTime));
    }
    long refreshCount = shard.refreshStats().getTotal();
    indexDoc(shard, "_doc", "test");
    // the get below increments the refresh stats by one (asserted inside the try)
    try (Engine.GetResult ignored = shard.get(
        new Engine.Get(true, false, "test", "test", new Term(IdFieldMapper.NAME, Uid.encodeId("test"))))) {
        assertThat(shard.refreshStats().getTotal(), equalTo(refreshCount+1));
    }
    indexDoc(shard, "_doc", "test");
    shard.writeIndexingBuffer();
    assertThat(shard.refreshStats().getTotal(), equalTo(refreshCount+2));
    closeShards(shard);
}

// Verifies the IndexingOperationListener callbacks (pre/post index and delete, and the
// exception variants) fire with the expected counts, including after the engine is closed.
public void testIndexingOperationsListeners() throws IOException {
    IndexShard shard = newStartedShard(true);
    indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
    shard.updateLocalCheckpointForShard(shard.shardRouting.allocationId().getId(), 0);
    AtomicInteger preIndex = new AtomicInteger();
    AtomicInteger postIndexCreate = new AtomicInteger();
    AtomicInteger postIndexUpdate = new AtomicInteger();
    AtomicInteger postIndexException = new AtomicInteger();
    AtomicInteger preDelete = new AtomicInteger();
    AtomicInteger postDelete = new AtomicInteger();
    AtomicInteger postDeleteException = new AtomicInteger();
    shard.close("simon says", true);
    shard = reinitShard(shard, new IndexingOperationListener() {
        @Override
        public Engine.Index preIndex(ShardId shardId, Engine.Index operation) {
            preIndex.incrementAndGet();
            return operation;
        }

        @Override
        public void postIndex(ShardId shardId, Engine.Index index, Engine.IndexResult result) {
            switch (result.getResultType()) {
                case SUCCESS:
                    if (result.isCreated()) {
                        postIndexCreate.incrementAndGet();
                    } else {
                        postIndexUpdate.incrementAndGet();
                    }
                    break;
                case FAILURE:
                    // delegate failures to the exception variant below
                    postIndex(shardId, index, result.getFailure());
                    break;
                default:
                    fail("unexpected result type:" + result.getResultType());
            }
        }

        @Override
        public void postIndex(ShardId shardId, Engine.Index index, Exception ex) {
            postIndexException.incrementAndGet();
        }

        @Override
        public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) {
            preDelete.incrementAndGet();
            return delete;
        }

        @Override
        public void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResult result) {
            switch (result.getResultType()) {
                case SUCCESS:
                    postDelete.incrementAndGet();
                    break;
                case FAILURE:
                    // delegate failures to the exception variant below
                    postDelete(shardId, delete, result.getFailure());
                    break;
                default:
                    fail("unexpected result type:" + result.getResultType());
            }
        }

        @Override
        public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) {
            postDeleteException.incrementAndGet();
        }
    });
    recoverShardFromStore(shard);

    indexDoc(shard, "_doc", "1");
    assertEquals(1, preIndex.get());
    assertEquals(1, postIndexCreate.get());
    assertEquals(0, postIndexUpdate.get());
    assertEquals(0, postIndexException.get());
    assertEquals(0, preDelete.get());
    assertEquals(0, postDelete.get());
    assertEquals(0, postDeleteException.get());

    indexDoc(shard, "_doc", "1");
    assertEquals(2, preIndex.get());
    assertEquals(1, postIndexCreate.get());
    assertEquals(1, postIndexUpdate.get());
    assertEquals(0, postIndexException.get());
    assertEquals(0, preDelete.get());
    assertEquals(0, postDelete.get());
    assertEquals(0, postDeleteException.get());

    deleteDoc(shard, "_doc", "1");
    assertEquals(2, preIndex.get());
    assertEquals(1, postIndexCreate.get());
    assertEquals(1, postIndexUpdate.get());
    assertEquals(0, postIndexException.get());
    assertEquals(1, preDelete.get());
    assertEquals(1, postDelete.get());
    assertEquals(0, postDeleteException.get());

    shard.close("Unexpected close", true);
    shard.state = IndexShardState.STARTED; // It will generate exception

    try {
        indexDoc(shard, "_doc", "1");
        fail();
    } catch (AlreadyClosedException e) {
        // expected: the shard was closed above
    }
    assertEquals(2, preIndex.get());
    assertEquals(1, postIndexCreate.get());
    assertEquals(1, postIndexUpdate.get());
    assertEquals(0, postIndexException.get());
    assertEquals(1, preDelete.get());
    assertEquals(1, postDelete.get());
    assertEquals(0, postDeleteException.get());
    try {
        deleteDoc(shard, "_doc", "1");
        fail();
    } catch (AlreadyClosedException e) {
        // expected: the shard was closed above
    }
    assertEquals(2, preIndex.get());
    assertEquals(1, postIndexCreate.get());
    assertEquals(1, postIndexUpdate.get());
    assertEquals(0, postIndexException.get());
    assertEquals(1, preDelete.get());
    assertEquals(1, postDelete.get());
    assertEquals(0, postDeleteException.get());

    closeShards(shard);
}

// Relocation hand-off cannot complete while a primary operation permit is held; once the
// permit is released the shard leaves primary mode.
public void testLockingBeforeAndAfterRelocated() throws Exception {
    final IndexShard shard = newStartedShard(true);
    IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(shard.routingEntry(), "other_node"));
    CountDownLatch latch = new CountDownLatch(1);
    Thread recoveryThread = new Thread(() -> {
        latch.countDown();
        try {
            shard.relocated(primaryContext -> {});
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
    });

    try (Releasable ignored = acquirePrimaryOperationPermitBlockingly(shard)) {
        // start finalization of recovery
        recoveryThread.start();
        latch.await();
        // recovery can only be finalized after we release the current primaryOperationLock
        assertTrue(shard.isPrimaryMode());
    }
    // recovery can be now finalized
    recoveryThread.join();
    assertFalse(shard.isPrimaryMode());
    try (Releasable ignored = acquirePrimaryOperationPermitBlockingly(shard)) {
        // lock can again be acquired
        assertFalse(shard.isPrimaryMode());
    }

    closeShards(shard);
}

// Operation permits requested while a relocation hand-off is in flight are delayed but
// must eventually be granted.
public void testDelayedOperationsBeforeAndAfterRelocated() throws Exception {
    final IndexShard shard = newStartedShard(true);
    IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(shard.routingEntry(), "other_node"));
    Thread recoveryThread = new Thread(() -> {
        try {
            shard.relocated(primaryContext -> {});
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
}
    });
    recoveryThread.start();
    List<PlainActionFuture<Releasable>> onLockAcquiredActions = new ArrayList<>();
    for (int i = 0; i < 10; i++) {
        PlainActionFuture<Releasable> onLockAcquired = new PlainActionFuture<Releasable>() {
            @Override
            public void onResponse(Releasable releasable) {
                releasable.close();
                super.onResponse(releasable);
            }
        };
        shard.acquirePrimaryOperationPermit(onLockAcquired, ThreadPool.Names.WRITE, "i_" + i);
        onLockAcquiredActions.add(onLockAcquired);
    }

    for (PlainActionFuture<Releasable> onLockAcquired : onLockAcquiredActions) {
        assertNotNull(onLockAcquired.get(30, TimeUnit.SECONDS));
    }

    recoveryThread.join();

    closeShards(shard);
}

// Several threads hold primary permits across a relocation hand-off: the shard must stay
// in primary mode until all pending operations complete, and only then flip.
public void testStressRelocated() throws Exception {
    final IndexShard shard = newStartedShard(true);
    assertTrue(shard.isPrimaryMode());
    IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(shard.routingEntry(), "other_node"));
    final int numThreads = randomIntBetween(2, 4);
    Thread[] indexThreads = new Thread[numThreads];
    CountDownLatch allPrimaryOperationLocksAcquired = new CountDownLatch(numThreads);
    CyclicBarrier barrier = new CyclicBarrier(numThreads + 1);
    for (int i = 0; i < indexThreads.length; i++) {
        indexThreads[i] = new Thread() {
            @Override
            public void run() {
                try (Releasable operationLock = acquirePrimaryOperationPermitBlockingly(shard)) {
                    allPrimaryOperationLocksAcquired.countDown();
                    barrier.await();
                } catch (InterruptedException | BrokenBarrierException | ExecutionException e) {
                    throw new RuntimeException(e);
                }
            }
        };
        indexThreads[i].start();
    }
    AtomicBoolean relocated = new AtomicBoolean();
    final Thread recoveryThread = new Thread(() -> {
        try {
            shard.relocated(primaryContext -> {});
        } catch (InterruptedException e) {
            throw new RuntimeException(e);
        }
        relocated.set(true);
    });
    // ensure we wait for all primary operation locks to be acquired
    allPrimaryOperationLocksAcquired.await();
    // start recovery thread
    recoveryThread.start();
    assertThat(relocated.get(), equalTo(false));
    assertThat(shard.getActiveOperationsCount(), greaterThan(0));
    // ensure we only transition after pending operations completed
    assertTrue(shard.isPrimaryMode());
    // complete pending operations
    barrier.await();
    // complete recovery/relocation
    recoveryThread.join();
    // ensure relocated successfully once pending operations are done
    assertThat(relocated.get(), equalTo(true));
    assertFalse(shard.isPrimaryMode());
    assertThat(shard.getActiveOperationsCount(), equalTo(0));
    for (Thread indexThread : indexThreads) {
        indexThread.join();
    }

    closeShards(shard);
}

// Once the hand-off has completed, the routing entry cannot be moved back.
public void testRelocatedShardCanNotBeRevived() throws IOException, InterruptedException {
    final IndexShard shard = newStartedShard(true);
    final ShardRouting originalRouting = shard.routingEntry();
    IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(originalRouting, "other_node"));
    shard.relocated(primaryContext -> {});
    expectThrows(IllegalIndexShardStateException.class, () -> IndexShardTestCase.updateRoutingEntry(shard, originalRouting));
    closeShards(shard);
}

// After a relocation is cancelled (routing reverted) the hand-off must be rejected.
public void testShardCanNotBeMarkedAsRelocatedIfRelocationCancelled() throws IOException {
    final IndexShard shard = newStartedShard(true);
    final ShardRouting originalRouting = shard.routingEntry();
    IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(originalRouting, "other_node"));
    IndexShardTestCase.updateRoutingEntry(shard, originalRouting);
    expectThrows(IllegalIndexShardStateException.class, () -> shard.relocated(primaryContext -> {}));
    closeShards(shard);
}

// Race a relocation hand-off against cancelling the relocation; exactly one side wins and
// the loser must observe an IllegalIndexShardStateException.
public void testRelocatedShardCanNotBeRevivedConcurrently() throws IOException, InterruptedException, BrokenBarrierException {
    final IndexShard shard = newStartedShard(true);
    final ShardRouting originalRouting = shard.routingEntry();
    IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(originalRouting, "other_node"));
    CyclicBarrier cyclicBarrier = new CyclicBarrier(3);
    AtomicReference<Exception> relocationException = new AtomicReference<>();
Thread relocationThread = new Thread(new AbstractRunnable() {
        @Override
        public void onFailure(Exception e) {
            relocationException.set(e);
        }

        @Override
        protected void doRun() throws Exception {
            cyclicBarrier.await();
            shard.relocated(primaryContext -> {});
        }
    });
    relocationThread.start();
    AtomicReference<Exception> cancellingException = new AtomicReference<>();
    Thread cancellingThread = new Thread(new AbstractRunnable() {
        @Override
        public void onFailure(Exception e) {
            cancellingException.set(e);
        }

        @Override
        protected void doRun() throws Exception {
            cyclicBarrier.await();
            IndexShardTestCase.updateRoutingEntry(shard, originalRouting);
        }
    });
    cancellingThread.start();
    cyclicBarrier.await();
    relocationThread.join();
    cancellingThread.join();
    if (shard.isPrimaryMode() == false) {
        logger.debug("shard was relocated successfully");
        assertThat(cancellingException.get(), instanceOf(IllegalIndexShardStateException.class));
        assertThat("current routing:" + shard.routingEntry(), shard.routingEntry().relocating(), equalTo(true));
        assertThat(relocationException.get(), nullValue());
    } else {
        logger.debug("shard relocation was cancelled");
        assertThat(relocationException.get(), instanceOf(IllegalIndexShardStateException.class));
        assertThat("current routing:" + shard.routingEntry(), shard.routingEntry().relocating(), equalTo(false));
        assertThat(cancellingException.get(), nullValue());
    }
    closeShards(shard);
}

public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException {
    /*
     * The flow of this test:
     * - delete #1
     * - roll generation (to create gen 2)
     * - index #0
     * - index #3
     * - flush (commit point has max_seqno 3, and local checkpoint 1 -> points at gen 2, previous commit point is maintained)
     * - index #2
     * - index #5
     * - If flush and then recover from the existing store, delete #1 will be removed while index #0 is still retained and replayed.
     */
    final IndexShard shard = newStartedShard(false);
    shard.applyDeleteOperationOnReplica(1, 2, "_doc", "id");
    shard.getEngine().rollTranslogGeneration(); // isolate the delete in it's own generation
    shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
        SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id", new BytesArray("{}"), XContentType.JSON));
    shard.applyIndexOperationOnReplica(3, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
        SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-3", new BytesArray("{}"), XContentType.JSON));
    // Flushing a new commit with local checkpoint=1 allows to skip the translog gen #1 in recovery.
    shard.flush(new FlushRequest().force(true).waitIfOngoing(true));
    shard.applyIndexOperationOnReplica(2, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
        SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-2", new BytesArray("{}"), XContentType.JSON));
    shard.applyIndexOperationOnReplica(5, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
        SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-5", new BytesArray("{}"), XContentType.JSON));

    final int translogOps;
    if (randomBoolean()) {
        // Advance the global checkpoint to remove the 1st commit; this shard will recover the 2nd commit.
        shard.updateGlobalCheckpointOnReplica(3, "test");
        logger.info("--> flushing shard");
        shard.flush(new FlushRequest().force(true).waitIfOngoing(true));
        translogOps = 4; // delete #1 won't be replayed.
    } else if (randomBoolean()) {
        shard.getEngine().rollTranslogGeneration();
        translogOps = 5;
    } else {
        translogOps = 5;
    }

    final ShardRouting replicaRouting = shard.routingEntry();
    IndexShard newShard = reinitShard(shard,
        newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), true, ShardRoutingState.INITIALIZING,
            RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE));
    DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
    newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
    assertTrue(newShard.recoverFromStore());
    assertEquals(translogOps, newShard.recoveryState().getTranslog().recoveredOperations());
    assertEquals(translogOps, newShard.recoveryState().getTranslog().totalOperations());
    assertEquals(translogOps, newShard.recoveryState().getTranslog().totalOperationsOnStart());
    assertEquals(100.0f, newShard.recoveryState().getTranslog().recoveredPercent(), 0.01f);
    updateRoutingEntry(newShard, ShardRoutingHelper.moveToStarted(newShard.routingEntry()));
    assertDocCount(newShard, 3);
    closeShards(newShard);
}

// Store recovery replays the translog when the shard was not flushed, and replays nothing
// when it was; either way the local checkpoint ends up at the max indexed seqno.
public void testRecoverFromStore() throws IOException {
    final IndexShard shard = newStartedShard(true);
    int totalOps = randomInt(10);
    int translogOps = totalOps;
    for (int i = 0; i < totalOps; i++) {
        indexDoc(shard, "_doc", Integer.toString(i));
    }
    if (randomBoolean()) {
        shard.updateLocalCheckpointForShard(shard.shardRouting.allocationId().getId(), totalOps - 1);
        flushShard(shard);
        translogOps = 0;
    }
    IndexShard newShard = reinitShard(shard);
    DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
    newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
    assertTrue(newShard.recoverFromStore());
    assertEquals(translogOps, newShard.recoveryState().getTranslog().recoveredOperations());
    assertEquals(translogOps,
newShard.recoveryState().getTranslog().totalOperations());
    assertEquals(translogOps, newShard.recoveryState().getTranslog().totalOperationsOnStart());
    assertEquals(100.0f, newShard.recoveryState().getTranslog().recoveredPercent(), 0.01f);
    IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted());
    // check that local checkpoint of new primary is properly tracked after recovery
    assertThat(newShard.getLocalCheckpoint(), equalTo(totalOps - 1L));
    assertThat(newShard.getReplicationTracker().getTrackedLocalCheckpointForShard(newShard.routingEntry().allocationId().getId())
        .getLocalCheckpoint(), equalTo(totalOps - 1L));
    assertDocCount(newShard, totalOps);
    closeShards(newShard);
}

public void testPrimaryHandOffUpdatesLocalCheckpoint() throws IOException {
    final IndexShard primarySource = newStartedShard(true);
    int totalOps = randomInt(10);
    for (int i = 0; i < totalOps; i++) {
        indexDoc(primarySource, "_doc", Integer.toString(i));
    }
    IndexShardTestCase.updateRoutingEntry(primarySource, primarySource.routingEntry().relocate(randomAlphaOfLength(10), -1));
    final IndexShard primaryTarget = newShard(primarySource.routingEntry().getTargetRelocatingShard());
    updateMappings(primaryTarget, primarySource.indexSettings().getIndexMetaData());
    recoverReplica(primaryTarget, primarySource, true);

    // check that local checkpoint of new primary is properly tracked after primary relocation
    assertThat(primaryTarget.getLocalCheckpoint(), equalTo(totalOps - 1L));
    assertThat(primaryTarget.getReplicationTracker().getTrackedLocalCheckpointForShard(
        primaryTarget.routingEntry().allocationId().getId()).getLocalCheckpoint(), equalTo(totalOps - 1L));
    assertDocCount(primaryTarget, totalOps);
    closeShards(primarySource, primaryTarget);
}

/* This test just verifies that we fill up local checkpoint up to max seen seqID on primary recovery */
public void testRecoverFromStoreWithNoOps() throws IOException {
    final IndexShard shard = newStartedShard(true);
    indexDoc(shard, "_doc", "0");
    Engine.IndexResult test = indexDoc(shard, "_doc", "1");
    // start a replica shard and index the second doc
    final IndexShard otherShard = newStartedShard(false);
    updateMappings(otherShard, shard.indexSettings().getIndexMetaData());
    SourceToParse sourceToParse = SourceToParse.source(shard.shardId().getIndexName(), "_doc", "1",
        new BytesArray("{}"), XContentType.JSON);
    otherShard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse);

    // recover the replica's store as a primary; seqno #0 was never indexed on it, so
    // recovery must fill the gap with a no-op (asserted on the translog snapshot below)
    final ShardRouting primaryShardRouting = shard.routingEntry();
    IndexShard newShard = reinitShard(otherShard, ShardRoutingHelper.initWithSameId(primaryShardRouting,
        RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE));
    DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
    newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
    assertTrue(newShard.recoverFromStore());
    assertEquals(1, newShard.recoveryState().getTranslog().recoveredOperations());
    assertEquals(1, newShard.recoveryState().getTranslog().totalOperations());
    assertEquals(1, newShard.recoveryState().getTranslog().totalOperationsOnStart());
    assertEquals(100.0f, newShard.recoveryState().getTranslog().recoveredPercent(), 0.01f);
    try (Translog.Snapshot snapshot = getTranslog(newShard).newSnapshot()) {
        Translog.Operation operation;
        int numNoops = 0;
        while ((operation = snapshot.next()) != null) {
            if (operation.opType() == Translog.Operation.Type.NO_OP) {
                numNoops++;
                assertEquals(newShard.getPrimaryTerm(), operation.primaryTerm());
                assertEquals(0, operation.seqNo());
            }
        }
        assertEquals(1, numNoops);
    }
    IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted());
    assertDocCount(newShard, 1);
    assertDocCount(shard, 2);

    for (int i = 0; i < 2; i++) {
        newShard = reinitShard(newShard, ShardRoutingHelper.initWithSameId(primaryShardRouting,
            RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE));
        newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
        assertTrue(newShard.recoverFromStore());
        try (Translog.Snapshot snapshot = getTranslog(newShard).newSnapshot()) {
            assertThat(snapshot.totalOperations(), equalTo(2));
        }
    }
    closeShards(newShard, shard);
}

// Recovering from an EMPTY_STORE source discards all existing documents and translog ops.
public void testRecoverFromCleanStore() throws IOException {
    final IndexShard shard = newStartedShard(true);
    indexDoc(shard, "_doc", "0");
    if (randomBoolean()) {
        flushShard(shard);
    }
    final ShardRouting shardRouting = shard.routingEntry();
    IndexShard newShard = reinitShard(shard,
        ShardRoutingHelper.initWithSameId(shardRouting, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE)
    );

    DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
    newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
    assertTrue(newShard.recoverFromStore());
    assertEquals(0, newShard.recoveryState().getTranslog().recoveredOperations());
    assertEquals(0, newShard.recoveryState().getTranslog().totalOperations());
    assertEquals(0, newShard.recoveryState().getTranslog().totalOperationsOnStart());
    assertEquals(100.0f, newShard.recoveryState().getTranslog().recoveredPercent(), 0.01f);
    IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted());
    assertDocCount(newShard, 0);
    closeShards(newShard);
}

// Deleting the Lucene index out from under the shard makes store recovery fail; the shard
// can subsequently be re-created from an empty store.
public void testFailIfIndexNotPresentInRecoverFromStore() throws Exception {
    final IndexShard shard = newStartedShard(true);
    indexDoc(shard, "_doc", "0");
    if (randomBoolean()) {
        flushShard(shard);
    }
    Store store = shard.store();
    store.incRef();
    closeShards(shard);
    cleanLuceneIndex(store.directory());
    store.decRef();
    IndexShard newShard = reinitShard(shard);
    DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
    ShardRouting routing =
newShard.routingEntry();
        newShard.markAsRecovering("store", new RecoveryState(routing, localNode, null));
        try {
            newShard.recoverFromStore();
            fail("index not there!");
        } catch (IndexShardRecoveryException ex) {
            assertTrue(ex.getMessage().contains("failed to fetch index version after copying it over"));
        }
        routing = ShardRoutingHelper.moveToUnassigned(routing,
            new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "because I say so"));
        routing = ShardRoutingHelper.initialize(routing, newShard.routingEntry().currentNodeId());
        assertTrue("it's already recovering, we should ignore new ones", newShard.ignoreRecoveryAttempt());
        try {
            newShard.markAsRecovering("store", new RecoveryState(routing, localNode, null));
            fail("we are already recovering, can't mark again");
        } catch (IllegalIndexShardStateException e) {
            // OK!
        }
        // a fresh reinit with an empty-store source must recover cleanly despite the wiped index
        newShard = reinitShard(newShard,
            ShardRoutingHelper.initWithSameId(routing, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE));
        newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
        assertTrue("recover even if there is nothing to recover", newShard.recoverFromStore());
        IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted());
        assertDocCount(newShard, 0);
        // we can't issue this request through a client because of the inconsistencies we created with the cluster state
        // doing it directly instead
        indexDoc(newShard, "_doc", "0");
        newShard.refresh("test");
        assertDocCount(newShard, 1);
        closeShards(newShard);
    }

    /**
     * After a replica is promoted (new primary term) and recovers from its local store,
     * the operation at seq# 1 — superseded by a no-op during the simulated resync — must
     * be discarded: doc-0 and doc-2 survive, doc-1 does not.
     */
    public void testRecoverFromStoreRemoveStaleOperations() throws Exception {
        final IndexShard shard = newStartedShard(false);
        final String indexName = shard.shardId().getIndexName();
        // Index #0, index #1
        shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
            SourceToParse.source(indexName, "_doc", "doc-0", new BytesArray("{}"), XContentType.JSON));
        flushShard(shard);
        shard.updateGlobalCheckpointOnReplica(0, "test"); // stick the global checkpoint here.
        shard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
            SourceToParse.source(indexName, "_doc", "doc-1", new BytesArray("{}"), XContentType.JSON));
        flushShard(shard);
        assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1"));
        // Simulate resync (without rollback): Noop #1, index #2
        acquireReplicaOperationPermitBlockingly(shard, shard.primaryTerm + 1);
        shard.markSeqNoAsNoop(1, "test");
        shard.applyIndexOperationOnReplica(2, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
            SourceToParse.source(indexName, "_doc", "doc-2", new BytesArray("{}"), XContentType.JSON));
        flushShard(shard);
        assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1", "doc-2"));
        // Recovering from store should discard doc #1
        final ShardRouting replicaRouting = shard.routingEntry();
        IndexShard newShard = reinitShard(shard,
            newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), true, ShardRoutingState.INITIALIZING,
                RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE));
        newShard.primaryTerm++;
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
        assertTrue(newShard.recoverFromStore());
        assertThat(getShardDocUIDs(newShard), containsInAnyOrder("doc-0", "doc-2"));
        closeShards(newShard);
    }

    /**
     * Once a primary has completed relocation hand-off (and left primary mode), applying a
     * routing entry that would move it back must fail with {@link IndexShardRelocatedException}.
     */
    public void testRecoveryFailsAfterMovingToRelocatedState() throws InterruptedException, IOException {
        final IndexShard shard = newStartedShard(true);
        ShardRouting origRouting = shard.routingEntry();
        assertThat(shard.state(), equalTo(IndexShardState.STARTED));
        ShardRouting inRecoveryRouting = ShardRoutingHelper.relocate(origRouting, "some_node");
        IndexShardTestCase.updateRoutingEntry(shard, inRecoveryRouting);
        shard.relocated(primaryContext -> {});
        assertFalse(shard.isPrimaryMode());
        try {
            IndexShardTestCase.updateRoutingEntry(shard,
origRouting);
            fail("Expected IndexShardRelocatedException");
        } catch (IndexShardRelocatedException expected) {
        }
        closeShards(shard);
    }

    /**
     * Restores a shard from a (fake) repository that simply copies the source shard's Lucene
     * files into the target store: the target's previous content is replaced, and local
     * checkpoint / max seq# / global checkpoint reflect the single restored operation.
     */
    public void testRestoreShard() throws IOException {
        final IndexShard source = newStartedShard(true);
        IndexShard target = newStartedShard(true);
        indexDoc(source, "_doc", "0");
        if (randomBoolean()) {
            source.refresh("test");
        }
        indexDoc(target, "_doc", "1");
        target.refresh("test");
        assertDocs(target, "1");
        flushShard(source); // only flush source
        ShardRouting routing = ShardRoutingHelper.initWithSameId(target.routingEntry(),
            RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE);
        final Snapshot snapshot = new Snapshot("foo", new SnapshotId("bar", UUIDs.randomBase64UUID()));
        routing = ShardRoutingHelper.newWithRestoreSource(routing,
            new RecoverySource.SnapshotRecoverySource(snapshot, Version.CURRENT, "test"));
        target = reinitShard(target, routing);
        Store sourceStore = source.store();
        Store targetStore = target.store();
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        target.markAsRecovering("store", new RecoveryState(routing, localNode, null));
        assertTrue(target.restoreFromRepository(new RestoreOnlyRepository("test") {
            @Override
            public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId,
                                     ShardId snapshotShardId, RecoveryState recoveryState) {
                try {
                    // "restore" == wipe the target and copy the source's files over
                    cleanLuceneIndex(targetStore.directory());
                    for (String file : sourceStore.directory().listAll()) {
                        if (file.equals("write.lock") || file.startsWith("extra")) {
                            continue;
                        }
                        targetStore.directory().copyFrom(sourceStore.directory(), file, file, IOContext.DEFAULT);
                    }
                } catch (Exception ex) {
                    throw new RuntimeException(ex);
                }
            }
        }));
        assertThat(target.getLocalCheckpoint(), equalTo(0L));
        assertThat(target.seqNoStats().getMaxSeqNo(), equalTo(0L));
        assertThat(target.getReplicationTracker().getGlobalCheckpoint(), equalTo(0L));
        IndexShardTestCase.updateRoutingEntry(target, routing.moveToStarted());
        assertThat(target.getReplicationTracker().getTrackedLocalCheckpointForShard(
            target.routingEntry().allocationId().getId()).getLocalCheckpoint(), equalTo(0L));
        assertDocs(target, "0");
        closeShards(source, target);
    }

    /**
     * An {@link IndexSearcherWrapper} installed on the shard must be applied to searchers
     * acquired after store recovery — here a {@code FieldMaskingReader} hides the "foo" field,
     * so searches on "foo" return nothing while "foobar" still matches.
     */
    public void testSearcherWrapperIsUsed() throws IOException {
        IndexShard shard = newStartedShard(true);
        indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
        indexDoc(shard, "_doc", "1", "{\"foobar\" : \"bar\"}");
        shard.refresh("test");
        try (Engine.GetResult getResult = shard
            .get(new Engine.Get(false, false, "test", "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))))) {
            assertTrue(getResult.exists());
            assertNotNull(getResult.searcher());
        }
        // without the wrapper both fields are searchable
        try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
            TopDocs search = searcher.searcher().search(new TermQuery(new Term("foo", "bar")), 10);
            assertEquals(search.totalHits, 1);
            search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10);
            assertEquals(search.totalHits, 1);
        }
        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
            @Override
            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
                return new FieldMaskingReader("foo", reader);
            }

            @Override
            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
                return searcher;
            }
        };
        closeShards(shard);
        IndexShard newShard = newShard(
            ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE),
            shard.shardPath(), shard.indexSettings().getIndexMetaData(), null, wrapper,
            new InternalEngineFactory(), () -> {}, EMPTY_EVENT_LISTENER);
        recoverShardFromStore(newShard);
        // with the wrapper installed, "foo" is masked
        try (Engine.Searcher searcher = newShard.acquireSearcher("test")) {
            TopDocs search = searcher.searcher().search(new TermQuery(new Term("foo", "bar")), 10);
            assertEquals(search.totalHits, 0);
            search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10);
            assertEquals(search.totalHits, 1);
        }
        try (Engine.GetResult getResult = newShard
            .get(new
Engine.Get(false, false, "test", "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))))) {
            assertTrue(getResult.exists());
            assertNotNull(getResult.searcher()); // make sure get uses the wrapped reader
            assertTrue(getResult.searcher().reader() instanceof FieldMaskingReader);
        }
        closeShards(newShard);
    }

    /**
     * Field-data / global ordinals must interact correctly with a searcher wrapper that masks
     * the "foo" field: loading global ordinals through the wrapped reader caches nothing
     * (the field appears absent) and causes no evictions.
     */
    public void testSearcherWrapperWorksWithGlobalOrdinals() throws IOException {
        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
            @Override
            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
                return new FieldMaskingReader("foo", reader);
            }

            @Override
            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
                return searcher;
            }
        };
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("test")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\", \"fielddata\": true }}}")
            .settings(settings)
            .primaryTerm(0, 1).build();
        IndexShard shard = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, wrapper);
        recoverShardFromStore(shard);
        // two refreshes -> at least two segments, which global-ordinal loading requires
        indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
        shard.refresh("created segment 1");
        indexDoc(shard, "_doc", "1", "{\"foobar\" : \"bar\"}");
        shard.refresh("created segment 2");
        // test global ordinals are evicted
        MappedFieldType foo = shard.mapperService().fullName("foo");
        IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(shard.indexSettings.getNodeSettings(),
            new IndexFieldDataCache.Listener() {});
        IndexFieldDataService indexFieldDataService = new IndexFieldDataService(shard.indexSettings, indicesFieldDataCache,
            new NoneCircuitBreakerService(), shard.mapperService());
        IndexFieldData.Global ifd = indexFieldDataService.getForField(foo);
        FieldDataStats before = shard.fieldData().stats("foo");
        assertThat(before.getMemorySizeInBytes(), equalTo(0L));
        FieldDataStats after = null;
        try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
            assertThat("we have to have more than one segment", searcher.getDirectoryReader().leaves().size(), greaterThan(1));
            ifd.loadGlobal(searcher.getDirectoryReader());
            after = shard.fieldData().stats("foo");
            assertEquals(after.getEvictions(), before.getEvictions());
            // If a field doesn't exist an empty IndexFieldData is returned and that isn't cached:
            assertThat(after.getMemorySizeInBytes(), equalTo(0L));
        }
        assertEquals(shard.fieldData().stats("foo").getEvictions(), before.getEvictions());
        assertEquals(shard.fieldData().stats("foo").getMemorySizeInBytes(), after.getMemorySizeInBytes());
        shard.flush(new FlushRequest().force(true).waitIfOngoing(true));
        shard.refresh("test");
        assertEquals(shard.fieldData().stats("foo").getMemorySizeInBytes(), before.getMemorySizeInBytes());
        assertEquals(shard.fieldData().stats("foo").getEvictions(), before.getEvictions());
        closeShards(shard);
    }

    /**
     * Replaying the translog during store recovery must invoke {@link IndexingOperationListener}
     * callbacks (2 indexes + 1 delete below) without counting the replayed operations in the
     * shard's indexing stats.
     */
    public void testIndexingOperationListenersIsInvokedOnRecovery() throws IOException {
        IndexShard shard = newStartedShard(true);
        indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
        deleteDoc(shard, "_doc", "0");
        indexDoc(shard, "_doc", "1", "{\"foo\" : \"bar\"}");
        shard.refresh("test");

        final AtomicInteger preIndex = new AtomicInteger();
        final AtomicInteger postIndex = new AtomicInteger();
        final AtomicInteger preDelete = new AtomicInteger();
        final AtomicInteger postDelete = new AtomicInteger();
        IndexingOperationListener listener = new IndexingOperationListener() {
            @Override
            public Engine.Index preIndex(ShardId shardId, Engine.Index operation) {
                preIndex.incrementAndGet();
                return operation;
            }

            @Override
            public void postIndex(ShardId shardId, Engine.Index index, Engine.IndexResult result) {
                postIndex.incrementAndGet();
            }

            @Override
            public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) {
                preDelete.incrementAndGet();
                return delete;
            }

            @Override
            public void postDelete(ShardId shardId, Engine.Delete
delete, Engine.DeleteResult result) {
                postDelete.incrementAndGet();
            }
        };
        final IndexShard newShard = reinitShard(shard, listener);
        recoverShardFromStore(newShard);
        IndexingStats indexingStats = newShard.indexingStats();
        // ensure we are not influencing the indexing stats
        assertEquals(0, indexingStats.getTotal().getDeleteCount());
        assertEquals(0, indexingStats.getTotal().getDeleteCurrent());
        assertEquals(0, indexingStats.getTotal().getIndexCount());
        assertEquals(0, indexingStats.getTotal().getIndexCurrent());
        assertEquals(0, indexingStats.getTotal().getIndexFailedCount());
        assertEquals(2, preIndex.get());
        assertEquals(2, postIndex.get());
        assertEquals(1, preDelete.get());
        assertEquals(1, postDelete.get());
        closeShards(newShard);
    }

    /**
     * If the {@link IndexSearcherWrapper} throws while wrapping, acquiring a searcher must
     * propagate the failure (and, per the test name, release the underlying searcher —
     * the shard must still close cleanly afterwards).
     */
    public void testSearchIsReleaseIfWrapperFails() throws IOException {
        IndexShard shard = newStartedShard(true);
        indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
        shard.refresh("test");
        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
            @Override
            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
                throw new RuntimeException("boom");
            }

            @Override
            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
                return searcher;
            }
        };
        closeShards(shard);
        IndexShard newShard = newShard(
            ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE),
            shard.shardPath(), shard.indexSettings().getIndexMetaData(), null, wrapper,
            new InternalEngineFactory(), () -> {}, EMPTY_EVENT_LISTENER);
        recoverShardFromStore(newShard);
        try {
            newShard.acquireSearcher("test");
            fail("exception expected");
        } catch (RuntimeException ex) {
            // expected: wrapper threw "boom"
        }
        closeShards(newShard);
    }

    /**
     * After peer-recovery translog replay on a replica, the replica's translog must already be
     * synced ({@code isSyncNeeded() == false}) by the time {@code indexTranslogOperations} returns.
     */
    public void testTranslogRecoverySyncsTranslog() throws IOException {
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("test")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
            .settings(settings)
            .primaryTerm(0, 1).build();
        IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
        recoverShardFromStore(primary);
        indexDoc(primary, "_doc", "0", "{\"foo\" : \"bar\"}");
        IndexShard replica = newShard(primary.shardId(), false, "n2", metaData, null);
        recoverReplica(replica, primary, (shard, discoveryNode) ->
            new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> {
            }) {
                @Override
                public long indexTranslogOperations(List<Translog.Operation> operations, int totalTranslogOps) throws IOException {
                    final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps);
                    assertFalse(replica.isSyncNeeded());
                    return localCheckpoint;
                }
            }, true, true);

        closeShards(primary, replica);
    }

    /**
     * Translog recovery must count every operation it sees but only report the parseable ones
     * as recovered: corrupt entries (unbalanced JSON below) are skipped.
     */
    public void testRecoverFromTranslog() throws IOException {
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("test")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
            .settings(settings)
            .primaryTerm(0, randomLongBetween(1, Long.MAX_VALUE)).build();
        IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
        List<Translog.Operation> operations = new ArrayList<>();
        int numTotalEntries = randomIntBetween(0, 10);
        int numCorruptEntries = 0;
        for (int i = 0; i < numTotalEntries; i++) {
            if (randomBoolean()) {
                operations.add(new Translog.Index("_doc", "1", 0, primary.getPrimaryTerm(), 1,
                    "{\"foo\" : \"bar\"}".getBytes(Charset.forName("UTF-8")), null, -1));
            } else {
                // corrupt entry
                operations.add(new Translog.Index("_doc", "2", 1, primary.getPrimaryTerm(), 1,
                    "{\"foo\" : \"bar}".getBytes(Charset.forName("UTF-8")), null, -1));
                numCorruptEntries++;
            }
        }
Iterator<Translog.Operation> iterator = operations.iterator();
        // hand-rolled snapshot feeding the prepared (partially corrupt) operations to recovery
        Translog.Snapshot snapshot = new Translog.Snapshot() {
            @Override
            public void close() {

            }

            @Override
            public int totalOperations() {
                return numTotalEntries;
            }

            @Override
            public Translog.Operation next() throws IOException {
                return iterator.hasNext() ? iterator.next() : null;
            }
        };
        primary.markAsRecovering("store", new RecoveryState(primary.routingEntry(),
            getFakeDiscoNode(primary.routingEntry().currentNodeId()),
            null));
        primary.recoverFromStore();

        primary.state = IndexShardState.RECOVERING; // translog recovery on the next line would otherwise fail as we are in POST_RECOVERY
        primary.runTranslogRecovery(primary.getEngine(), snapshot);
        assertThat(primary.recoveryState().getTranslog().totalOperationsOnStart(), equalTo(numTotalEntries));
        assertThat(primary.recoveryState().getTranslog().totalOperations(), equalTo(numTotalEntries));
        assertThat(primary.recoveryState().getTranslog().recoveredOperations(), equalTo(numTotalEntries - numCorruptEntries));

        closeShards(primary);
    }

    /**
     * A shard must stay inactive through the preparation phase of an internal (store) recovery
     * and only become active once the engine is opened and the translog replayed.
     */
    public void testShardActiveDuringInternalRecovery() throws IOException {
        IndexShard shard = newStartedShard(true);
        indexDoc(shard, "_doc", "0");
        shard = reinitShard(shard);
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        shard.markAsRecovering("for testing", new RecoveryState(shard.routingEntry(), localNode, null));
        // Shard is still inactive since we haven't started recovering yet
        assertFalse(shard.isActive());
        shard.prepareForIndexRecovery();
        // Shard is still inactive since we haven't started recovering yet
        assertFalse(shard.isActive());
        shard.openEngineAndRecoverFromTranslog();
        // Shard should now be active since we did recover:
        assertTrue(shard.isActive());
        closeShards(shard);
    }

    /**
     * Same activity check as above, but for peer recovery: a replica must become active by the
     * time translog operations from the primary have been indexed on it.
     */
    public void testShardActiveDuringPeerRecovery() throws IOException {
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("test")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
            .settings(settings)
            .primaryTerm(0, 1).build();
        IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
        recoverShardFromStore(primary);
        indexDoc(primary, "_doc", "0", "{\"foo\" : \"bar\"}");
        IndexShard replica = newShard(primary.shardId(), false, "n2", metaData, null);
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
        // Shard is still inactive since we haven't started recovering yet
        assertFalse(replica.isActive());
        recoverReplica(replica, primary, (shard, discoveryNode) ->
            new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> {
            }) {
                @Override
                public long indexTranslogOperations(List<Translog.Operation> operations, int totalTranslogOps) throws IOException {
                    final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps);
                    // Shard should now be active since we did recover:
                    assertTrue(replica.isActive());
                    return localCheckpoint;
                }
            }, false, true);

        closeShards(primary, replica);
    }

    /**
     * Refresh listeners added to a recovering replica must be fired (with {@code false})
     * rather than leak: checked before recovery and at each peer-recovery phase where the
     * engine is open.
     */
    public void testRefreshListenersDuringPeerRecovery() throws IOException {
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("test")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
            .settings(settings)
            .primaryTerm(0, 1).build();
        IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
        recoverShardFromStore(primary);
        indexDoc(primary,
"_doc", "0", "{\"foo\" : \"bar\"}");

        // helper that registers a refresh listener and asserts it is called back immediately
        // (with forced==false), i.e. listeners never hang on a recovering shard
        Consumer<IndexShard> assertListenerCalled = shard -> {
            AtomicBoolean called = new AtomicBoolean();
            shard.addRefreshListener(null, b -> {
                assertFalse(b);
                called.set(true);
            });
            assertTrue(called.get());
        };
        IndexShard replica = newShard(primary.shardId(), false, "n2", metaData, null);
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
        assertListenerCalled.accept(replica);
        recoverReplica(replica, primary, (shard, discoveryNode) ->
            new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> {
            }) {
                // we're only checking that listeners are called when the engine is open, before there is no point
                @Override
                public void prepareForTranslogOperations(boolean fileBasedRecovery, int totalTranslogOps) throws IOException {
                    super.prepareForTranslogOperations(fileBasedRecovery, totalTranslogOps);
                    assertListenerCalled.accept(replica);
                }

                @Override
                public long indexTranslogOperations(List<Translog.Operation> operations, int totalTranslogOps) throws IOException {
                    final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps);
                    assertListenerCalled.accept(replica);
                    return localCheckpoint;
                }

                @Override
                public void finalizeRecovery(long globalCheckpoint) throws IOException {
                    super.finalizeRecovery(globalCheckpoint);
                    assertListenerCalled.accept(replica);
                }
            }, false, true);

        closeShards(primary, replica);
    }

    /**
     * Local-shards (shrink/split-style) recovery: recovering a target from a source shard must
     * reject mixed-index sources, replay the source's documents, report file details, track the
     * new primary's local/global checkpoints, surface mapping updates through the consumer, and
     * persist the result across a subsequent store recovery.
     */
    public void testRecoverFromLocalShard() throws IOException {
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("source")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
            .settings(settings)
            .primaryTerm(0, 1).build();

        IndexShard sourceShard = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
        recoverShardFromStore(sourceShard);
        indexDoc(sourceShard, "_doc", "0", "{\"foo\" : \"bar\"}");
        indexDoc(sourceShard, "_doc", "1", "{\"foo\" : \"bar\"}");
        sourceShard.refresh("test");

        ShardRouting targetRouting = newShardRouting(new ShardId("index_1", "index_1", 0), "n1", true,
            ShardRoutingState.INITIALIZING, RecoverySource.LocalShardsRecoverySource.INSTANCE);

        final IndexShard targetShard;
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        Map<String, MappingMetaData> requestedMappingUpdates = ConcurrentCollections.newConcurrentMap();
        {
            targetShard = newShard(targetRouting);
            targetShard.markAsRecovering("store", new RecoveryState(targetShard.routingEntry(), localNode, null));

            BiConsumer<String, MappingMetaData> mappingConsumer = (type, mapping) -> {
                assertNull(requestedMappingUpdates.put(type, mapping));
            };

            // sources from two different indices must be rejected
            final IndexShard differentIndex = newShard(new ShardId("index_2", "index_2", 0), true);
            recoverShardFromStore(differentIndex);
            expectThrows(IllegalArgumentException.class, () -> {
                targetShard.recoverFromLocalShards(mappingConsumer, Arrays.asList(sourceShard, differentIndex));
            });
            closeShards(differentIndex);

            assertTrue(targetShard.recoverFromLocalShards(mappingConsumer, Arrays.asList(sourceShard)));
            RecoveryState recoveryState = targetShard.recoveryState();
            assertEquals(RecoveryState.Stage.DONE, recoveryState.getStage());
            assertTrue(recoveryState.getIndex().fileDetails().size() > 0);
            for (RecoveryState.File file : recoveryState.getIndex().fileDetails()) {
                if (file.reused()) {
                    assertEquals(file.recovered(), 0);
                } else {
                    assertEquals(file.recovered(), file.length());
                }
            }
            // check that local checkpoint of new primary is properly tracked after recovery
            assertThat(targetShard.getLocalCheckpoint(), equalTo(1L));
            assertThat(targetShard.getReplicationTracker().getGlobalCheckpoint(), equalTo(1L));
            IndexShardTestCase.updateRoutingEntry(targetShard, ShardRoutingHelper.moveToStarted(targetShard.routingEntry()));
            assertThat(targetShard.getReplicationTracker().getTrackedLocalCheckpointForShard(
                targetShard.routingEntry().allocationId().getId()).getLocalCheckpoint(), equalTo(1L));
            assertDocCount(targetShard, 2);
        }
        // now check that it's persistent ie. that the added shards are committed
        {
            final IndexShard newShard = reinitShard(targetShard);
            recoverShardFromStore(newShard);
            assertDocCount(newShard, 2);
            closeShards(newShard);
        }

        assertThat(requestedMappingUpdates, hasKey("_doc"));
        assertThat(requestedMappingUpdates.get("_doc").get().source().string(),
            equalTo("{\"properties\":{\"foo\":{\"type\":\"text\"}}}"));

        closeShards(sourceShard, targetShard);
    }

    /**
     * {@code docStats()} must report accurate live/deleted counts and a positive average size
     * through the index -> delete/reindex -> flush -> force-merge lifecycle, accounting for
     * soft-delete tombstones when that feature is enabled.
     */
    public void testDocStats() throws IOException {
        IndexShard indexShard = null;
        try {
            indexShard = newStartedShard(
                Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), 0).build());
            final long numDocs = randomIntBetween(2, 32); // at least two documents so we have docs to delete
            // Delete at least numDocs/10 documents otherwise the number of deleted docs will be below 10%
            // and forceMerge will refuse to expunge deletes
            final long numDocsToDelete = randomIntBetween((int) Math.ceil(Math.nextUp(numDocs / 10.0)), Math.toIntExact(numDocs));
            for (int i = 0; i < numDocs; i++) {
                final String id = Integer.toString(i);
                indexDoc(indexShard, "_doc", id);
            }
            if (randomBoolean()) {
                indexShard.refresh("test");
            } else {
                indexShard.flush(new FlushRequest());
            }
            {
                final DocsStats docsStats = indexShard.docStats();
                assertThat(docsStats.getCount(), equalTo(numDocs));
                try (Engine.Searcher searcher = indexShard.acquireSearcher("test")) {
                    assertTrue(searcher.reader().numDocs() <= docsStats.getCount());
                }
                assertThat(docsStats.getDeleted(), equalTo(0L));
                assertThat(docsStats.getAverageSizeInBytes(), greaterThan(0L));
            }

            final List<Integer> ids = randomSubsetOf(
                Math.toIntExact(numDocsToDelete),
IntStream.range(0, Math.toIntExact(numDocs)).boxed().collect(Collectors.toList()));
            // delete+reindex each selected doc so Lucene records a deletion for it
            for (final Integer i : ids) {
                final String id = Integer.toString(i);
                deleteDoc(indexShard, "_doc", id);
                indexDoc(indexShard, "_doc", id);
            }
            // Need to update and sync the global checkpoint as the soft-deletes retention MergePolicy depends on it.
            if (indexShard.indexSettings.isSoftDeleteEnabled()) {
                if (indexShard.routingEntry().primary()) {
                    indexShard.updateGlobalCheckpointForShard(indexShard.routingEntry().allocationId().getId(),
                        indexShard.getLocalCheckpoint());
                } else {
                    indexShard.updateGlobalCheckpointOnReplica(indexShard.getLocalCheckpoint(), "test");
                }
                indexShard.sync();
            }
            // flush the buffered deletes
            final FlushRequest flushRequest = new FlushRequest();
            flushRequest.force(false);
            flushRequest.waitIfOngoing(false);
            indexShard.flush(flushRequest);

            if (randomBoolean()) {
                indexShard.refresh("test");
            }
            {
                final DocsStats docStats = indexShard.docStats();
                try (Engine.Searcher searcher = indexShard.acquireSearcher("test")) {
                    assertTrue(searcher.reader().numDocs() <= docStats.getCount());
                }
                assertThat(docStats.getCount(), equalTo(numDocs));
                // Lucene will delete a segment if all docs are deleted from it;
                // this means that we lose the deletes when deleting all docs.
                // If soft-delete is enabled, each delete op will add a deletion marker.
                final long deleteTombstones = indexShard.indexSettings.isSoftDeleteEnabled() ? numDocsToDelete : 0L;
                if (numDocsToDelete == numDocs) {
                    assertThat(docStats.getDeleted(), equalTo(deleteTombstones));
                } else {
                    assertThat(docStats.getDeleted(), equalTo(numDocsToDelete + deleteTombstones));
                }
            }

            // merge them away
            final ForceMergeRequest forceMergeRequest = new ForceMergeRequest();
            forceMergeRequest.onlyExpungeDeletes(randomBoolean());
            forceMergeRequest.maxNumSegments(1);
            indexShard.forceMerge(forceMergeRequest);

            if (randomBoolean()) {
                indexShard.refresh("test");
            } else {
                indexShard.flush(new FlushRequest());
            }
            {
                final DocsStats docStats = indexShard.docStats();
                assertThat(docStats.getCount(), equalTo(numDocs));
                assertThat(docStats.getDeleted(), equalTo(0L));
                assertThat(docStats.getAverageSizeInBytes(), greaterThan(0L));
            }
        } finally {
            closeShards(indexShard);
        }
    }

    /**
     * The estimated total document size from {@code docStats()} must stay within ±20% of the
     * on-disk store size, both after initial indexing and after a round of updates/deletes.
     */
    public void testEstimateTotalDocSize() throws Exception {
        IndexShard indexShard = null;
        try {
            indexShard = newStartedShard(true);

            int numDoc = randomIntBetween(100, 200);
            for (int i = 0; i < numDoc; i++) {
                String doc = Strings.toString(XContentFactory.jsonBuilder()
                    .startObject()
                    .field("count", randomInt())
                    .field("point", randomFloat())
                    .field("description", randomUnicodeOfCodepointLength(100))
                    .endObject());
                indexDoc(indexShard, "_doc", Integer.toString(i), doc);
            }

            assertThat("Without flushing, segment sizes should be zero",
                indexShard.docStats().getTotalSizeInBytes(), equalTo(0L));

            if (randomBoolean()) {
                indexShard.flush(new FlushRequest());
            } else {
                indexShard.refresh("test");
            }
            {
                final DocsStats docsStats = indexShard.docStats();
                final StoreStats storeStats = indexShard.storeStats();
                assertThat(storeStats.sizeInBytes(), greaterThan(numDoc * 100L)); // A doc should be more than 100 bytes.
assertThat("Estimated total document size is too small compared with the stored size",
                    docsStats.getTotalSizeInBytes(), greaterThanOrEqualTo(storeStats.sizeInBytes() * 80/100));
                assertThat("Estimated total document size is too large compared with the stored size",
                    docsStats.getTotalSizeInBytes(), lessThanOrEqualTo(storeStats.sizeInBytes() * 120/100));
            }

            // Do some updates and deletes, then recheck the correlation again.
            for (int i = 0; i < numDoc / 2; i++) {
                if (randomBoolean()) {
                    deleteDoc(indexShard, "doc", Integer.toString(i));
                } else {
                    indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}");
                }
            }
            if (randomBoolean()) {
                indexShard.flush(new FlushRequest());
            } else {
                indexShard.refresh("test");
            }
            {
                final DocsStats docsStats = indexShard.docStats();
                final StoreStats storeStats = indexShard.storeStats();
                assertThat("Estimated total document size is too small compared with the stored size",
                    docsStats.getTotalSizeInBytes(), greaterThanOrEqualTo(storeStats.sizeInBytes() * 80/100));
                assertThat("Estimated total document size is too large compared with the stored size",
                    docsStats.getTotalSizeInBytes(), lessThanOrEqualTo(storeStats.sizeInBytes() * 120/100));
            }
        } finally {
            closeShards(indexShard);
        }
    }

    /**
     * here we are simulating the scenario that happens when we do async shard fetching from GatewayService while we are finishing
     * a recovery and concurrently clean files. This should always be possible without any exception.
     * Yet there was a bug where IndexShard acquired the index writer lock before it called into the store,
     * which has its own locking for metadata reads.
     */
    public void testReadSnapshotConcurrently() throws IOException, InterruptedException {
        IndexShard indexShard = newStartedShard();
        indexDoc(indexShard, "_doc", "0", "{}");
        if (randomBoolean()) {
            indexShard.refresh("test");
        }
        indexDoc(indexShard, "_doc", "1", "{}");
        indexShard.flush(new FlushRequest());
        closeShards(indexShard);

        final IndexShard newShard = reinitShard(indexShard);
        Store.MetadataSnapshot storeFileMetaDatas = newShard.snapshotStoreMetadata();
        assertTrue("at least 2 files, commit and data: " + storeFileMetaDatas.toString(), storeFileMetaDatas.size() > 1);
        AtomicBoolean stop = new AtomicBoolean(false);
        CountDownLatch latch = new CountDownLatch(1);
        expectThrows(AlreadyClosedException.class, () -> newShard.getEngine()); // no engine
        // background reader: repeatedly snapshot store metadata while the main thread cleans files
        Thread thread = new Thread(() -> {
            latch.countDown();
            while (stop.get() == false) {
                try {
                    Store.MetadataSnapshot readMeta = newShard.snapshotStoreMetadata();
                    assertEquals(0, storeFileMetaDatas.recoveryDiff(readMeta).different.size());
                    assertEquals(0, storeFileMetaDatas.recoveryDiff(readMeta).missing.size());
                    assertEquals(storeFileMetaDatas.size(), storeFileMetaDatas.recoveryDiff(readMeta).identical.size());
                } catch (IOException e) {
                    throw new AssertionError(e);
                }
            }
        });
        thread.start();
        latch.await();

        int iters = iterations(10, 100);
        for (int i = 0; i < iters; i++) {
            newShard.store().cleanupAndVerify("test", storeFileMetaDatas);
        }
        assertTrue(stop.compareAndSet(false, true));
        thread.join();
        closeShards(newShard);
    }

    /**
     * Simulates a scenario that happens when we are async fetching snapshot metadata from GatewayService
     * and checking index concurrently. This should always be possible without any exception.
*/ public void testReadSnapshotAndCheckIndexConcurrently() throws Exception { final boolean isPrimary = randomBoolean(); IndexShard indexShard = newStartedShard(isPrimary); final long numDocs = between(10, 100); for (long i = 0; i < numDocs; i++) { indexDoc(indexShard, "_doc", Long.toString(i), "{}"); if (randomBoolean()) { indexShard.refresh("test"); } } indexShard.flush(new FlushRequest()); closeShards(indexShard); final ShardRouting shardRouting = ShardRoutingHelper.initWithSameId(indexShard.routingEntry(), isPrimary ? RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE ); final IndexMetaData indexMetaData = IndexMetaData.builder(indexShard.indexSettings().getIndexMetaData()) .settings(Settings.builder() .put(indexShard.indexSettings.getSettings()) .put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("false", "true", "checksum", "fix"))) .build(); final IndexShard newShard = newShard(shardRouting, indexShard.shardPath(), indexMetaData, null, null, indexShard.engineFactory, indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER); Store.MetadataSnapshot storeFileMetaDatas = newShard.snapshotStoreMetadata(); assertTrue("at least 2 files, commit and data: " + storeFileMetaDatas.toString(), storeFileMetaDatas.size() > 1); AtomicBoolean stop = new AtomicBoolean(false); CountDownLatch latch = new CountDownLatch(1); Thread snapshotter = new Thread(() -> { latch.countDown(); while (stop.get() == false) { try { Store.MetadataSnapshot readMeta = newShard.snapshotStoreMetadata(); assertThat(readMeta.getNumDocs(), equalTo(numDocs)); assertThat(storeFileMetaDatas.recoveryDiff(readMeta).different.size(), equalTo(0)); assertThat(storeFileMetaDatas.recoveryDiff(readMeta).missing.size(), equalTo(0)); assertThat(storeFileMetaDatas.recoveryDiff(readMeta).identical.size(), equalTo(storeFileMetaDatas.size())); } catch (IOException e) { throw new AssertionError(e); } } }); snapshotter.start(); if (isPrimary) { 
newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), getFakeDiscoNode(newShard.routingEntry().currentNodeId()), null)); } else { newShard.markAsRecovering("peer", new RecoveryState(newShard.routingEntry(), getFakeDiscoNode(newShard.routingEntry().currentNodeId()), getFakeDiscoNode(newShard.routingEntry().currentNodeId()))); } int iters = iterations(10, 100); latch.await(); for (int i = 0; i < iters; i++) { newShard.checkIndex(); } assertTrue(stop.compareAndSet(false, true)); snapshotter.join(); closeShards(newShard); } class Result { private final int localCheckpoint; private final int maxSeqNo; private final boolean gap; Result(final int localCheckpoint, final int maxSeqNo, final boolean gap) { this.localCheckpoint = localCheckpoint; this.maxSeqNo = maxSeqNo; this.gap = gap; } } /** * Index on the specified shard while introducing sequence number gaps. * * @param indexShard the shard * @param operations the number of operations * @param offset the starting sequence number * @return a pair of the maximum sequence number and whether or not a gap was introduced * @throws IOException if an I/O exception occurs while indexing on the shard */ private Result indexOnReplicaWithGaps( final IndexShard indexShard, final int operations, final int offset) throws IOException { int localCheckpoint = offset; int max = offset; boolean gap = false; for (int i = offset + 1; i < operations; i++) { if (!rarely() || i == operations - 1) { // last operation can't be a gap as it's not a gap anymore final String id = Integer.toString(i); SourceToParse sourceToParse = SourceToParse.source(indexShard.shardId().getIndexName(), "_doc", id, new BytesArray("{}"), XContentType.JSON); indexShard.applyIndexOperationOnReplica(i, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); if (!gap && i == localCheckpoint + 1) { localCheckpoint++; } max = i; } else { gap = true; } } assert localCheckpoint == indexShard.getLocalCheckpoint(); assert !gap || 
(localCheckpoint != max); return new Result(localCheckpoint, max, gap); } /** A dummy repository for testing which just needs restore overridden */ private abstract static class RestoreOnlyRepository extends AbstractLifecycleComponent implements Repository { private final String indexName; RestoreOnlyRepository(String indexName) { super(Settings.EMPTY); this.indexName = indexName; } @Override protected void doStart() { } @Override protected void doStop() { } @Override protected void doClose() { } @Override public RepositoryMetaData getMetadata() { return null; } @Override public SnapshotInfo getSnapshotInfo(SnapshotId snapshotId) { return null; } @Override public MetaData getSnapshotGlobalMetaData(SnapshotId snapshotId) { return null; } @Override public IndexMetaData getSnapshotIndexMetaData(SnapshotId snapshotId, IndexId index) throws IOException { return null; } @Override public RepositoryData getRepositoryData() { Map<IndexId, Set<SnapshotId>> map = new HashMap<>(); map.put(new IndexId(indexName, "blah"), emptySet()); return new RepositoryData(EMPTY_REPO_GEN, Collections.emptyMap(), Collections.emptyMap(), map, Collections.emptyList()); } @Override public void initializeSnapshot(SnapshotId snapshotId, List<IndexId> indices, MetaData metaData) { } @Override public SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List<IndexId> indices, long startTime, String failure, int totalShards, List<SnapshotShardFailure> shardFailures, long repositoryStateId, boolean includeGlobalState) { return null; } @Override public void deleteSnapshot(SnapshotId snapshotId, long repositoryStateId) { } @Override public long getSnapshotThrottleTimeInNanos() { return 0; } @Override public long getRestoreThrottleTimeInNanos() { return 0; } @Override public String startVerification() { return null; } @Override public void endVerification(String verificationToken) { } @Override public boolean isReadOnly() { return false; } @Override public void snapshotShard(IndexShard shard, SnapshotId 
snapshotId, IndexId indexId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) { } @Override public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId) { return null; } @Override public void verify(String verificationToken, DiscoveryNode localNode) { } } public void testIsSearchIdle() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(primary); indexDoc(primary, "_doc", "0", "{\"foo\" : \"bar\"}"); assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); assertFalse(primary.isSearchIdle()); IndexScopedSettings scopedSettings = primary.indexSettings().getScopedSettings(); settings = Settings.builder().put(settings).put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.ZERO).build(); scopedSettings.applySettings(settings); assertTrue(primary.isSearchIdle()); settings = Settings.builder().put(settings).put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.timeValueMinutes(1)) .build(); scopedSettings.applySettings(settings); assertFalse(primary.isSearchIdle()); settings = Settings.builder().put(settings).put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.timeValueMillis(10)) .build(); scopedSettings.applySettings(settings); assertBusy(() -> assertTrue(primary.isSearchIdle())); do { // now loop until we are fast enough... 
shouldn't take long primary.awaitShardSearchActive(aBoolean -> {}); } while (primary.isSearchIdle()); closeShards(primary); } public void testScheduledRefresh() throws IOException, InterruptedException { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(primary); indexDoc(primary, "_doc", "0", "{\"foo\" : \"bar\"}"); assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); IndexScopedSettings scopedSettings = primary.indexSettings().getScopedSettings(); settings = Settings.builder().put(settings).put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.ZERO).build(); scopedSettings.applySettings(settings); assertFalse(primary.getEngine().refreshNeeded()); indexDoc(primary, "_doc", "1", "{\"foo\" : \"bar\"}"); assertTrue(primary.getEngine().refreshNeeded()); long lastSearchAccess = primary.getLastSearcherAccess(); assertFalse(primary.scheduledRefresh()); assertEquals(lastSearchAccess, primary.getLastSearcherAccess()); // wait until the thread-pool has moved the timestamp otherwise we can't assert on this below awaitBusy(() -> primary.getThreadPool().relativeTimeInMillis() > lastSearchAccess); CountDownLatch latch = new CountDownLatch(10); for (int i = 0; i < 10; i++) { primary.awaitShardSearchActive(refreshed -> { assertTrue(refreshed); try (Engine.Searcher searcher = primary.acquireSearcher("test")) { assertEquals(2, searcher.reader().numDocs()); } finally { latch.countDown(); } }); } assertNotEquals("awaitShardSearchActive must access a searcher to remove search idle state", 
lastSearchAccess, primary.getLastSearcherAccess()); assertTrue(lastSearchAccess < primary.getLastSearcherAccess()); try (Engine.Searcher searcher = primary.acquireSearcher("test")) { assertEquals(1, searcher.reader().numDocs()); } assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); latch.await(); CountDownLatch latch1 = new CountDownLatch(1); primary.awaitShardSearchActive(refreshed -> { assertFalse(refreshed); try (Engine.Searcher searcher = primary.acquireSearcher("test")) { assertEquals(2, searcher.reader().numDocs()); } finally { latch1.countDown(); } }); latch1.await(); indexDoc(primary, "_doc", "2", "{\"foo\" : \"bar\"}"); assertFalse(primary.scheduledRefresh()); assertTrue(primary.isSearchIdle()); primary.checkIdle(0); assertTrue(primary.scheduledRefresh()); // make sure we refresh once the shard is inactive try (Engine.Searcher searcher = primary.acquireSearcher("test")) { assertEquals(3, searcher.reader().numDocs()); } closeShards(primary); } public void testRefreshIsNeededWithRefreshListeners() throws IOException, InterruptedException { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(primary); indexDoc(primary, "_doc", "0", "{\"foo\" : \"bar\"}"); assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); Engine.IndexResult doc = indexDoc(primary, "_doc", "1", "{\"foo\" : \"bar\"}"); CountDownLatch latch = new CountDownLatch(1); primary.addRefreshListener(doc.getTranslogLocation(), r -> latch.countDown()); assertEquals(1, latch.getCount()); 
assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); latch.await(); IndexScopedSettings scopedSettings = primary.indexSettings().getScopedSettings(); settings = Settings.builder().put(settings).put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.ZERO).build(); scopedSettings.applySettings(settings); doc = indexDoc(primary, "_doc", "2", "{\"foo\" : \"bar\"}"); CountDownLatch latch1 = new CountDownLatch(1); primary.addRefreshListener(doc.getTranslogLocation(), r -> latch1.countDown()); assertEquals(1, latch1.getCount()); assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); latch1.await(); closeShards(primary); } public void testSegmentMemoryTrackedInBreaker() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(primary); indexDoc(primary, "_doc", "0", "{\"foo\" : \"foo\"}"); primary.refresh("forced refresh"); SegmentsStats ss = primary.segmentStats(randomBoolean()); CircuitBreaker breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(ss.getMemoryInBytes(), equalTo(breaker.getUsed())); final long preRefreshBytes = ss.getMemoryInBytes(); indexDoc(primary, "_doc", "1", "{\"foo\" : \"bar\"}"); indexDoc(primary, "_doc", "2", "{\"foo\" : \"baz\"}"); indexDoc(primary, "_doc", "3", "{\"foo\" : \"eggplant\"}"); ss = primary.segmentStats(randomBoolean()); breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(preRefreshBytes, equalTo(breaker.getUsed())); 
primary.refresh("refresh"); ss = primary.segmentStats(randomBoolean()); breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(breaker.getUsed(), equalTo(ss.getMemoryInBytes())); assertThat(breaker.getUsed(), greaterThan(preRefreshBytes)); indexDoc(primary, "_doc", "4", "{\"foo\": \"potato\"}"); indexDoc(primary, "_doc", "5", "{\"foo\": \"potato\"}"); // Forces a refresh with the INTERNAL scope ((InternalEngine) primary.getEngine()).writeIndexingBuffer(); ss = primary.segmentStats(randomBoolean()); breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(breaker.getUsed(), equalTo(ss.getMemoryInBytes())); assertThat(breaker.getUsed(), greaterThan(preRefreshBytes)); final long postRefreshBytes = ss.getMemoryInBytes(); // Deleting a doc causes its memory to be freed from the breaker deleteDoc(primary, "_doc", "0"); // Here we are testing that a fully deleted segment should be dropped and its memory usage is freed. // In order to instruct the merge policy not to keep a fully deleted segment, // we need to flush and make that commit safe so that the SoftDeletesPolicy can drop everything. 
if (IndexSettings.INDEX_SOFT_DELETES_SETTING.get(settings)) { primary.sync(); flushShard(primary); } primary.refresh("force refresh"); ss = primary.segmentStats(randomBoolean()); breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(breaker.getUsed(), lessThan(postRefreshBytes)); closeShards(primary); breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(breaker.getUsed(), equalTo(0L)); } public void testSegmentMemoryTrackedWithRandomSearchers() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(primary); int threadCount = randomIntBetween(2, 6); List<Thread> threads = new ArrayList<>(threadCount); int iterations = randomIntBetween(50, 100); List<Engine.Searcher> searchers = Collections.synchronizedList(new ArrayList<>()); logger.info("--> running with {} threads and {} iterations each", threadCount, iterations); for (int threadId = 0; threadId < threadCount; threadId++) { final String threadName = "thread-" + threadId; Runnable r = () -> { for (int i = 0; i < iterations; i++) { try { if (randomBoolean()) { String id = "id-" + threadName + "-" + i; logger.debug("--> {} indexing {}", threadName, id); indexDoc(primary, "_doc", id, "{\"foo\" : \"" + randomAlphaOfLength(10) + "\"}"); } if (randomBoolean() && i > 10) { String id = "id-" + threadName + "-" + randomIntBetween(0, i - 1); logger.debug("--> {}, deleting {}", threadName, id); deleteDoc(primary, "_doc", id); } if (randomBoolean()) { logger.debug("--> {} refreshing", 
threadName); primary.refresh("forced refresh"); } if (randomBoolean()) { String searcherName = "searcher-" + threadName + "-" + i; logger.debug("--> {} acquiring new searcher {}", threadName, searcherName); // Acquire a new searcher, adding it to the list searchers.add(primary.acquireSearcher(searcherName)); } if (randomBoolean() && searchers.size() > 1) { // Close one of the searchers at random synchronized (searchers) { // re-check because it could have decremented after the check if (searchers.size() > 1) { Engine.Searcher searcher = searchers.remove(0); logger.debug("--> {} closing searcher {}", threadName, searcher.source()); IOUtils.close(searcher); } } } } catch (Exception e) { logger.warn("--> got exception: ", e); fail("got an exception we didn't expect"); } } }; threads.add(new Thread(r, threadName)); } threads.stream().forEach(t -> t.start()); for (Thread t : threads) { t.join(); } // We need to wait for all ongoing merges to complete. The reason is that during a merge the // IndexWriter holds the core cache key open and causes the memory to be registered in the breaker primary.forceMerge(new ForceMergeRequest().maxNumSegments(1).flush(true)); // Close remaining searchers IOUtils.close(searchers); primary.refresh("test"); SegmentsStats ss = primary.segmentStats(randomBoolean()); CircuitBreaker breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); long segmentMem = ss.getMemoryInBytes(); long breakerMem = breaker.getUsed(); logger.info("--> comparing segmentMem: {} - breaker: {} => {}", segmentMem, breakerMem, segmentMem == breakerMem); assertThat(segmentMem, equalTo(breakerMem)); // Close shard closeShards(primary); // Check that the breaker was successfully reset to 0, meaning that all the accounting was correctly applied breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(breaker.getUsed(), equalTo(0L)); } public void testFlushOnInactive() throws Exception { Settings settings = 
Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(metaData.getIndex(), 0), "n1", true, ShardRoutingState .INITIALIZING, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE); final ShardId shardId = shardRouting.shardId(); final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); AtomicBoolean markedInactive = new AtomicBoolean(); AtomicReference<IndexShard> primaryRef = new AtomicReference<>(); IndexShard primary = newShard(shardRouting, shardPath, metaData, null, null, new InternalEngineFactory(), () -> { }, new IndexEventListener() { @Override public void onShardInactive(IndexShard indexShard) { markedInactive.set(true); primaryRef.get().flush(new FlushRequest()); } }); primaryRef.set(primary); recoverShardFromStore(primary); for (int i = 0; i < 3; i++) { indexDoc(primary, "_doc", "" + i, "{\"foo\" : \"" + randomAlphaOfLength(10) + "\"}"); primary.refresh("test"); // produce segments } List<Segment> segments = primary.segments(false); Set<String> names = new HashSet<>(); for (Segment segment : segments) { assertFalse(segment.committed); assertTrue(segment.search); names.add(segment.getName()); } assertEquals(3, segments.size()); primary.flush(new FlushRequest()); primary.forceMerge(new ForceMergeRequest().maxNumSegments(1).flush(false)); primary.refresh("test"); segments = primary.segments(false); for (Segment segment : segments) { if (names.contains(segment.getName())) { assertTrue(segment.committed); assertFalse(segment.search); } else { 
assertFalse(segment.committed); assertTrue(segment.search); } } assertEquals(4, segments.size()); assertFalse(markedInactive.get()); assertBusy(() -> { primary.checkIdle(0); assertFalse(primary.isActive()); }); assertTrue(markedInactive.get()); segments = primary.segments(false); assertEquals(1, segments.size()); for (Segment segment : segments) { assertTrue(segment.committed); assertTrue(segment.search); } closeShards(primary); } public void testOnCloseStats() throws IOException { final IndexShard indexShard = newStartedShard(true); for (int i = 0; i < 3; i++) { indexDoc(indexShard, "_doc", "" + i, "{\"foo\" : \"" + randomAlphaOfLength(10) + "\"}"); indexShard.refresh("test"); // produce segments } // check stats on closed and on opened shard if (randomBoolean()) { closeShards(indexShard); expectThrows(AlreadyClosedException.class, () -> indexShard.seqNoStats()); expectThrows(AlreadyClosedException.class, () -> indexShard.commitStats()); expectThrows(AlreadyClosedException.class, () -> indexShard.storeStats()); } else { final SeqNoStats seqNoStats = indexShard.seqNoStats(); assertThat(seqNoStats.getLocalCheckpoint(), equalTo(2L)); final CommitStats commitStats = indexShard.commitStats(); assertThat(commitStats.getGeneration(), equalTo(2L)); final StoreStats storeStats = indexShard.storeStats(); assertThat(storeStats.sizeInBytes(), greaterThan(0L)); closeShards(indexShard); } } public void testSupplyTombstoneDoc() throws Exception { IndexShard shard = newStartedShard(); String id = randomRealisticUnicodeOfLengthBetween(1, 10); ParsedDocument deleteTombstone = shard.getEngine().config().getTombstoneDocSupplier().newDeleteTombstoneDoc("doc", id); assertThat(deleteTombstone.docs(), hasSize(1)); ParseContext.Document deleteDoc = deleteTombstone.docs().get(0); assertThat(deleteDoc.getFields().stream().map(IndexableField::name).collect(Collectors.toList()), containsInAnyOrder(IdFieldMapper.NAME, VersionFieldMapper.NAME, SeqNoFieldMapper.NAME, SeqNoFieldMapper.NAME, 
SeqNoFieldMapper.PRIMARY_TERM_NAME, SeqNoFieldMapper.TOMBSTONE_NAME)); assertThat(deleteDoc.getField(IdFieldMapper.NAME).binaryValue(), equalTo(Uid.encodeId(id))); assertThat(deleteDoc.getField(SeqNoFieldMapper.TOMBSTONE_NAME).numericValue().longValue(), equalTo(1L)); final String reason = randomUnicodeOfLength(200); ParsedDocument noopTombstone = shard.getEngine().config().getTombstoneDocSupplier().newNoopTombstoneDoc(reason); assertThat(noopTombstone.docs(), hasSize(1)); ParseContext.Document noopDoc = noopTombstone.docs().get(0); assertThat(noopDoc.getFields().stream().map(IndexableField::name).collect(Collectors.toList()), containsInAnyOrder(VersionFieldMapper.NAME, SourceFieldMapper.NAME, SeqNoFieldMapper.TOMBSTONE_NAME, SeqNoFieldMapper.NAME, SeqNoFieldMapper.NAME, SeqNoFieldMapper.PRIMARY_TERM_NAME)); assertThat(noopDoc.getField(SeqNoFieldMapper.TOMBSTONE_NAME).numericValue().longValue(), equalTo(1L)); assertThat(noopDoc.getField(SourceFieldMapper.NAME).binaryValue(), equalTo(new BytesRef(reason))); closeShards(shard); } public void testSearcherIncludesSoftDeletes() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("test", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard shard = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(shard); indexDoc(shard, "test", "0", "{\"foo\" : \"bar\"}"); indexDoc(shard, "test", "1", "{\"foo\" : \"baz\"}"); deleteDoc(shard, "test", "0"); shard.refresh("test"); try (Engine.Searcher searcher = shard.acquireSearcher("test")) { IndexSearcher searchWithSoftDeletes = new 
IndexSearcher(Lucene.wrapAllDocsLive(searcher.getDirectoryReader())); assertThat(searcher.searcher().search(new TermQuery(new Term("foo", "bar")), 10).totalHits, equalTo(0L)); assertThat(searchWithSoftDeletes.search(new TermQuery(new Term("foo", "bar")), 10).totalHits, equalTo(1L)); assertThat(searcher.searcher().search(new TermQuery(new Term("foo", "baz")), 10).totalHits, equalTo(1L)); assertThat(searchWithSoftDeletes.search(new TermQuery(new Term("foo", "baz")), 10).totalHits, equalTo(1L)); } closeShards(shard); } }
server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.index.shard; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.CorruptIndexException; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.FilterDirectory; import org.apache.lucene.store.IOContext; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.Constants; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.stats.CommonStats; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.action.index.IndexRequest; import 
org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.AllocationId; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRoutingHelper; import org.elasticsearch.cluster.routing.ShardRoutingState; import org.elasticsearch.cluster.routing.TestShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexSettings; 
import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.engine.EngineTestCase; import org.elasticsearch.index.engine.InternalEngine; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.engine.SegmentsStats; import org.elasticsearch.index.fielddata.FieldDataStats; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; import org.elasticsearch.index.store.StoreStats; import org.elasticsearch.index.translog.TestTranslog; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.index.translog.TranslogTests; import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.indices.recovery.RecoveryTarget; import org.elasticsearch.repositories.IndexId; import org.elasticsearch.repositories.Repository; import 
org.elasticsearch.repositories.RepositoryData; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotInfo; import org.elasticsearch.snapshots.SnapshotShardFailure; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.FieldMaskingReader; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.ElasticsearchException; import java.io.IOException; import java.nio.charset.Charset; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; import java.util.concurrent.ExecutionException; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Consumer; import java.util.function.LongFunction; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.IntStream; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; import static org.elasticsearch.common.lucene.Lucene.cleanLuceneIndex; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.repositories.RepositoryData.EMPTY_REPO_GEN; import static 
org.elasticsearch.test.hamcrest.RegexMatcher.matches; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; /** * Simple unit-test IndexShard related operations. */ public class IndexShardTests extends IndexShardTestCase { public static ShardStateMetaData load(Logger logger, Path... shardPaths) throws IOException { return ShardStateMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, shardPaths); } public static void write(ShardStateMetaData shardStateMetaData, Path... shardPaths) throws IOException { ShardStateMetaData.FORMAT.write(shardStateMetaData, shardPaths); } public static Engine getEngineFromShard(IndexShard shard) { return shard.getEngineOrNull(); } public void testWriteShardState() throws Exception { try (NodeEnvironment env = newNodeEnvironment()) { ShardId id = new ShardId("foo", "fooUUID", 1); boolean primary = randomBoolean(); AllocationId allocationId = randomBoolean() ? 
null : randomAllocationId(); ShardStateMetaData state1 = new ShardStateMetaData(primary, "fooUUID", allocationId); write(state1, env.availableShardPaths(id)); ShardStateMetaData shardStateMetaData = load(logger, env.availableShardPaths(id)); assertEquals(shardStateMetaData, state1); ShardStateMetaData state2 = new ShardStateMetaData(primary, "fooUUID", allocationId); write(state2, env.availableShardPaths(id)); shardStateMetaData = load(logger, env.availableShardPaths(id)); assertEquals(shardStateMetaData, state1); ShardStateMetaData state3 = new ShardStateMetaData(primary, "fooUUID", allocationId); write(state3, env.availableShardPaths(id)); shardStateMetaData = load(logger, env.availableShardPaths(id)); assertEquals(shardStateMetaData, state3); assertEquals("fooUUID", state3.indexUUID); } } public void testPersistenceStateMetadataPersistence() throws Exception { IndexShard shard = newStartedShard(); final Path shardStatePath = shard.shardPath().getShardStatePath(); ShardStateMetaData shardStateMetaData = load(logger, shardStatePath); assertEquals(getShardStateMetadata(shard), shardStateMetaData); ShardRouting routing = shard.shardRouting; IndexShardTestCase.updateRoutingEntry(shard, routing); shardStateMetaData = load(logger, shardStatePath); assertEquals(shardStateMetaData, getShardStateMetadata(shard)); assertEquals(shardStateMetaData, new ShardStateMetaData(routing.primary(), shard.indexSettings().getUUID(), routing.allocationId())); routing = TestShardRouting.relocate(shard.shardRouting, "some node", 42L); IndexShardTestCase.updateRoutingEntry(shard, routing); shardStateMetaData = load(logger, shardStatePath); assertEquals(shardStateMetaData, getShardStateMetadata(shard)); assertEquals(shardStateMetaData, new ShardStateMetaData(routing.primary(), shard.indexSettings().getUUID(), routing.allocationId())); closeShards(shard); } public void testFailShard() throws Exception { allowShardFailures(); IndexShard shard = newStartedShard(); final ShardPath shardPath = 
shard.shardPath(); assertNotNull(shardPath); // fail shard shard.failShard("test shard fail", new CorruptIndexException("", "")); shard.close("do not assert history", false); shard.store().close(); // check state file still exists ShardStateMetaData shardStateMetaData = load(logger, shardPath.getShardStatePath()); assertEquals(shardStateMetaData, getShardStateMetadata(shard)); // but index can't be opened for a failed shard assertThat("store index should be corrupted", Store.canOpenIndex(logger, shardPath.resolveIndex(), shard.shardId(), (shardId, lockTimeoutMS) -> new DummyShardLock(shardId)), equalTo(false)); } ShardStateMetaData getShardStateMetadata(IndexShard shard) { ShardRouting shardRouting = shard.routingEntry(); if (shardRouting == null) { return null; } else { return new ShardStateMetaData(shardRouting.primary(), shard.indexSettings().getUUID(), shardRouting.allocationId()); } } private AllocationId randomAllocationId() { AllocationId allocationId = AllocationId.newInitializing(); if (randomBoolean()) { allocationId = AllocationId.newRelocation(allocationId); } return allocationId; } public void testShardStateMetaHashCodeEquals() { AllocationId allocationId = randomBoolean() ? 
null : randomAllocationId(); ShardStateMetaData meta = new ShardStateMetaData(randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), allocationId); assertEquals(meta, new ShardStateMetaData(meta.primary, meta.indexUUID, meta.allocationId)); assertEquals(meta.hashCode(), new ShardStateMetaData(meta.primary, meta.indexUUID, meta.allocationId).hashCode()); assertFalse(meta.equals(new ShardStateMetaData(!meta.primary, meta.indexUUID, meta.allocationId))); assertFalse(meta.equals(new ShardStateMetaData(!meta.primary, meta.indexUUID + "foo", meta.allocationId))); assertFalse(meta.equals(new ShardStateMetaData(!meta.primary, meta.indexUUID + "foo", randomAllocationId()))); Set<Integer> hashCodes = new HashSet<>(); for (int i = 0; i < 30; i++) { // just a sanity check that we impl hashcode allocationId = randomBoolean() ? null : randomAllocationId(); meta = new ShardStateMetaData(randomBoolean(), randomRealisticUnicodeOfCodepointLengthBetween(1, 10), allocationId); hashCodes.add(meta.hashCode()); } assertTrue("more than one unique hashcode expected but got: " + hashCodes.size(), hashCodes.size() > 1); } public void testClosesPreventsNewOperations() throws InterruptedException, ExecutionException, IOException { IndexShard indexShard = newStartedShard(); closeShards(indexShard); assertThat(indexShard.getActiveOperationsCount(), equalTo(0)); try { indexShard.acquirePrimaryOperationPermit(null, ThreadPool.Names.WRITE, ""); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected } try { indexShard.acquireReplicaOperationPermit(indexShard.getPrimaryTerm(), SequenceNumbers.UNASSIGNED_SEQ_NO, null, ThreadPool.Names.WRITE, ""); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected } } public void testRejectOperationPermitWithHigherTermWhenNotStarted() throws IOException { IndexShard indexShard = newShard(false); expectThrows(IndexShardNotStartedException.class, () -> 
indexShard.acquireReplicaOperationPermit(indexShard.getPrimaryTerm() + randomIntBetween(1, 100), SequenceNumbers.UNASSIGNED_SEQ_NO, null, ThreadPool.Names.WRITE, "")); closeShards(indexShard); } public void testPrimaryPromotionDelaysOperations() throws IOException, BrokenBarrierException, InterruptedException { final IndexShard indexShard = newShard(false); recoveryEmptyReplica(indexShard, randomBoolean()); final int operations = scaledRandomIntBetween(1, 64); final CyclicBarrier barrier = new CyclicBarrier(1 + operations); final CountDownLatch latch = new CountDownLatch(operations); final CountDownLatch operationLatch = new CountDownLatch(1); final List<Thread> threads = new ArrayList<>(); for (int i = 0; i < operations; i++) { final String id = "t_" + i; final Thread thread = new Thread(() -> { try { barrier.await(); } catch (final BrokenBarrierException | InterruptedException e) { throw new RuntimeException(e); } indexShard.acquireReplicaOperationPermit( indexShard.getPrimaryTerm(), indexShard.getGlobalCheckpoint(), new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { latch.countDown(); try { operationLatch.await(); } catch (final InterruptedException e) { throw new RuntimeException(e); } releasable.close(); } @Override public void onFailure(Exception e) { throw new RuntimeException(e); } }, ThreadPool.Names.WRITE, id); }); thread.start(); threads.add(thread); } barrier.await(); latch.await(); final ShardRouting replicaRouting = indexShard.routingEntry(); promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()), new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); final int delayedOperations = scaledRandomIntBetween(1, 64); final CyclicBarrier delayedOperationsBarrier = new CyclicBarrier(1 + delayedOperations); final CountDownLatch delayedOperationsLatch = new CountDownLatch(delayedOperations); final AtomicLong counter = new AtomicLong(); final 
List<Thread> delayedThreads = new ArrayList<>(); for (int i = 0; i < delayedOperations; i++) { final String id = "d_" + i; final Thread thread = new Thread(() -> { try { delayedOperationsBarrier.await(); } catch (final BrokenBarrierException | InterruptedException e) { throw new RuntimeException(e); } indexShard.acquirePrimaryOperationPermit( new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { counter.incrementAndGet(); releasable.close(); delayedOperationsLatch.countDown(); } @Override public void onFailure(Exception e) { throw new RuntimeException(e); } }, ThreadPool.Names.WRITE, id); }); thread.start(); delayedThreads.add(thread); } delayedOperationsBarrier.await(); assertThat(counter.get(), equalTo(0L)); operationLatch.countDown(); for (final Thread thread : threads) { thread.join(); } delayedOperationsLatch.await(); assertThat(counter.get(), equalTo((long) delayedOperations)); for (final Thread thread : delayedThreads) { thread.join(); } closeShards(indexShard); } /** * This test makes sure that people can use the shard routing entry to check whether a shard was already promoted to * a primary. 
Concretely this means, that when we publish the routing entry via {@link IndexShard#routingEntry()} the following * should have happened * 1) Internal state (ala ReplicationTracker) have been updated * 2) Primary term is set to the new term */ public void testPublishingOrderOnPromotion() throws IOException, InterruptedException, BrokenBarrierException { final IndexShard indexShard = newShard(false); recoveryEmptyReplica(indexShard, randomBoolean()); final long promotedTerm = indexShard.getPrimaryTerm() + 1; final CyclicBarrier barrier = new CyclicBarrier(2); final AtomicBoolean stop = new AtomicBoolean(); final Thread thread = new Thread(() -> { try { barrier.await(); } catch (final BrokenBarrierException | InterruptedException e) { throw new RuntimeException(e); } while(stop.get() == false) { if (indexShard.routingEntry().primary()) { assertThat(indexShard.getPrimaryTerm(), equalTo(promotedTerm)); assertThat(indexShard.getReplicationGroup(), notNullValue()); } } }); thread.start(); barrier.await(); final ShardRouting replicaRouting = indexShard.routingEntry(); promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()), new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); stop.set(true); thread.join(); closeShards(indexShard); } public void testPrimaryFillsSeqNoGapsOnPromotion() throws Exception { final IndexShard indexShard = newShard(false); recoveryEmptyReplica(indexShard, randomBoolean()); // most of the time this is large enough that most of the time there will be at least one gap final int operations = 1024 - scaledRandomIntBetween(0, 1024); final Result result = indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED)); final int maxSeqNo = result.maxSeqNo; final boolean gap = result.gap; // promote the replica final ShardRouting replicaRouting = indexShard.routingEntry(); promoteReplica(indexShard, 
Collections.singleton(replicaRouting.allocationId().getId()), new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); /* * This operation completing means that the delay operation executed as part of increasing the primary term has completed and the * gaps are filled. */ final CountDownLatch latch = new CountDownLatch(1); indexShard.acquirePrimaryOperationPermit( new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { releasable.close(); latch.countDown(); } @Override public void onFailure(Exception e) { throw new AssertionError(e); } }, ThreadPool.Names.GENERIC, ""); latch.await(); assertThat(indexShard.getLocalCheckpoint(), equalTo((long) maxSeqNo)); closeShards(indexShard); } public void testPrimaryPromotionRollsGeneration() throws Exception { final IndexShard indexShard = newStartedShard(false); final long currentTranslogGeneration = getTranslog(indexShard).getGeneration().translogFileGeneration; // promote the replica final ShardRouting replicaRouting = indexShard.routingEntry(); final long newPrimaryTerm = indexShard.getPrimaryTerm() + between(1, 10000); final ShardRouting primaryRouting = newShardRouting( replicaRouting.shardId(), replicaRouting.currentNodeId(), null, true, ShardRoutingState.STARTED, replicaRouting.allocationId()); indexShard.updateShardState(primaryRouting, newPrimaryTerm, (shard, listener) -> {}, 0L, Collections.singleton(primaryRouting.allocationId().getId()), new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(), Collections.emptySet()); /* * This operation completing means that the delay operation executed as part of increasing the primary term has completed and the * translog generation has rolled. 
*/ final CountDownLatch latch = new CountDownLatch(1); indexShard.acquirePrimaryOperationPermit( new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { releasable.close(); latch.countDown(); } @Override public void onFailure(Exception e) { throw new RuntimeException(e); } }, ThreadPool.Names.GENERIC, ""); latch.await(); assertThat(getTranslog(indexShard).getGeneration().translogFileGeneration, equalTo(currentTranslogGeneration + 1)); assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(newPrimaryTerm)); closeShards(indexShard); } public void testOperationPermitsOnPrimaryShards() throws InterruptedException, ExecutionException, IOException { final ShardId shardId = new ShardId("test", "_na_", 0); final IndexShard indexShard; if (randomBoolean()) { // relocation target indexShard = newShard(newShardRouting(shardId, "local_node", "other node", true, ShardRoutingState.INITIALIZING, AllocationId.newRelocation(AllocationId.newInitializing()))); } else if (randomBoolean()) { // simulate promotion indexShard = newStartedShard(false); ShardRouting replicaRouting = indexShard.routingEntry(); ShardRouting primaryRouting = newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), null, true, ShardRoutingState.STARTED, replicaRouting.allocationId()); final long newPrimaryTerm = indexShard.getPrimaryTerm() + between(1, 1000); indexShard.updateShardState(primaryRouting, newPrimaryTerm, (shard, listener) -> { assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(newPrimaryTerm)); }, 0L, Collections.singleton(indexShard.routingEntry().allocationId().getId()), new IndexShardRoutingTable.Builder(indexShard.shardId()).addShard(primaryRouting).build(), Collections.emptySet()); } else { indexShard = newStartedShard(true); } final long primaryTerm = indexShard.getPrimaryTerm(); assertEquals(0, indexShard.getActiveOperationsCount()); if (indexShard.routingEntry().isRelocationTarget() == false) { 
try { indexShard.acquireReplicaOperationPermit(primaryTerm, indexShard.getGlobalCheckpoint(), null, ThreadPool.Names.WRITE, ""); fail("shard shouldn't accept operations as replica"); } catch (IllegalStateException ignored) { } } Releasable operation1 = acquirePrimaryOperationPermitBlockingly(indexShard); assertEquals(1, indexShard.getActiveOperationsCount()); Releasable operation2 = acquirePrimaryOperationPermitBlockingly(indexShard); assertEquals(2, indexShard.getActiveOperationsCount()); Releasables.close(operation1, operation2); assertEquals(0, indexShard.getActiveOperationsCount()); closeShards(indexShard); } private Releasable acquirePrimaryOperationPermitBlockingly(IndexShard indexShard) throws ExecutionException, InterruptedException { PlainActionFuture<Releasable> fut = new PlainActionFuture<>(); indexShard.acquirePrimaryOperationPermit(fut, ThreadPool.Names.WRITE, ""); return fut.get(); } private Releasable acquireReplicaOperationPermitBlockingly(IndexShard indexShard, long opPrimaryTerm) throws ExecutionException, InterruptedException { PlainActionFuture<Releasable> fut = new PlainActionFuture<>(); indexShard.acquireReplicaOperationPermit(opPrimaryTerm, indexShard.getGlobalCheckpoint(), fut, ThreadPool.Names.WRITE, ""); return fut.get(); } public void testOperationPermitOnReplicaShards() throws Exception { final ShardId shardId = new ShardId("test", "_na_", 0); final IndexShard indexShard; final boolean engineClosed; switch (randomInt(2)) { case 0: // started replica indexShard = newStartedShard(false); engineClosed = false; break; case 1: { // initializing replica / primary final boolean relocating = randomBoolean(); ShardRouting routing = newShardRouting(shardId, "local_node", relocating ? "sourceNode" : null, relocating ? randomBoolean() : false, ShardRoutingState.INITIALIZING, relocating ? 
AllocationId.newRelocation(AllocationId.newInitializing()) : AllocationId.newInitializing()); indexShard = newShard(routing); engineClosed = true; break; } case 2: { // relocation source indexShard = newStartedShard(true); ShardRouting routing = indexShard.routingEntry(); routing = newShardRouting(routing.shardId(), routing.currentNodeId(), "otherNode", true, ShardRoutingState.RELOCATING, AllocationId.newRelocation(routing.allocationId())); IndexShardTestCase.updateRoutingEntry(indexShard, routing); indexShard.relocated(primaryContext -> {}); engineClosed = false; break; } default: throw new UnsupportedOperationException("get your numbers straight"); } final ShardRouting shardRouting = indexShard.routingEntry(); logger.info("shard routing to {}", shardRouting); assertEquals(0, indexShard.getActiveOperationsCount()); if (shardRouting.primary() == false) { final IllegalStateException e = expectThrows(IllegalStateException.class, () -> indexShard.acquirePrimaryOperationPermit(null, ThreadPool.Names.WRITE, "")); assertThat(e, hasToString(containsString("shard " + shardRouting + " is not a primary"))); } final long primaryTerm = indexShard.getPrimaryTerm(); final long translogGen = engineClosed ? 
-1 : getTranslog(indexShard).getGeneration().translogFileGeneration; final Releasable operation1; final Releasable operation2; if (engineClosed == false) { operation1 = acquireReplicaOperationPermitBlockingly(indexShard, primaryTerm); assertEquals(1, indexShard.getActiveOperationsCount()); operation2 = acquireReplicaOperationPermitBlockingly(indexShard, primaryTerm); assertEquals(2, indexShard.getActiveOperationsCount()); } else { operation1 = null; operation2 = null; } { final AtomicBoolean onResponse = new AtomicBoolean(); final AtomicBoolean onFailure = new AtomicBoolean(); final AtomicReference<Exception> onFailureException = new AtomicReference<>(); ActionListener<Releasable> onLockAcquired = new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { onResponse.set(true); } @Override public void onFailure(Exception e) { onFailure.set(true); onFailureException.set(e); } }; indexShard.acquireReplicaOperationPermit(primaryTerm - 1, SequenceNumbers.UNASSIGNED_SEQ_NO, onLockAcquired, ThreadPool.Names.WRITE, ""); assertFalse(onResponse.get()); assertTrue(onFailure.get()); assertThat(onFailureException.get(), instanceOf(IllegalStateException.class)); assertThat( onFailureException.get(), hasToString(containsString("operation primary term [" + (primaryTerm - 1) + "] is too old"))); } { final AtomicBoolean onResponse = new AtomicBoolean(); final AtomicReference<Exception> onFailure = new AtomicReference<>(); final CyclicBarrier barrier = new CyclicBarrier(2); final long newPrimaryTerm = primaryTerm + 1 + randomInt(20); if (engineClosed == false) { assertThat(indexShard.getLocalCheckpoint(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); assertThat(indexShard.getGlobalCheckpoint(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); } final long newGlobalCheckPoint; if (engineClosed || randomBoolean()) { newGlobalCheckPoint = SequenceNumbers.NO_OPS_PERFORMED; } else { long localCheckPoint = indexShard.getGlobalCheckpoint() + randomInt(100); // 
advance local checkpoint for (int i = 0; i <= localCheckPoint; i++) { indexShard.markSeqNoAsNoop(i, "dummy doc"); } newGlobalCheckPoint = randomIntBetween((int) indexShard.getGlobalCheckpoint(), (int) localCheckPoint); } final long expectedLocalCheckpoint; if (newGlobalCheckPoint == SequenceNumbers.UNASSIGNED_SEQ_NO) { expectedLocalCheckpoint = SequenceNumbers.NO_OPS_PERFORMED; } else { expectedLocalCheckpoint = newGlobalCheckPoint; } // but you can not increment with a new primary term until the operations on the older primary term complete final Thread thread = new Thread(() -> { try { barrier.await(); } catch (final BrokenBarrierException | InterruptedException e) { throw new RuntimeException(e); } ActionListener<Releasable> listener = new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { assertThat(indexShard.getPrimaryTerm(), equalTo(newPrimaryTerm)); assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(newPrimaryTerm)); assertThat(indexShard.getLocalCheckpoint(), equalTo(expectedLocalCheckpoint)); assertThat(indexShard.getGlobalCheckpoint(), equalTo(newGlobalCheckPoint)); onResponse.set(true); releasable.close(); finish(); } @Override public void onFailure(Exception e) { onFailure.set(e); finish(); } private void finish() { try { barrier.await(); } catch (final BrokenBarrierException | InterruptedException e) { throw new RuntimeException(e); } } }; try { indexShard.acquireReplicaOperationPermit( newPrimaryTerm, newGlobalCheckPoint, listener, ThreadPool.Names.SAME, ""); } catch (Exception e) { listener.onFailure(e); } }); thread.start(); barrier.await(); if (indexShard.state() == IndexShardState.CREATED || indexShard.state() == IndexShardState.RECOVERING) { barrier.await(); assertThat(indexShard.getPrimaryTerm(), equalTo(primaryTerm)); assertFalse(onResponse.get()); assertThat(onFailure.get(), instanceOf(IndexShardNotStartedException.class)); Releasables.close(operation1); 
Releasables.close(operation2); } else { // our operation should be blocked until the previous operations complete assertFalse(onResponse.get()); assertNull(onFailure.get()); assertThat(indexShard.getPrimaryTerm(), equalTo(primaryTerm)); assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(primaryTerm)); Releasables.close(operation1); // our operation should still be blocked assertFalse(onResponse.get()); assertNull(onFailure.get()); assertThat(indexShard.getPrimaryTerm(), equalTo(primaryTerm)); assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(primaryTerm)); Releasables.close(operation2); barrier.await(); // now lock acquisition should have succeeded assertThat(indexShard.getPrimaryTerm(), equalTo(newPrimaryTerm)); assertThat(TestTranslog.getCurrentTerm(getTranslog(indexShard)), equalTo(newPrimaryTerm)); if (engineClosed) { assertFalse(onResponse.get()); assertThat(onFailure.get(), instanceOf(AlreadyClosedException.class)); } else { assertTrue(onResponse.get()); assertNull(onFailure.get()); assertThat(getTranslog(indexShard).getGeneration().translogFileGeneration, equalTo(translogGen + 1)); assertThat(indexShard.getLocalCheckpoint(), equalTo(expectedLocalCheckpoint)); assertThat(indexShard.getGlobalCheckpoint(), equalTo(newGlobalCheckPoint)); } } thread.join(); assertEquals(0, indexShard.getActiveOperationsCount()); } closeShards(indexShard); } public void testGlobalCheckpointSync() throws IOException { // create the primary shard with a callback that sets a boolean when the global checkpoint sync is invoked final ShardId shardId = new ShardId("index", "_na_", 0); final ShardRouting shardRouting = TestShardRouting.newShardRouting( shardId, randomAlphaOfLength(8), true, ShardRoutingState.INITIALIZING, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE); final Settings settings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 2) 
.put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); final IndexMetaData.Builder indexMetadata = IndexMetaData.builder(shardRouting.getIndexName()).settings(settings).primaryTerm(0, 1); final AtomicBoolean synced = new AtomicBoolean(); final IndexShard primaryShard = newShard(shardRouting, indexMetadata.build(), null, new InternalEngineFactory(), () -> synced.set(true)); // add a replica recoverShardFromStore(primaryShard); final IndexShard replicaShard = newShard(shardId, false); recoverReplica(replicaShard, primaryShard, true); final int maxSeqNo = randomIntBetween(0, 128); for (int i = 0; i <= maxSeqNo; i++) { EngineTestCase.generateNewSeqNo(primaryShard.getEngine()); } final long checkpoint = rarely() ? maxSeqNo - scaledRandomIntBetween(0, maxSeqNo) : maxSeqNo; // set up local checkpoints on the shard copies primaryShard.updateLocalCheckpointForShard(shardRouting.allocationId().getId(), checkpoint); final int replicaLocalCheckpoint = randomIntBetween(0, Math.toIntExact(checkpoint)); final String replicaAllocationId = replicaShard.routingEntry().allocationId().getId(); primaryShard.updateLocalCheckpointForShard(replicaAllocationId, replicaLocalCheckpoint); // initialize the local knowledge on the primary of the global checkpoint on the replica shard final int replicaGlobalCheckpoint = randomIntBetween(Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED), Math.toIntExact(primaryShard.getGlobalCheckpoint())); primaryShard.updateGlobalCheckpointForShard(replicaAllocationId, replicaGlobalCheckpoint); // simulate a background maybe sync; it should only run if the knowledge on the replica of the global checkpoint lags the primary primaryShard.maybeSyncGlobalCheckpoint("test"); assertThat( synced.get(), equalTo(maxSeqNo == primaryShard.getGlobalCheckpoint() && (replicaGlobalCheckpoint < checkpoint))); // simulate that the background sync advanced the global checkpoint on the replica primaryShard.updateGlobalCheckpointForShard(replicaAllocationId, 
primaryShard.getGlobalCheckpoint()); // reset our boolean so that we can assert after another simulated maybe sync synced.set(false); primaryShard.maybeSyncGlobalCheckpoint("test"); // this time there should not be a sync since all the replica copies are caught up with the primary assertFalse(synced.get()); closeShards(replicaShard, primaryShard); } public void testRestoreLocalCheckpointTrackerFromTranslogOnPromotion() throws IOException, InterruptedException { final IndexShard indexShard = newStartedShard(false); final int operations = 1024 - scaledRandomIntBetween(0, 1024); indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED)); final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); final long globalCheckpointOnReplica = SequenceNumbers.UNASSIGNED_SEQ_NO; randomIntBetween( Math.toIntExact(SequenceNumbers.UNASSIGNED_SEQ_NO), Math.toIntExact(indexShard.getLocalCheckpoint())); indexShard.updateGlobalCheckpointOnReplica(globalCheckpointOnReplica, "test"); final int globalCheckpoint = randomIntBetween( Math.toIntExact(SequenceNumbers.UNASSIGNED_SEQ_NO), Math.toIntExact(indexShard.getLocalCheckpoint())); final CountDownLatch latch = new CountDownLatch(1); indexShard.acquireReplicaOperationPermit( indexShard.getPrimaryTerm() + 1, globalCheckpoint, new ActionListener<Releasable>() { @Override public void onResponse(Releasable releasable) { releasable.close(); latch.countDown(); } @Override public void onFailure(Exception e) { } }, ThreadPool.Names.SAME, ""); latch.await(); final ShardRouting newRouting = indexShard.routingEntry().moveActiveReplicaToPrimary(); final CountDownLatch resyncLatch = new CountDownLatch(1); indexShard.updateShardState( newRouting, indexShard.getPrimaryTerm() + 1, (s, r) -> resyncLatch.countDown(), 1L, Collections.singleton(newRouting.allocationId().getId()), new IndexShardRoutingTable.Builder(newRouting.shardId()).addShard(newRouting).build(), Collections.emptySet()); resyncLatch.await(); 
assertThat(indexShard.getLocalCheckpoint(), equalTo(maxSeqNo)); assertThat(indexShard.seqNoStats().getMaxSeqNo(), equalTo(maxSeqNo)); closeShards(indexShard); } public void testThrowBackLocalCheckpointOnReplica() throws IOException, InterruptedException { final IndexShard indexShard = newStartedShard(false); // most of the time this is large enough that most of the time there will be at least one gap final int operations = 1024 - scaledRandomIntBetween(0, 1024); indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED)); final long globalCheckpointOnReplica = randomIntBetween( Math.toIntExact(SequenceNumbers.UNASSIGNED_SEQ_NO), Math.toIntExact(indexShard.getLocalCheckpoint())); indexShard.updateGlobalCheckpointOnReplica(globalCheckpointOnReplica, "test"); final int globalCheckpoint = randomIntBetween( Math.toIntExact(SequenceNumbers.UNASSIGNED_SEQ_NO), Math.toIntExact(indexShard.getLocalCheckpoint())); final CountDownLatch latch = new CountDownLatch(1); indexShard.acquireReplicaOperationPermit( indexShard.primaryTerm + 1, globalCheckpoint, new ActionListener<Releasable>() { @Override public void onResponse(final Releasable releasable) { releasable.close(); latch.countDown(); } @Override public void onFailure(final Exception e) { } }, ThreadPool.Names.SAME, ""); latch.await(); if (globalCheckpointOnReplica == SequenceNumbers.UNASSIGNED_SEQ_NO && globalCheckpoint == SequenceNumbers.UNASSIGNED_SEQ_NO) { assertThat(indexShard.getLocalCheckpoint(), equalTo(SequenceNumbers.NO_OPS_PERFORMED)); } else { assertThat(indexShard.getLocalCheckpoint(), equalTo(Math.max(globalCheckpoint, globalCheckpointOnReplica))); } // ensure that after the local checkpoint throw back and indexing again, the local checkpoint advances final Result result = indexOnReplicaWithGaps(indexShard, operations, Math.toIntExact(indexShard.getLocalCheckpoint())); assertThat(indexShard.getLocalCheckpoint(), equalTo((long) result.localCheckpoint)); 
        closeShards(indexShard);
    }

    // Two threads race to acquire replica operation permits with (possibly different) bumped
    // primary terms; the shard must end up at the highest term and reject the stale one.
    public void testConcurrentTermIncreaseOnReplicaShard() throws BrokenBarrierException, InterruptedException, IOException {
        final IndexShard indexShard = newStartedShard(false);

        // barrier parties: the two permit-acquiring threads plus the test thread
        final CyclicBarrier barrier = new CyclicBarrier(3);
        final CountDownLatch latch = new CountDownLatch(2);

        final long primaryTerm = indexShard.getPrimaryTerm();
        // counts permit acquisitions that succeeded
        final AtomicLong counter = new AtomicLong();
        final AtomicReference<Exception> onFailure = new AtomicReference<>();

        // builds a runnable that acquires a replica permit at primaryTerm + increment
        final LongFunction<Runnable> function = increment -> () -> {
            assert increment > 0;
            try {
                barrier.await();
            } catch (final BrokenBarrierException | InterruptedException e) {
                throw new RuntimeException(e);
            }
            indexShard.acquireReplicaOperationPermit(
                    primaryTerm + increment,
                    indexShard.getGlobalCheckpoint(),
                    new ActionListener<Releasable>() {
                        @Override
                        public void onResponse(Releasable releasable) {
                            counter.incrementAndGet();
                            // on success the shard's term must have advanced to the term we supplied
                            assertThat(indexShard.getPrimaryTerm(), equalTo(primaryTerm + increment));
                            latch.countDown();
                            releasable.close();
                        }

                        @Override
                        public void onFailure(Exception e) {
                            onFailure.set(e);
                            latch.countDown();
                        }
                    },
                    ThreadPool.Names.WRITE, "");
        };

        // each thread bumps the term by one or two, independently
        final long firstIncrement = 1 + (randomBoolean() ? 0 : 1);
        final long secondIncrement = 1 + (randomBoolean() ? 0 : 1);
        final Thread first = new Thread(function.apply(firstIncrement));
        final Thread second = new Thread(function.apply(secondIncrement));

        first.start();
        second.start();

        // the two threads synchronize attempting to acquire an operation permit
        barrier.await();

        // we wait for both operations to complete
        latch.await();

        first.join();
        second.join();

        final Exception e;
        if ((e = onFailure.get()) != null) {
            /*
             * If one thread tried to set the primary term to a higher value than the other thread and the thread with the higher term won
             * the race, then the other thread lost the race and only one operation should have been executed.
             */
            assertThat(e, instanceOf(IllegalStateException.class));
            assertThat(e, hasToString(matches("operation primary term \\[\\d+\\] is too old")));
            assertThat(counter.get(), equalTo(1L));
        } else {
            // no failure means both threads used terms the shard accepted in order
            assertThat(counter.get(), equalTo(2L));
        }

        // regardless of the winner, the shard ends at the highest term that was offered
        assertThat(indexShard.getPrimaryTerm(), equalTo(primaryTerm + Math.max(firstIncrement, secondIncrement)));

        closeShards(indexShard);
    }

    /**
     * test one can snapshot the store at various lifecycle stages
     */
    public void testSnapshotStore() throws IOException {
        final IndexShard shard = newStartedShard(true);
        indexDoc(shard, "_doc", "0");
        flushShard(shard);

        final IndexShard newShard = reinitShard(shard);
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);

        // snapshot while the shard is closed for recovery
        Store.MetadataSnapshot snapshot = newShard.snapshotStoreMetadata();
        assertThat(snapshot.getSegmentsFile().name(), equalTo("segments_3"));

        newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));

        // snapshot while recovering
        snapshot = newShard.snapshotStoreMetadata();
        assertThat(snapshot.getSegmentsFile().name(), equalTo("segments_3"));

        assertTrue(newShard.recoverFromStore());

        // snapshot after recovery but before the routing entry is started
        snapshot = newShard.snapshotStoreMetadata();
        assertThat(snapshot.getSegmentsFile().name(), equalTo("segments_3"));

        IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted());

        // snapshot on a started shard
        snapshot = newShard.snapshotStoreMetadata();
        assertThat(snapshot.getSegmentsFile().name(), equalTo("segments_3"));

        newShard.close("test", false);

        // snapshot is still possible after the shard is closed
        snapshot = newShard.snapshotStoreMetadata();
        assertThat(snapshot.getSegmentsFile().name(), equalTo("segments_3"));

        closeShards(newShard);
    }

    // Hammers IndexShard#sync from several threads and verifies every callback is eventually invoked.
    public void testAsyncFsync() throws InterruptedException, IOException {
        IndexShard shard = newStartedShard();
        Semaphore semaphore = new Semaphore(Integer.MAX_VALUE);
        Thread[] thread = new Thread[randomIntBetween(3, 5)];
        CountDownLatch latch = new CountDownLatch(thread.length);
        for (int i = 0; i < thread.length; i++) {
            thread[i] = new Thread() {
                @Override
                public void run() {
                    try {
                        // wait until all threads are ready so the syncs actually race
                        latch.countDown();
                        latch.await();
                        for (int i = 0; i < 10000; i++) {
                            // each sync callback releases the permit acquired here
                            semaphore.acquire();
                            shard.sync(TranslogTests.randomTranslogLocation(), (ex) -> semaphore.release());
                        }
                    } catch (Exception ex) {
                        throw new RuntimeException(ex);
                    }
                }
            };
            thread[i].start();
        }

        for (int i = 0; i < thread.length; i++) {
            thread[i].join();
        }
        // all permits can be re-acquired only once every sync callback has fired
        assertTrue(semaphore.tryAcquire(Integer.MAX_VALUE, 10, TimeUnit.SECONDS));

        closeShards(shard);
    }

    // The minimum compatible Lucene version tracks the index-created version until a flush
    // rewrites segments with the current Lucene version.
    public void testMinimumCompatVersion() throws IOException {
        Version versionCreated = VersionUtils.randomVersion(random());
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, versionCreated.id)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("test")
            .settings(settings)
            .primaryTerm(0, 1).build();
        IndexShard test = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
        recoverShardFromStore(test);

        indexDoc(test, "_doc", "test");
        assertEquals(versionCreated.luceneVersion, test.minimumCompatibleVersion());
        indexDoc(test, "_doc", "test");
        assertEquals(versionCreated.luceneVersion, test.minimumCompatibleVersion());
        // flushing commits segments written by the current Lucene version
        test.getEngine().flush();
        assertEquals(Version.CURRENT.luceneVersion, test.minimumCompatibleVersion());

        closeShards(test);
    }

    // ShardStats must expose the shard paths and survive a serialization round trip.
    public void testShardStats() throws IOException {
        IndexShard shard = newStartedShard();
        ShardStats stats = new ShardStats(shard.routingEntry(), shard.shardPath(),
            new CommonStats(new IndicesQueryCache(Settings.EMPTY), shard, new CommonStatsFlags()), shard.commitStats(), shard.seqNoStats());
        assertEquals(shard.shardPath().getRootDataPath().toString(), stats.getDataPath());
        assertEquals(shard.shardPath().getRootStatePath().toString(), stats.getStatePath());
        assertEquals(shard.shardPath().isCustomDataPath(), stats.isCustomDataPath());

        // try to serialize it to ensure values survive the serialization
        BytesStreamOutput out = new
BytesStreamOutput(); stats.writeTo(out); StreamInput in = out.bytes().streamInput(); stats = ShardStats.readShardStats(in); XContentBuilder builder = jsonBuilder(); builder.startObject(); stats.toXContent(builder, EMPTY_PARAMS); builder.endObject(); String xContent = Strings.toString(builder); StringBuilder expectedSubSequence = new StringBuilder("\"shard_path\":{\"state_path\":\""); expectedSubSequence.append(shard.shardPath().getRootStatePath().toString()); expectedSubSequence.append("\",\"data_path\":\""); expectedSubSequence.append(shard.shardPath().getRootDataPath().toString()); expectedSubSequence.append("\",\"is_custom_data_path\":").append(shard.shardPath().isCustomDataPath()).append("}"); if (Constants.WINDOWS) { // Some path weirdness on windows } else { assertTrue(xContent.contains(expectedSubSequence)); } closeShards(shard); } public void testShardStatsWithFailures() throws IOException { allowShardFailures(); final ShardId shardId = new ShardId("index", "_na_", 0); final ShardRouting shardRouting = newShardRouting(shardId, "node", true, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE, ShardRoutingState.INITIALIZING); final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder(shardRouting.getIndexName()) .settings(settings) .primaryTerm(0, 1) .build(); // Override two Directory methods to make them fail at our will // We use AtomicReference here to inject failure in the middle of the test not immediately // We use Supplier<IOException> instead of IOException to produce meaningful stacktrace // (remember stack trace is filled when exception is instantiated) 
AtomicReference<Supplier<IOException>> exceptionToThrow = new AtomicReference<>(); AtomicBoolean throwWhenMarkingStoreCorrupted = new AtomicBoolean(false); Directory directory = new FilterDirectory(newFSDirectory(shardPath.resolveIndex())) { //fileLength method is called during storeStats try block //it's not called when store is marked as corrupted @Override public long fileLength(String name) throws IOException { Supplier<IOException> ex = exceptionToThrow.get(); if (ex == null) { return super.fileLength(name); } else { throw ex.get(); } } //listAll method is called when marking store as corrupted @Override public String[] listAll() throws IOException { Supplier<IOException> ex = exceptionToThrow.get(); if (throwWhenMarkingStoreCorrupted.get() && ex != null) { throw ex.get(); } else { return super.listAll(); } } }; try (Store store = createStore(shardId, new IndexSettings(metaData, Settings.EMPTY), directory)) { IndexShard shard = newShard(shardRouting, shardPath, metaData, store, null, new InternalEngineFactory(), () -> { }, EMPTY_EVENT_LISTENER); AtomicBoolean failureCallbackTriggered = new AtomicBoolean(false); shard.addShardFailureCallback((ig)->failureCallbackTriggered.set(true)); recoverShardFromStore(shard); final boolean corruptIndexException = randomBoolean(); if (corruptIndexException) { exceptionToThrow.set(() -> new CorruptIndexException("Test CorruptIndexException", "Test resource")); throwWhenMarkingStoreCorrupted.set(randomBoolean()); } else { exceptionToThrow.set(() -> new IOException("Test IOException")); } ElasticsearchException e = expectThrows(ElasticsearchException.class, shard::storeStats); assertTrue(failureCallbackTriggered.get()); if (corruptIndexException && !throwWhenMarkingStoreCorrupted.get()) { assertTrue(store.isMarkedCorrupted()); } } } public void testRefreshMetric() throws IOException { IndexShard shard = newStartedShard(); assertThat(shard.refreshStats().getTotal(), equalTo(2L)); // refresh on: finalize and end of recovery long 
initialTotalTime = shard.refreshStats().getTotalTimeInMillis(); // check time advances for (int i = 1; shard.refreshStats().getTotalTimeInMillis() == initialTotalTime; i++) { indexDoc(shard, "_doc", "test"); assertThat(shard.refreshStats().getTotal(), equalTo(2L + i - 1)); shard.refresh("test"); assertThat(shard.refreshStats().getTotal(), equalTo(2L + i)); assertThat(shard.refreshStats().getTotalTimeInMillis(), greaterThanOrEqualTo(initialTotalTime)); } long refreshCount = shard.refreshStats().getTotal(); indexDoc(shard, "_doc", "test"); try (Engine.GetResult ignored = shard.get(new Engine.Get(true, false, "test", "test", new Term(IdFieldMapper.NAME, Uid.encodeId("test"))))) { assertThat(shard.refreshStats().getTotal(), equalTo(refreshCount+1)); } indexDoc(shard, "_doc", "test"); shard.writeIndexingBuffer(); assertThat(shard.refreshStats().getTotal(), equalTo(refreshCount+2)); closeShards(shard); } public void testIndexingOperationsListeners() throws IOException { IndexShard shard = newStartedShard(true); indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}"); shard.updateLocalCheckpointForShard(shard.shardRouting.allocationId().getId(), 0); AtomicInteger preIndex = new AtomicInteger(); AtomicInteger postIndexCreate = new AtomicInteger(); AtomicInteger postIndexUpdate = new AtomicInteger(); AtomicInteger postIndexException = new AtomicInteger(); AtomicInteger preDelete = new AtomicInteger(); AtomicInteger postDelete = new AtomicInteger(); AtomicInteger postDeleteException = new AtomicInteger(); shard.close("simon says", true); shard = reinitShard(shard, new IndexingOperationListener() { @Override public Engine.Index preIndex(ShardId shardId, Engine.Index operation) { preIndex.incrementAndGet(); return operation; } @Override public void postIndex(ShardId shardId, Engine.Index index, Engine.IndexResult result) { switch (result.getResultType()) { case SUCCESS: if (result.isCreated()) { postIndexCreate.incrementAndGet(); } else { postIndexUpdate.incrementAndGet(); } 
break; case FAILURE: postIndex(shardId, index, result.getFailure()); break; default: fail("unexpected result type:" + result.getResultType()); } } @Override public void postIndex(ShardId shardId, Engine.Index index, Exception ex) { postIndexException.incrementAndGet(); } @Override public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) { preDelete.incrementAndGet(); return delete; } @Override public void postDelete(ShardId shardId, Engine.Delete delete, Engine.DeleteResult result) { switch (result.getResultType()) { case SUCCESS: postDelete.incrementAndGet(); break; case FAILURE: postDelete(shardId, delete, result.getFailure()); break; default: fail("unexpected result type:" + result.getResultType()); } } @Override public void postDelete(ShardId shardId, Engine.Delete delete, Exception ex) { postDeleteException.incrementAndGet(); } }); recoverShardFromStore(shard); indexDoc(shard, "_doc", "1"); assertEquals(1, preIndex.get()); assertEquals(1, postIndexCreate.get()); assertEquals(0, postIndexUpdate.get()); assertEquals(0, postIndexException.get()); assertEquals(0, preDelete.get()); assertEquals(0, postDelete.get()); assertEquals(0, postDeleteException.get()); indexDoc(shard, "_doc", "1"); assertEquals(2, preIndex.get()); assertEquals(1, postIndexCreate.get()); assertEquals(1, postIndexUpdate.get()); assertEquals(0, postIndexException.get()); assertEquals(0, preDelete.get()); assertEquals(0, postDelete.get()); assertEquals(0, postDeleteException.get()); deleteDoc(shard, "_doc", "1"); assertEquals(2, preIndex.get()); assertEquals(1, postIndexCreate.get()); assertEquals(1, postIndexUpdate.get()); assertEquals(0, postIndexException.get()); assertEquals(1, preDelete.get()); assertEquals(1, postDelete.get()); assertEquals(0, postDeleteException.get()); shard.close("Unexpected close", true); shard.state = IndexShardState.STARTED; // It will generate exception try { indexDoc(shard, "_doc", "1"); fail(); } catch (AlreadyClosedException e) { } assertEquals(2, 
preIndex.get()); assertEquals(1, postIndexCreate.get()); assertEquals(1, postIndexUpdate.get()); assertEquals(0, postIndexException.get()); assertEquals(1, preDelete.get()); assertEquals(1, postDelete.get()); assertEquals(0, postDeleteException.get()); try { deleteDoc(shard, "_doc", "1"); fail(); } catch (AlreadyClosedException e) { } assertEquals(2, preIndex.get()); assertEquals(1, postIndexCreate.get()); assertEquals(1, postIndexUpdate.get()); assertEquals(0, postIndexException.get()); assertEquals(1, preDelete.get()); assertEquals(1, postDelete.get()); assertEquals(0, postDeleteException.get()); closeShards(shard); } public void testLockingBeforeAndAfterRelocated() throws Exception { final IndexShard shard = newStartedShard(true); IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(shard.routingEntry(), "other_node")); CountDownLatch latch = new CountDownLatch(1); Thread recoveryThread = new Thread(() -> { latch.countDown(); try { shard.relocated(primaryContext -> {}); } catch (InterruptedException e) { throw new RuntimeException(e); } }); try (Releasable ignored = acquirePrimaryOperationPermitBlockingly(shard)) { // start finalization of recovery recoveryThread.start(); latch.await(); // recovery can only be finalized after we release the current primaryOperationLock assertTrue(shard.isPrimaryMode()); } // recovery can be now finalized recoveryThread.join(); assertFalse(shard.isPrimaryMode()); try (Releasable ignored = acquirePrimaryOperationPermitBlockingly(shard)) { // lock can again be acquired assertFalse(shard.isPrimaryMode()); } closeShards(shard); } public void testDelayedOperationsBeforeAndAfterRelocated() throws Exception { final IndexShard shard = newStartedShard(true); IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(shard.routingEntry(), "other_node")); Thread recoveryThread = new Thread(() -> { try { shard.relocated(primaryContext -> {}); } catch (InterruptedException e) { throw new RuntimeException(e); 
} }); recoveryThread.start(); List<PlainActionFuture<Releasable>> onLockAcquiredActions = new ArrayList<>(); for (int i = 0; i < 10; i++) { PlainActionFuture<Releasable> onLockAcquired = new PlainActionFuture<Releasable>() { @Override public void onResponse(Releasable releasable) { releasable.close(); super.onResponse(releasable); } }; shard.acquirePrimaryOperationPermit(onLockAcquired, ThreadPool.Names.WRITE, "i_" + i); onLockAcquiredActions.add(onLockAcquired); } for (PlainActionFuture<Releasable> onLockAcquired : onLockAcquiredActions) { assertNotNull(onLockAcquired.get(30, TimeUnit.SECONDS)); } recoveryThread.join(); closeShards(shard); } public void testStressRelocated() throws Exception { final IndexShard shard = newStartedShard(true); assertTrue(shard.isPrimaryMode()); IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(shard.routingEntry(), "other_node")); final int numThreads = randomIntBetween(2, 4); Thread[] indexThreads = new Thread[numThreads]; CountDownLatch allPrimaryOperationLocksAcquired = new CountDownLatch(numThreads); CyclicBarrier barrier = new CyclicBarrier(numThreads + 1); for (int i = 0; i < indexThreads.length; i++) { indexThreads[i] = new Thread() { @Override public void run() { try (Releasable operationLock = acquirePrimaryOperationPermitBlockingly(shard)) { allPrimaryOperationLocksAcquired.countDown(); barrier.await(); } catch (InterruptedException | BrokenBarrierException | ExecutionException e) { throw new RuntimeException(e); } } }; indexThreads[i].start(); } AtomicBoolean relocated = new AtomicBoolean(); final Thread recoveryThread = new Thread(() -> { try { shard.relocated(primaryContext -> {}); } catch (InterruptedException e) { throw new RuntimeException(e); } relocated.set(true); }); // ensure we wait for all primary operation locks to be acquired allPrimaryOperationLocksAcquired.await(); // start recovery thread recoveryThread.start(); assertThat(relocated.get(), equalTo(false)); 
assertThat(shard.getActiveOperationsCount(), greaterThan(0)); // ensure we only transition after pending operations completed assertTrue(shard.isPrimaryMode()); // complete pending operations barrier.await(); // complete recovery/relocation recoveryThread.join(); // ensure relocated successfully once pending operations are done assertThat(relocated.get(), equalTo(true)); assertFalse(shard.isPrimaryMode()); assertThat(shard.getActiveOperationsCount(), equalTo(0)); for (Thread indexThread : indexThreads) { indexThread.join(); } closeShards(shard); } public void testRelocatedShardCanNotBeRevived() throws IOException, InterruptedException { final IndexShard shard = newStartedShard(true); final ShardRouting originalRouting = shard.routingEntry(); IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(originalRouting, "other_node")); shard.relocated(primaryContext -> {}); expectThrows(IllegalIndexShardStateException.class, () -> IndexShardTestCase.updateRoutingEntry(shard, originalRouting)); closeShards(shard); } public void testShardCanNotBeMarkedAsRelocatedIfRelocationCancelled() throws IOException { final IndexShard shard = newStartedShard(true); final ShardRouting originalRouting = shard.routingEntry(); IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(originalRouting, "other_node")); IndexShardTestCase.updateRoutingEntry(shard, originalRouting); expectThrows(IllegalIndexShardStateException.class, () -> shard.relocated(primaryContext -> {})); closeShards(shard); } public void testRelocatedShardCanNotBeRevivedConcurrently() throws IOException, InterruptedException, BrokenBarrierException { final IndexShard shard = newStartedShard(true); final ShardRouting originalRouting = shard.routingEntry(); IndexShardTestCase.updateRoutingEntry(shard, ShardRoutingHelper.relocate(originalRouting, "other_node")); CyclicBarrier cyclicBarrier = new CyclicBarrier(3); AtomicReference<Exception> relocationException = new AtomicReference<>(); 
Thread relocationThread = new Thread(new AbstractRunnable() { @Override public void onFailure(Exception e) { relocationException.set(e); } @Override protected void doRun() throws Exception { cyclicBarrier.await(); shard.relocated(primaryContext -> {}); } }); relocationThread.start(); AtomicReference<Exception> cancellingException = new AtomicReference<>(); Thread cancellingThread = new Thread(new AbstractRunnable() { @Override public void onFailure(Exception e) { cancellingException.set(e); } @Override protected void doRun() throws Exception { cyclicBarrier.await(); IndexShardTestCase.updateRoutingEntry(shard, originalRouting); } }); cancellingThread.start(); cyclicBarrier.await(); relocationThread.join(); cancellingThread.join(); if (shard.isPrimaryMode() == false) { logger.debug("shard was relocated successfully"); assertThat(cancellingException.get(), instanceOf(IllegalIndexShardStateException.class)); assertThat("current routing:" + shard.routingEntry(), shard.routingEntry().relocating(), equalTo(true)); assertThat(relocationException.get(), nullValue()); } else { logger.debug("shard relocation was cancelled"); assertThat(relocationException.get(), instanceOf(IllegalIndexShardStateException.class)); assertThat("current routing:" + shard.routingEntry(), shard.routingEntry().relocating(), equalTo(false)); assertThat(cancellingException.get(), nullValue()); } closeShards(shard); } public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { /* * The flow of this test: * - delete #1 * - roll generation (to create gen 2) * - index #0 * - index #3 * - flush (commit point has max_seqno 3, and local checkpoint 1 -> points at gen 2, previous commit point is maintained) * - index #2 * - index #5 * - If flush and then recover from the existing store, delete #1 will be removed while index #0 is still retained and replayed. 
*/ final IndexShard shard = newStartedShard(false); shard.applyDeleteOperationOnReplica(1, 2, "_doc", "id"); shard.getEngine().rollTranslogGeneration(); // isolate the delete in it's own generation shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id", new BytesArray("{}"), XContentType.JSON)); shard.applyIndexOperationOnReplica(3, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-3", new BytesArray("{}"), XContentType.JSON)); // Flushing a new commit with local checkpoint=1 allows to skip the translog gen #1 in recovery. shard.flush(new FlushRequest().force(true).waitIfOngoing(true)); shard.applyIndexOperationOnReplica(2, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-2", new BytesArray("{}"), XContentType.JSON)); shard.applyIndexOperationOnReplica(5, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-5", new BytesArray("{}"), XContentType.JSON)); final int translogOps; if (randomBoolean()) { // Advance the global checkpoint to remove the 1st commit; this shard will recover the 2nd commit. shard.updateGlobalCheckpointOnReplica(3, "test"); logger.info("--> flushing shard"); shard.flush(new FlushRequest().force(true).waitIfOngoing(true)); translogOps = 4; // delete #1 won't be replayed. 
} else if (randomBoolean()) { shard.getEngine().rollTranslogGeneration(); translogOps = 5; } else { translogOps = 5; } final ShardRouting replicaRouting = shard.routingEntry(); IndexShard newShard = reinitShard(shard, newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), true, ShardRoutingState.INITIALIZING, RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE)); DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); assertTrue(newShard.recoverFromStore()); assertEquals(translogOps, newShard.recoveryState().getTranslog().recoveredOperations()); assertEquals(translogOps, newShard.recoveryState().getTranslog().totalOperations()); assertEquals(translogOps, newShard.recoveryState().getTranslog().totalOperationsOnStart()); assertEquals(100.0f, newShard.recoveryState().getTranslog().recoveredPercent(), 0.01f); updateRoutingEntry(newShard, ShardRoutingHelper.moveToStarted(newShard.routingEntry())); assertDocCount(newShard, 3); closeShards(newShard); } public void testRecoverFromStore() throws IOException { final IndexShard shard = newStartedShard(true); int totalOps = randomInt(10); int translogOps = totalOps; for (int i = 0; i < totalOps; i++) { indexDoc(shard, "_doc", Integer.toString(i)); } if (randomBoolean()) { shard.updateLocalCheckpointForShard(shard.shardRouting.allocationId().getId(), totalOps - 1); flushShard(shard); translogOps = 0; } IndexShard newShard = reinitShard(shard); DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); assertTrue(newShard.recoverFromStore()); assertEquals(translogOps, newShard.recoveryState().getTranslog().recoveredOperations()); assertEquals(translogOps, 
newShard.recoveryState().getTranslog().totalOperations()); assertEquals(translogOps, newShard.recoveryState().getTranslog().totalOperationsOnStart()); assertEquals(100.0f, newShard.recoveryState().getTranslog().recoveredPercent(), 0.01f); IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted()); // check that local checkpoint of new primary is properly tracked after recovery assertThat(newShard.getLocalCheckpoint(), equalTo(totalOps - 1L)); assertThat(newShard.getReplicationTracker().getTrackedLocalCheckpointForShard(newShard.routingEntry().allocationId().getId()) .getLocalCheckpoint(), equalTo(totalOps - 1L)); assertDocCount(newShard, totalOps); closeShards(newShard); } public void testPrimaryHandOffUpdatesLocalCheckpoint() throws IOException { final IndexShard primarySource = newStartedShard(true); int totalOps = randomInt(10); for (int i = 0; i < totalOps; i++) { indexDoc(primarySource, "_doc", Integer.toString(i)); } IndexShardTestCase.updateRoutingEntry(primarySource, primarySource.routingEntry().relocate(randomAlphaOfLength(10), -1)); final IndexShard primaryTarget = newShard(primarySource.routingEntry().getTargetRelocatingShard()); updateMappings(primaryTarget, primarySource.indexSettings().getIndexMetaData()); recoverReplica(primaryTarget, primarySource, true); // check that local checkpoint of new primary is properly tracked after primary relocation assertThat(primaryTarget.getLocalCheckpoint(), equalTo(totalOps - 1L)); assertThat(primaryTarget.getReplicationTracker().getTrackedLocalCheckpointForShard( primaryTarget.routingEntry().allocationId().getId()).getLocalCheckpoint(), equalTo(totalOps - 1L)); assertDocCount(primaryTarget, totalOps); closeShards(primarySource, primaryTarget); } /* This test just verifies that we fill up local checkpoint up to max seen seqID on primary recovery */ public void testRecoverFromStoreWithNoOps() throws IOException { final IndexShard shard = newStartedShard(true); indexDoc(shard, "_doc", 
"0"); Engine.IndexResult test = indexDoc(shard, "_doc", "1"); // start a replica shard and index the second doc final IndexShard otherShard = newStartedShard(false); updateMappings(otherShard, shard.indexSettings().getIndexMetaData()); SourceToParse sourceToParse = SourceToParse.source(shard.shardId().getIndexName(), "_doc", "1", new BytesArray("{}"), XContentType.JSON); otherShard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); final ShardRouting primaryShardRouting = shard.routingEntry(); IndexShard newShard = reinitShard(otherShard, ShardRoutingHelper.initWithSameId(primaryShardRouting, RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE)); DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); assertTrue(newShard.recoverFromStore()); assertEquals(1, newShard.recoveryState().getTranslog().recoveredOperations()); assertEquals(1, newShard.recoveryState().getTranslog().totalOperations()); assertEquals(1, newShard.recoveryState().getTranslog().totalOperationsOnStart()); assertEquals(100.0f, newShard.recoveryState().getTranslog().recoveredPercent(), 0.01f); try (Translog.Snapshot snapshot = getTranslog(newShard).newSnapshot()) { Translog.Operation operation; int numNoops = 0; while ((operation = snapshot.next()) != null) { if (operation.opType() == Translog.Operation.Type.NO_OP) { numNoops++; assertEquals(newShard.getPrimaryTerm(), operation.primaryTerm()); assertEquals(0, operation.seqNo()); } } assertEquals(1, numNoops); } IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted()); assertDocCount(newShard, 1); assertDocCount(shard, 2); for (int i = 0; i < 2; i++) { newShard = reinitShard(newShard, ShardRoutingHelper.initWithSameId(primaryShardRouting, 
RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE)); newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); assertTrue(newShard.recoverFromStore()); try (Translog.Snapshot snapshot = getTranslog(newShard).newSnapshot()) { assertThat(snapshot.totalOperations(), equalTo(2)); } } closeShards(newShard, shard); } public void testRecoverFromCleanStore() throws IOException { final IndexShard shard = newStartedShard(true); indexDoc(shard, "_doc", "0"); if (randomBoolean()) { flushShard(shard); } final ShardRouting shardRouting = shard.routingEntry(); IndexShard newShard = reinitShard(shard, ShardRoutingHelper.initWithSameId(shardRouting, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE) ); DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null)); assertTrue(newShard.recoverFromStore()); assertEquals(0, newShard.recoveryState().getTranslog().recoveredOperations()); assertEquals(0, newShard.recoveryState().getTranslog().totalOperations()); assertEquals(0, newShard.recoveryState().getTranslog().totalOperationsOnStart()); assertEquals(100.0f, newShard.recoveryState().getTranslog().recoveredPercent(), 0.01f); IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted()); assertDocCount(newShard, 0); closeShards(newShard); } public void testFailIfIndexNotPresentInRecoverFromStore() throws Exception { final IndexShard shard = newStartedShard(true); indexDoc(shard, "_doc", "0"); if (randomBoolean()) { flushShard(shard); } Store store = shard.store(); store.incRef(); closeShards(shard); cleanLuceneIndex(store.directory()); store.decRef(); IndexShard newShard = reinitShard(shard); DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT); ShardRouting routing = 
// (continuation of testFailIfIndexNotPresentInRecoverFromStore) recovery from the
// wiped store must throw; marking an already-recovering shard again must be
// rejected; a retry with an empty-store source must still yield a working shard.
newShard.routingEntry();
        newShard.markAsRecovering("store", new RecoveryState(routing, localNode, null));
        try {
            newShard.recoverFromStore();
            fail("index not there!");
        } catch (IndexShardRecoveryException ex) {
            assertTrue(ex.getMessage().contains("failed to fetch index version after copying it over"));
        }
        routing = ShardRoutingHelper.moveToUnassigned(routing, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "because I say so"));
        routing = ShardRoutingHelper.initialize(routing, newShard.routingEntry().currentNodeId());
        assertTrue("it's already recovering, we should ignore new ones", newShard.ignoreRecoveryAttempt());
        try {
            newShard.markAsRecovering("store", new RecoveryState(routing, localNode, null));
            fail("we are already recovering, can't mark again");
        } catch (IllegalIndexShardStateException e) {
            // OK!
        }
        newShard = reinitShard(newShard,
            ShardRoutingHelper.initWithSameId(routing, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE));
        newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
        assertTrue("recover even if there is nothing to recover", newShard.recoverFromStore());
        IndexShardTestCase.updateRoutingEntry(newShard, newShard.routingEntry().moveToStarted());
        assertDocCount(newShard, 0);
        // we can't issue this request through a client because of the inconsistencies we created with the cluster state
        // doing it directly instead
        indexDoc(newShard, "_doc", "0");
        newShard.refresh("test");
        assertDocCount(newShard, 1);
        closeShards(newShard);
    }

    // A replica holding operations above the persisted global checkpoint must
    // discard those stale operations when it is promoted and recovers from store.
    public void testRecoverFromStoreRemoveStaleOperations() throws Exception {
        final IndexShard shard = newStartedShard(false);
        final String indexName = shard.shardId().getIndexName();
        // Index #0, index #1
        shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
            SourceToParse.source(indexName, "_doc", "doc-0", new BytesArray("{}"), XContentType.JSON));
        flushShard(shard);
        shard.updateGlobalCheckpointOnReplica(0, "test"); // stick the global
// checkpoint here. (comment continuation from the previous line)
        shard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
            SourceToParse.source(indexName, "_doc", "doc-1", new BytesArray("{}"), XContentType.JSON));
        flushShard(shard);
        assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1"));
        // Simulate resync (without rollback): Noop #1, index #2
        acquireReplicaOperationPermitBlockingly(shard, shard.primaryTerm + 1);
        shard.markSeqNoAsNoop(1, "test");
        shard.applyIndexOperationOnReplica(2, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false,
            SourceToParse.source(indexName, "_doc", "doc-2", new BytesArray("{}"), XContentType.JSON));
        flushShard(shard);
        assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1", "doc-2"));
        // Recovering from store should discard doc #1
        final ShardRouting replicaRouting = shard.routingEntry();
        IndexShard newShard = reinitShard(shard, newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), true,
            ShardRoutingState.INITIALIZING, RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE));
        newShard.primaryTerm++; // simulate promotion to primary under a new term
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), localNode, null));
        assertTrue(newShard.recoverFromStore());
        // doc-1 was superseded by the noop at seq#1 and must be gone
        assertThat(getShardDocUIDs(newShard), containsInAnyOrder("doc-0", "doc-2"));
        closeShards(newShard);
    }

    // After a primary completes relocation hand-off, moving its routing entry
    // back to the original must fail with IndexShardRelocatedException.
    public void testRecoveryFailsAfterMovingToRelocatedState() throws InterruptedException, IOException {
        final IndexShard shard = newStartedShard(true);
        ShardRouting origRouting = shard.routingEntry();
        assertThat(shard.state(), equalTo(IndexShardState.STARTED));
        ShardRouting inRecoveryRouting = ShardRoutingHelper.relocate(origRouting, "some_node");
        IndexShardTestCase.updateRoutingEntry(shard, inRecoveryRouting);
        shard.relocated(primaryContext -> {}); // complete the primary-context hand-off
        assertFalse(shard.isPrimaryMode());
        try {
            IndexShardTestCase.updateRoutingEntry(shard,
// (continuation) reverting the routing entry after relocation must be rejected.
origRouting);
            fail("Expected IndexShardRelocatedException");
        } catch (IndexShardRelocatedException expected) {
        }
        closeShards(shard);
    }

    // Restores a shard from a (fake) snapshot repository by copying the source
    // shard's files over the target's store, then verifies seq-no stats and docs.
    public void testRestoreShard() throws IOException {
        final IndexShard source = newStartedShard(true);
        IndexShard target = newStartedShard(true);
        indexDoc(source, "_doc", "0");
        if (randomBoolean()) {
            source.refresh("test");
        }
        indexDoc(target, "_doc", "1");
        target.refresh("test");
        assertDocs(target, "1");
        flushShard(source); // only flush source
        ShardRouting routing = ShardRoutingHelper.initWithSameId(target.routingEntry(),
            RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE);
        final Snapshot snapshot = new Snapshot("foo", new SnapshotId("bar", UUIDs.randomBase64UUID()));
        routing = ShardRoutingHelper.newWithRestoreSource(routing,
            new RecoverySource.SnapshotRecoverySource(snapshot, Version.CURRENT, "test"));
        target = reinitShard(target, routing);
        Store sourceStore = source.store();
        Store targetStore = target.store();
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        target.markAsRecovering("store", new RecoveryState(routing, localNode, null));
        assertTrue(target.restoreFromRepository(new RestoreOnlyRepository("test") {
            @Override
            public void restoreShard(IndexShard shard, SnapshotId snapshotId, Version version, IndexId indexId,
                                     ShardId snapshotShardId, RecoveryState recoveryState) {
                try {
                    // wipe the target and copy every file from the source except
                    // the lock file and test-injected "extra" files
                    cleanLuceneIndex(targetStore.directory());
                    for (String file : sourceStore.directory().listAll()) {
                        if (file.equals("write.lock") || file.startsWith("extra")) {
                            continue;
                        }
                        targetStore.directory().copyFrom(sourceStore.directory(), file, file, IOContext.DEFAULT);
                    }
                } catch (Exception ex) {
                    throw new RuntimeException(ex);
                }
            }
        }));
        assertThat(target.getLocalCheckpoint(), equalTo(0L));
        assertThat(target.seqNoStats().getMaxSeqNo(), equalTo(0L));
        assertThat(target.getReplicationTracker().getGlobalCheckpoint(), equalTo(0L));
        IndexShardTestCase.updateRoutingEntry(target,
// (continuation) after start, the tracker must report the restored checkpoint.
routing.moveToStarted());
        assertThat(target.getReplicationTracker().getTrackedLocalCheckpointForShard(
            target.routingEntry().allocationId().getId()).getLocalCheckpoint(), equalTo(0L));
        assertDocs(target, "0");
        closeShards(source, target);
    }

    // An IndexSearcherWrapper installed on the shard must be applied to both
    // search and get: here the wrapper masks field "foo", hiding doc 0 from
    // queries on that field while "foobar" queries still match.
    public void testSearcherWrapperIsUsed() throws IOException {
        IndexShard shard = newStartedShard(true);
        indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
        indexDoc(shard, "_doc", "1", "{\"foobar\" : \"bar\"}");
        shard.refresh("test");
        try (Engine.GetResult getResult = shard
                .get(new Engine.Get(false, false, "test", "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))))) {
            assertTrue(getResult.exists());
            assertNotNull(getResult.searcher());
        }
        // baseline without the wrapper: both fields are searchable
        try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
            TopDocs search = searcher.searcher().search(new TermQuery(new Term("foo", "bar")), 10);
            assertEquals(search.totalHits, 1);
            search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10);
            assertEquals(search.totalHits, 1);
        }
        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
            @Override
            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
                return new FieldMaskingReader("foo", reader);
            }

            @Override
            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
                return searcher;
            }
        };
        closeShards(shard);
        // re-open the same shard with the masking wrapper installed
        IndexShard newShard = newShard(
            ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE),
            shard.shardPath(), shard.indexSettings().getIndexMetaData(), null, wrapper,
            new InternalEngineFactory(), () -> {}, EMPTY_EVENT_LISTENER);
        recoverShardFromStore(newShard);
        try (Engine.Searcher searcher = newShard.acquireSearcher("test")) {
            // "foo" is masked now, so this query must find nothing
            TopDocs search = searcher.searcher().search(new TermQuery(new Term("foo", "bar")), 10);
            assertEquals(search.totalHits, 0);
            search = searcher.searcher().search(new TermQuery(new Term("foobar", "bar")), 10);
            assertEquals(search.totalHits, 1);
        }
        try (Engine.GetResult getResult = newShard
                .get(new
// (continuation) get must go through the wrapped (field-masking) reader too.
Engine.Get(false, false, "test", "1", new Term(IdFieldMapper.NAME, Uid.encodeId("1"))))) {
            assertTrue(getResult.exists());
            assertNotNull(getResult.searcher()); // make sure get uses the wrapped reader
            assertTrue(getResult.searcher().reader() instanceof FieldMaskingReader);
        }
        closeShards(newShard);
    }

    // Global-ordinals field data built through a searcher wrapper must neither
    // leak memory nor cause cache evictions for a masked/absent field.
    public void testSearcherWrapperWorksWithGlobalOrdinals() throws IOException {
        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
            @Override
            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
                return new FieldMaskingReader("foo", reader);
            }

            @Override
            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
                return searcher;
            }
        };
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("test")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\", \"fielddata\": true }}}")
            .settings(settings)
            .primaryTerm(0, 1).build();
        IndexShard shard = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, wrapper);
        recoverShardFromStore(shard);
        // two refreshes -> at least two segments, required by the assertion below
        indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
        shard.refresh("created segment 1");
        indexDoc(shard, "_doc", "1", "{\"foobar\" : \"bar\"}");
        shard.refresh("created segment 2");
        // test global ordinals are evicted
        MappedFieldType foo = shard.mapperService().fullName("foo");
        IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(shard.indexSettings.getNodeSettings(),
            new IndexFieldDataCache.Listener() {});
        IndexFieldDataService indexFieldDataService = new IndexFieldDataService(shard.indexSettings, indicesFieldDataCache,
            new NoneCircuitBreakerService(), shard.mapperService());
        IndexFieldData.Global ifd = indexFieldDataService.getForField(foo);
        FieldDataStats before = shard.fieldData().stats("foo");
        assertThat(before.getMemorySizeInBytes(), equalTo(0L));
// (continuation) loading global ordinals on the masked field must not change
// field-data memory usage or eviction counts, even across flush/refresh.
FieldDataStats after = null;
        try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
            assertThat("we have to have more than one segment", searcher.getDirectoryReader().leaves().size(), greaterThan(1));
            ifd.loadGlobal(searcher.getDirectoryReader());
            after = shard.fieldData().stats("foo");
            assertEquals(after.getEvictions(), before.getEvictions());
            // If a field doesn't exist an empty IndexFieldData is returned and that isn't cached:
            assertThat(after.getMemorySizeInBytes(), equalTo(0L));
        }
        assertEquals(shard.fieldData().stats("foo").getEvictions(), before.getEvictions());
        assertEquals(shard.fieldData().stats("foo").getMemorySizeInBytes(), after.getMemorySizeInBytes());
        shard.flush(new FlushRequest().force(true).waitIfOngoing(true));
        shard.refresh("test");
        assertEquals(shard.fieldData().stats("foo").getMemorySizeInBytes(), before.getMemorySizeInBytes());
        assertEquals(shard.fieldData().stats("foo").getEvictions(), before.getEvictions());
        closeShards(shard);
    }

    // Replaying translog ops during store recovery must invoke the registered
    // IndexingOperationListener callbacks without inflating the indexing stats.
    public void testIndexingOperationListenersIsInvokedOnRecovery() throws IOException {
        IndexShard shard = newStartedShard(true);
        // 2 index ops + 1 delete op end up in the translog to be replayed
        indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
        deleteDoc(shard, "_doc", "0");
        indexDoc(shard, "_doc", "1", "{\"foo\" : \"bar\"}");
        shard.refresh("test");

        final AtomicInteger preIndex = new AtomicInteger();
        final AtomicInteger postIndex = new AtomicInteger();
        final AtomicInteger preDelete = new AtomicInteger();
        final AtomicInteger postDelete = new AtomicInteger();
        IndexingOperationListener listener = new IndexingOperationListener() {
            @Override
            public Engine.Index preIndex(ShardId shardId, Engine.Index operation) {
                preIndex.incrementAndGet();
                return operation;
            }

            @Override
            public void postIndex(ShardId shardId, Engine.Index index, Engine.IndexResult result) {
                postIndex.incrementAndGet();
            }

            @Override
            public Engine.Delete preDelete(ShardId shardId, Engine.Delete delete) {
                preDelete.incrementAndGet();
                return delete;
            }

            @Override
            public void postDelete(ShardId shardId, Engine.Delete
// (continuation) listener counters must reflect the replayed translog ops
// while the shard's public indexing stats stay untouched by recovery.
delete, Engine.DeleteResult result) {
                postDelete.incrementAndGet();
            }
        };
        final IndexShard newShard = reinitShard(shard, listener);
        recoverShardFromStore(newShard);
        IndexingStats indexingStats = newShard.indexingStats();
        // ensure we are not influencing the indexing stats
        assertEquals(0, indexingStats.getTotal().getDeleteCount());
        assertEquals(0, indexingStats.getTotal().getDeleteCurrent());
        assertEquals(0, indexingStats.getTotal().getIndexCount());
        assertEquals(0, indexingStats.getTotal().getIndexCurrent());
        assertEquals(0, indexingStats.getTotal().getIndexFailedCount());
        assertEquals(2, preIndex.get());
        assertEquals(2, postIndex.get());
        assertEquals(1, preDelete.get());
        assertEquals(1, postDelete.get());
        closeShards(newShard);
    }

    // If the searcher wrapper throws, acquireSearcher must propagate the failure
    // (and, per the test name, release the underlying searcher rather than leak it).
    public void testSearchIsReleaseIfWrapperFails() throws IOException {
        IndexShard shard = newStartedShard(true);
        indexDoc(shard, "_doc", "0", "{\"foo\" : \"bar\"}");
        shard.refresh("test");
        IndexSearcherWrapper wrapper = new IndexSearcherWrapper() {
            @Override
            public DirectoryReader wrap(DirectoryReader reader) throws IOException {
                throw new RuntimeException("boom"); // always fail wrapping
            }

            @Override
            public IndexSearcher wrap(IndexSearcher searcher) throws EngineException {
                return searcher;
            }
        };
        closeShards(shard);
        IndexShard newShard = newShard(
            ShardRoutingHelper.initWithSameId(shard.routingEntry(), RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE),
            shard.shardPath(), shard.indexSettings().getIndexMetaData(), null, wrapper,
            new InternalEngineFactory(), () -> {}, EMPTY_EVENT_LISTENER);
        recoverShardFromStore(newShard);
        try {
            newShard.acquireSearcher("test");
            fail("exception expected");
        } catch (RuntimeException ex) {
            //
        }
        closeShards(newShard);
    }

    // Peer recovery must leave the replica's translog fully synced once the
    // translog operations have been indexed on the target.
    public void testTranslogRecoverySyncsTranslog() throws IOException {
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData =
// (continuation of testTranslogRecoverySyncsTranslog) set up a primary/replica
// pair and assert inside the recovery target that no sync is pending after
// translog operations have been applied.
IndexMetaData.builder("test")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
            .settings(settings)
            .primaryTerm(0, 1).build();
        IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
        recoverShardFromStore(primary);
        indexDoc(primary, "_doc", "0", "{\"foo\" : \"bar\"}");
        IndexShard replica = newShard(primary.shardId(), false, "n2", metaData, null);
        recoverReplica(replica, primary, (shard, discoveryNode) ->
            new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> {
            }) {
                @Override
                public long indexTranslogOperations(List<Translog.Operation> operations, int totalTranslogOps) throws IOException {
                    final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps);
                    // the super call fsyncs; nothing may be left to sync
                    assertFalse(replica.isSyncNeeded());
                    return localCheckpoint;
                }
            }, true, true);

        closeShards(primary, replica);
    }

    // Replaying a translog snapshot that contains corrupt (unparsable JSON)
    // entries must count only the valid operations as recovered.
    public void testRecoverFromTranslog() throws IOException {
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("test")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
            .settings(settings)
            .primaryTerm(0, randomLongBetween(1, Long.MAX_VALUE)).build();
        IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
        List<Translog.Operation> operations = new ArrayList<>();
        int numTotalEntries = randomIntBetween(0, 10);
        int numCorruptEntries = 0;
        for (int i = 0; i < numTotalEntries; i++) {
            if (randomBoolean()) {
                operations.add(new Translog.Index("_doc", "1", 0, primary.getPrimaryTerm(), 1,
                    "{\"foo\" : \"bar\"}".getBytes(Charset.forName("UTF-8")), null, -1));
            } else {
                // corrupt entry
                operations.add(new Translog.Index("_doc", "2", 1, primary.getPrimaryTerm(), 1,
                    "{\"foo\" : \"bar}".getBytes(Charset.forName("UTF-8")), null, -1));
                numCorruptEntries++;
            }
        }
// (continuation of testRecoverFromTranslog) feed the operations through a
// hand-rolled Translog.Snapshot and verify the recovery counters.
Iterator<Translog.Operation> iterator = operations.iterator();
        Translog.Snapshot snapshot = new Translog.Snapshot() {
            @Override
            public void close() {

            }

            @Override
            public int totalOperations() {
                return numTotalEntries;
            }

            @Override
            public Translog.Operation next() throws IOException {
                return iterator.hasNext() ? iterator.next() : null;
            }
        };
        primary.markAsRecovering("store", new RecoveryState(primary.routingEntry(),
            getFakeDiscoNode(primary.routingEntry().currentNodeId()),
            null));
        primary.recoverFromStore();

        primary.state = IndexShardState.RECOVERING; // translog recovery on the next line would otherwise fail as we are in POST_RECOVERY
        primary.runTranslogRecovery(primary.getEngine(), snapshot);
        assertThat(primary.recoveryState().getTranslog().totalOperationsOnStart(), equalTo(numTotalEntries));
        assertThat(primary.recoveryState().getTranslog().totalOperations(), equalTo(numTotalEntries));
        // corrupt entries are skipped, so only the valid ones count as recovered
        assertThat(primary.recoveryState().getTranslog().recoveredOperations(), equalTo(numTotalEntries - numCorruptEntries));

        closeShards(primary);
    }

    // A shard must only become "active" once translog recovery actually opens
    // the engine, not merely when it is marked as recovering.
    public void testShardActiveDuringInternalRecovery() throws IOException {
        IndexShard shard = newStartedShard(true);
        indexDoc(shard, "_doc", "0");
        shard = reinitShard(shard);
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        shard.markAsRecovering("for testing", new RecoveryState(shard.routingEntry(), localNode, null));
        // Shard is still inactive since we haven't started recovering yet
        assertFalse(shard.isActive());
        shard.prepareForIndexRecovery();
        // Shard is still inactive since we haven't started recovering yet
        assertFalse(shard.isActive());
        shard.openEngineAndRecoverFromTranslog();
        // Shard should now be active since we did recover:
        assertTrue(shard.isActive());
        closeShards(shard);
    }

    // Same activity check for peer recovery: the replica becomes active once
    // translog operations start flowing from the primary.
    public void testShardActiveDuringPeerRecovery() throws IOException {
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
// (continuation of testShardActiveDuringPeerRecovery settings builder)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("test")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
            .settings(settings)
            .primaryTerm(0, 1).build();
        IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
        recoverShardFromStore(primary);
        indexDoc(primary, "_doc", "0", "{\"foo\" : \"bar\"}");
        IndexShard replica = newShard(primary.shardId(), false, "n2", metaData, null);
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
        // Shard is still inactive since we haven't started recovering yet
        assertFalse(replica.isActive());
        recoverReplica(replica, primary, (shard, discoveryNode) ->
            new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> {
            }) {
                @Override
                public long indexTranslogOperations(List<Translog.Operation> operations, int totalTranslogOps) throws IOException {
                    final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps);
                    // Shard should now be active since we did recover:
                    assertTrue(replica.isActive());
                    return localCheckpoint;
                }
            }, false, true);

        closeShards(primary, replica);
    }

    // Refresh listeners registered on a recovering replica must fire immediately
    // (with forcedRefresh == false) at each stage of peer recovery.
    public void testRefreshListenersDuringPeerRecovery() throws IOException {
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("test")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
            .settings(settings)
            .primaryTerm(0, 1).build();
        IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
        recoverShardFromStore(primary);
        indexDoc(primary,
// (continuation) the helper asserts a refresh listener fires synchronously and
// is handed forcedRefresh == false; it is re-checked at every recovery stage.
"_doc", "0", "{\"foo\" : \"bar\"}");

        Consumer<IndexShard> assertListenerCalled = shard -> {
            AtomicBoolean called = new AtomicBoolean();
            shard.addRefreshListener(null, b -> {
                assertFalse(b);
                called.set(true);
            });
            assertTrue(called.get());
        };
        IndexShard replica = newShard(primary.shardId(), false, "n2", metaData, null);
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        replica.markAsRecovering("for testing", new RecoveryState(replica.routingEntry(), localNode, localNode));
        assertListenerCalled.accept(replica);
        recoverReplica(replica, primary, (shard, discoveryNode) ->
            new RecoveryTarget(shard, discoveryNode, recoveryListener, aLong -> {
            }) {
                // we're only checking that listeners are called when the engine is open, before there is no point
                @Override
                public void prepareForTranslogOperations(boolean fileBasedRecovery, int totalTranslogOps) throws IOException {
                    super.prepareForTranslogOperations(fileBasedRecovery, totalTranslogOps);
                    assertListenerCalled.accept(replica);
                }

                @Override
                public long indexTranslogOperations(List<Translog.Operation> operations, int totalTranslogOps) throws IOException {
                    final long localCheckpoint = super.indexTranslogOperations(operations, totalTranslogOps);
                    assertListenerCalled.accept(replica);
                    return localCheckpoint;
                }

                @Override
                public void finalizeRecovery(long globalCheckpoint) throws IOException {
                    super.finalizeRecovery(globalCheckpoint);
                    assertListenerCalled.accept(replica);
                }
            }, false, true);

        closeShards(primary, replica);
    }

    // Shrink/split-style recovery: build a target shard from a local source
    // shard, verifying mapping propagation, file details, and checkpoints.
    public void testRecoverFromLocalShard() throws IOException {
        Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
            .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1)
            .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
            .build();
        IndexMetaData metaData = IndexMetaData.builder("source")
            .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}")
            .settings(settings)
            .primaryTerm(0, 1).build();
// (continuation of testRecoverFromLocalShard) index two docs on the source,
// then recover a LocalShards-sourced target from it; recovering from a shard
// of a different index must be rejected up front.
IndexShard sourceShard = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null);
        recoverShardFromStore(sourceShard);
        indexDoc(sourceShard, "_doc", "0", "{\"foo\" : \"bar\"}");
        indexDoc(sourceShard, "_doc", "1", "{\"foo\" : \"bar\"}");
        sourceShard.refresh("test");

        ShardRouting targetRouting = newShardRouting(new ShardId("index_1", "index_1", 0), "n1", true,
            ShardRoutingState.INITIALIZING, RecoverySource.LocalShardsRecoverySource.INSTANCE);

        final IndexShard targetShard;
        DiscoveryNode localNode = new DiscoveryNode("foo", buildNewFakeTransportAddress(), emptyMap(), emptySet(), Version.CURRENT);
        Map<String, MappingMetaData> requestedMappingUpdates = ConcurrentCollections.newConcurrentMap();
        {
            targetShard = newShard(targetRouting);
            targetShard.markAsRecovering("store", new RecoveryState(targetShard.routingEntry(), localNode, null));

            // record every mapping update the recovery requests; each type may
            // only be requested once (assertNull on the previous value)
            BiConsumer<String, MappingMetaData> mappingConsumer = (type, mapping) -> {
                assertNull(requestedMappingUpdates.put(type, mapping));
            };

            final IndexShard differentIndex = newShard(new ShardId("index_2", "index_2", 0), true);
            recoverShardFromStore(differentIndex);
            expectThrows(IllegalArgumentException.class, () -> {
                targetShard.recoverFromLocalShards(mappingConsumer, Arrays.asList(sourceShard, differentIndex));
            });
            closeShards(differentIndex);

            assertTrue(targetShard.recoverFromLocalShards(mappingConsumer, Arrays.asList(sourceShard)));
            RecoveryState recoveryState = targetShard.recoveryState();
            assertEquals(RecoveryState.Stage.DONE, recoveryState.getStage());
            assertTrue(recoveryState.getIndex().fileDetails().size() > 0);
            for (RecoveryState.File file : recoveryState.getIndex().fileDetails()) {
                if (file.reused()) {
                    assertEquals(file.recovered(), 0);
                } else {
                    assertEquals(file.recovered(), file.length());
                }
            }
            // check that local checkpoint of new primary is properly tracked after recovery
            assertThat(targetShard.getLocalCheckpoint(), equalTo(1L));
            assertThat(targetShard.getReplicationTracker().getGlobalCheckpoint(), equalTo(1L));
// (continuation) start the recovered target, verify checkpoints, persistence
// across re-init, and that exactly one "_doc" mapping update was requested.
IndexShardTestCase.updateRoutingEntry(targetShard, ShardRoutingHelper.moveToStarted(targetShard.routingEntry()));
            assertThat(targetShard.getReplicationTracker().getTrackedLocalCheckpointForShard(
                targetShard.routingEntry().allocationId().getId()).getLocalCheckpoint(), equalTo(1L));
            assertDocCount(targetShard, 2);
        }
        // now check that it's persistent ie. that the added shards are committed
        {
            final IndexShard newShard = reinitShard(targetShard);
            recoverShardFromStore(newShard);
            assertDocCount(newShard, 2);
            closeShards(newShard);
        }

        assertThat(requestedMappingUpdates, hasKey("_doc"));
        assertThat(requestedMappingUpdates.get("_doc").get().source().string(),
            equalTo("{\"properties\":{\"foo\":{\"type\":\"text\"}}}"));

        closeShards(sourceShard, targetShard);
    }

    // Verifies DocsStats (count / deleted / average size) across index, delete,
    // flush, and force-merge, with soft-delete tombstones taken into account.
    public void testDocStats() throws IOException {
        IndexShard indexShard = null;
        try {
            // retention of 0 soft-delete ops so tombstones can be merged away
            indexShard = newStartedShard(
                Settings.builder().put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), 0).build());
            final long numDocs = randomIntBetween(2, 32); // at least two documents so we have docs to delete
            // Delete at least numDocs/10 documents otherwise the number of deleted docs will be below 10%
            // and forceMerge will refuse to expunge deletes
            final long numDocsToDelete = randomIntBetween((int) Math.ceil(Math.nextUp(numDocs / 10.0)), Math.toIntExact(numDocs));
            for (int i = 0; i < numDocs; i++) {
                final String id = Integer.toString(i);
                indexDoc(indexShard, "_doc", id);
            }
            if (randomBoolean()) {
                indexShard.refresh("test");
            } else {
                indexShard.flush(new FlushRequest());
            }
            {
                final DocsStats docsStats = indexShard.docStats();
                assertThat(docsStats.getCount(), equalTo(numDocs));
                try (Engine.Searcher searcher = indexShard.acquireSearcher("test")) {
                    assertTrue(searcher.reader().numDocs() <= docsStats.getCount());
                }
                assertThat(docsStats.getDeleted(), equalTo(0L));
                assertThat(docsStats.getAverageSizeInBytes(), greaterThan(0L));
            }

            final List<Integer> ids = randomSubsetOf(
                Math.toIntExact(numDocsToDelete),
// (continuation of testDocStats) delete-then-reindex a random subset so each
// chosen doc leaves a deleted version behind, then flush the buffered deletes.
IntStream.range(0, Math.toIntExact(numDocs)).boxed().collect(Collectors.toList()));
            for (final Integer i : ids) {
                final String id = Integer.toString(i);
                deleteDoc(indexShard, "_doc", id);
                indexDoc(indexShard, "_doc", id);
            }
            // Need to update and sync the global checkpoint as the soft-deletes retention MergePolicy depends on it.
            if (indexShard.indexSettings.isSoftDeleteEnabled()) {
                if (indexShard.routingEntry().primary()) {
                    indexShard.updateGlobalCheckpointForShard(indexShard.routingEntry().allocationId().getId(),
                        indexShard.getLocalCheckpoint());
                } else {
                    indexShard.updateGlobalCheckpointOnReplica(indexShard.getLocalCheckpoint(), "test");
                }
                indexShard.sync();
            }
            // flush the buffered deletes
            final FlushRequest flushRequest = new FlushRequest();
            flushRequest.force(false);
            flushRequest.waitIfOngoing(false);
            indexShard.flush(flushRequest);

            if (randomBoolean()) {
                indexShard.refresh("test");
            }
            {
                final DocsStats docStats = indexShard.docStats();
                try (Engine.Searcher searcher = indexShard.acquireSearcher("test")) {
                    assertTrue(searcher.reader().numDocs() <= docStats.getCount());
                }
                assertThat(docStats.getCount(), equalTo(numDocs));
                // Lucene will delete a segment if all docs are deleted from it;
                // this means that we lose the deletes when deleting all docs.
                // If soft-delete is enabled, each delete op will add a deletion marker.
                final long deleteTombstones = indexShard.indexSettings.isSoftDeleteEnabled() ?
// (continuation of testDocStats) account for soft-delete tombstones, then
// force-merge them away and expect deleted == 0 again.
numDocsToDelete : 0L;
                if (numDocsToDelete == numDocs) {
                    assertThat(docStats.getDeleted(), equalTo(deleteTombstones));
                } else {
                    assertThat(docStats.getDeleted(), equalTo(numDocsToDelete + deleteTombstones));
                }
            }

            // merge them away
            final ForceMergeRequest forceMergeRequest = new ForceMergeRequest();
            forceMergeRequest.onlyExpungeDeletes(randomBoolean());
            forceMergeRequest.maxNumSegments(1);
            indexShard.forceMerge(forceMergeRequest);

            if (randomBoolean()) {
                indexShard.refresh("test");
            } else {
                indexShard.flush(new FlushRequest());
            }
            {
                final DocsStats docStats = indexShard.docStats();
                assertThat(docStats.getCount(), equalTo(numDocs));
                assertThat(docStats.getDeleted(), equalTo(0L));
                assertThat(docStats.getAverageSizeInBytes(), greaterThan(0L));
            }
        } finally {
            closeShards(indexShard);
        }
    }

    // The estimated total document size reported by DocsStats must stay within
    // +/-20% of the real on-disk store size, before and after updates/deletes.
    public void testEstimateTotalDocSize() throws Exception {
        IndexShard indexShard = null;
        try {
            indexShard = newStartedShard(true);
            int numDoc = randomIntBetween(100, 200);
            for (int i = 0; i < numDoc; i++) {
                String doc = Strings.toString(XContentFactory.jsonBuilder()
                    .startObject()
                    .field("count", randomInt())
                    .field("point", randomFloat())
                    .field("description", randomUnicodeOfCodepointLength(100))
                    .endObject());
                indexDoc(indexShard, "_doc", Integer.toString(i), doc);
            }

            assertThat("Without flushing, segment sizes should be zero",
                indexShard.docStats().getTotalSizeInBytes(), equalTo(0L));

            if (randomBoolean()) {
                indexShard.flush(new FlushRequest());
            } else {
                indexShard.refresh("test");
            }
            {
                final DocsStats docsStats = indexShard.docStats();
                final StoreStats storeStats = indexShard.storeStats();
                assertThat(storeStats.sizeInBytes(), greaterThan(numDoc * 100L)); // A doc should be more than 100 bytes.
// (continuation) check the estimate-vs-store correlation, churn half the docs
// with updates/deletes, then re-check the same bounds.
assertThat("Estimated total document size is too small compared with the stored size",
                    docsStats.getTotalSizeInBytes(), greaterThanOrEqualTo(storeStats.sizeInBytes() * 80/100));
                assertThat("Estimated total document size is too large compared with the stored size",
                    docsStats.getTotalSizeInBytes(), lessThanOrEqualTo(storeStats.sizeInBytes() * 120/100));
            }

            // Do some updates and deletes, then recheck the correlation again.
            for (int i = 0; i < numDoc / 2; i++) {
                if (randomBoolean()) {
                    // NOTE(review): type "doc" here differs from the "_doc" used when
                    // indexing above — verify this is intentional and not a typo.
                    deleteDoc(indexShard, "doc", Integer.toString(i));
                } else {
                    indexDoc(indexShard, "_doc", Integer.toString(i), "{\"foo\": \"bar\"}");
                }
            }
            if (randomBoolean()) {
                indexShard.flush(new FlushRequest());
            } else {
                indexShard.refresh("test");
            }
            {
                final DocsStats docsStats = indexShard.docStats();
                final StoreStats storeStats = indexShard.storeStats();
                assertThat("Estimated total document size is too small compared with the stored size",
                    docsStats.getTotalSizeInBytes(), greaterThanOrEqualTo(storeStats.sizeInBytes() * 80/100));
                assertThat("Estimated total document size is too large compared with the stored size",
                    docsStats.getTotalSizeInBytes(), lessThanOrEqualTo(storeStats.sizeInBytes() * 120/100));
            }
        } finally {
            closeShards(indexShard);
        }
    }

    /**
     * here we are simulating the scenario that happens when we do async shard fetching from GatewaySerivce while we are finishing
     * a recovery and concurrently clean files. This should always be possible without any exception.
* Yet there was a bug where IndexShard
     * acquired the index writer lock before it called into the store that has it's own locking for metadata reads
     */
    public void testReadSnapshotConcurrently() throws IOException, InterruptedException {
        IndexShard indexShard = newStartedShard();
        indexDoc(indexShard, "_doc", "0", "{}");
        if (randomBoolean()) {
            indexShard.refresh("test");
        }
        indexDoc(indexShard, "_doc", "1", "{}");
        indexShard.flush(new FlushRequest());
        closeShards(indexShard);

        final IndexShard newShard = reinitShard(indexShard);
        Store.MetadataSnapshot storeFileMetaDatas = newShard.snapshotStoreMetadata();
        assertTrue("at least 2 files, commit and data: " + storeFileMetaDatas.toString(), storeFileMetaDatas.size() > 1);
        AtomicBoolean stop = new AtomicBoolean(false);
        CountDownLatch latch = new CountDownLatch(1);
        expectThrows(AlreadyClosedException.class, () -> newShard.getEngine()); // no engine
        // background reader: repeatedly snapshot the store metadata and verify it
        // never diverges while the main thread cleans/verifies the store
        Thread thread = new Thread(() -> {
            latch.countDown();
            while (stop.get() == false) {
                try {
                    Store.MetadataSnapshot readMeta = newShard.snapshotStoreMetadata();
                    assertEquals(0, storeFileMetaDatas.recoveryDiff(readMeta).different.size());
                    assertEquals(0, storeFileMetaDatas.recoveryDiff(readMeta).missing.size());
                    assertEquals(storeFileMetaDatas.size(), storeFileMetaDatas.recoveryDiff(readMeta).identical.size());
                } catch (IOException e) {
                    throw new AssertionError(e);
                }
            }
        });
        thread.start();
        latch.await();

        int iters = iterations(10, 100);
        for (int i = 0; i < iters; i++) {
            newShard.store().cleanupAndVerify("test", storeFileMetaDatas);
        }
        assertTrue(stop.compareAndSet(false, true));
        thread.join();
        closeShards(newShard);
    }

    /**
     * Simulates a scenario that happens when we are async fetching snapshot metadata from GatewayService
     * and checking index concurrently. This should always be possible without any exception.
*/ public void testReadSnapshotAndCheckIndexConcurrently() throws Exception { final boolean isPrimary = randomBoolean(); IndexShard indexShard = newStartedShard(isPrimary); final long numDocs = between(10, 100); for (long i = 0; i < numDocs; i++) { indexDoc(indexShard, "_doc", Long.toString(i), "{}"); if (randomBoolean()) { indexShard.refresh("test"); } } indexShard.flush(new FlushRequest()); closeShards(indexShard); final ShardRouting shardRouting = ShardRoutingHelper.initWithSameId(indexShard.routingEntry(), isPrimary ? RecoverySource.StoreRecoverySource.EXISTING_STORE_INSTANCE : RecoverySource.PeerRecoverySource.INSTANCE ); final IndexMetaData indexMetaData = IndexMetaData.builder(indexShard.indexSettings().getIndexMetaData()) .settings(Settings.builder() .put(indexShard.indexSettings.getSettings()) .put(IndexSettings.INDEX_CHECK_ON_STARTUP.getKey(), randomFrom("false", "true", "checksum", "fix"))) .build(); final IndexShard newShard = newShard(shardRouting, indexShard.shardPath(), indexMetaData, null, null, indexShard.engineFactory, indexShard.getGlobalCheckpointSyncer(), EMPTY_EVENT_LISTENER); Store.MetadataSnapshot storeFileMetaDatas = newShard.snapshotStoreMetadata(); assertTrue("at least 2 files, commit and data: " + storeFileMetaDatas.toString(), storeFileMetaDatas.size() > 1); AtomicBoolean stop = new AtomicBoolean(false); CountDownLatch latch = new CountDownLatch(1); Thread snapshotter = new Thread(() -> { latch.countDown(); while (stop.get() == false) { try { Store.MetadataSnapshot readMeta = newShard.snapshotStoreMetadata(); assertThat(readMeta.getNumDocs(), equalTo(numDocs)); assertThat(storeFileMetaDatas.recoveryDiff(readMeta).different.size(), equalTo(0)); assertThat(storeFileMetaDatas.recoveryDiff(readMeta).missing.size(), equalTo(0)); assertThat(storeFileMetaDatas.recoveryDiff(readMeta).identical.size(), equalTo(storeFileMetaDatas.size())); } catch (IOException e) { throw new AssertionError(e); } } }); snapshotter.start(); if (isPrimary) { 
newShard.markAsRecovering("store", new RecoveryState(newShard.routingEntry(), getFakeDiscoNode(newShard.routingEntry().currentNodeId()), null)); } else { newShard.markAsRecovering("peer", new RecoveryState(newShard.routingEntry(), getFakeDiscoNode(newShard.routingEntry().currentNodeId()), getFakeDiscoNode(newShard.routingEntry().currentNodeId()))); } int iters = iterations(10, 100); latch.await(); for (int i = 0; i < iters; i++) { newShard.checkIndex(); } assertTrue(stop.compareAndSet(false, true)); snapshotter.join(); closeShards(newShard); } class Result { private final int localCheckpoint; private final int maxSeqNo; private final boolean gap; Result(final int localCheckpoint, final int maxSeqNo, final boolean gap) { this.localCheckpoint = localCheckpoint; this.maxSeqNo = maxSeqNo; this.gap = gap; } } /** * Index on the specified shard while introducing sequence number gaps. * * @param indexShard the shard * @param operations the number of operations * @param offset the starting sequence number * @return a pair of the maximum sequence number and whether or not a gap was introduced * @throws IOException if an I/O exception occurs while indexing on the shard */ private Result indexOnReplicaWithGaps( final IndexShard indexShard, final int operations, final int offset) throws IOException { int localCheckpoint = offset; int max = offset; boolean gap = false; for (int i = offset + 1; i < operations; i++) { if (!rarely() || i == operations - 1) { // last operation can't be a gap as it's not a gap anymore final String id = Integer.toString(i); SourceToParse sourceToParse = SourceToParse.source(indexShard.shardId().getIndexName(), "_doc", id, new BytesArray("{}"), XContentType.JSON); indexShard.applyIndexOperationOnReplica(i, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); if (!gap && i == localCheckpoint + 1) { localCheckpoint++; } max = i; } else { gap = true; } } assert localCheckpoint == indexShard.getLocalCheckpoint(); assert !gap || 
(localCheckpoint != max); return new Result(localCheckpoint, max, gap); } /** A dummy repository for testing which just needs restore overridden */ private abstract static class RestoreOnlyRepository extends AbstractLifecycleComponent implements Repository { private final String indexName; RestoreOnlyRepository(String indexName) { super(Settings.EMPTY); this.indexName = indexName; } @Override protected void doStart() { } @Override protected void doStop() { } @Override protected void doClose() { } @Override public RepositoryMetaData getMetadata() { return null; } @Override public SnapshotInfo getSnapshotInfo(SnapshotId snapshotId) { return null; } @Override public MetaData getSnapshotGlobalMetaData(SnapshotId snapshotId) { return null; } @Override public IndexMetaData getSnapshotIndexMetaData(SnapshotId snapshotId, IndexId index) throws IOException { return null; } @Override public RepositoryData getRepositoryData() { Map<IndexId, Set<SnapshotId>> map = new HashMap<>(); map.put(new IndexId(indexName, "blah"), emptySet()); return new RepositoryData(EMPTY_REPO_GEN, Collections.emptyMap(), Collections.emptyMap(), map, Collections.emptyList()); } @Override public void initializeSnapshot(SnapshotId snapshotId, List<IndexId> indices, MetaData metaData) { } @Override public SnapshotInfo finalizeSnapshot(SnapshotId snapshotId, List<IndexId> indices, long startTime, String failure, int totalShards, List<SnapshotShardFailure> shardFailures, long repositoryStateId, boolean includeGlobalState) { return null; } @Override public void deleteSnapshot(SnapshotId snapshotId, long repositoryStateId) { } @Override public long getSnapshotThrottleTimeInNanos() { return 0; } @Override public long getRestoreThrottleTimeInNanos() { return 0; } @Override public String startVerification() { return null; } @Override public void endVerification(String verificationToken) { } @Override public boolean isReadOnly() { return false; } @Override public void snapshotShard(IndexShard shard, SnapshotId 
snapshotId, IndexId indexId, IndexCommit snapshotIndexCommit, IndexShardSnapshotStatus snapshotStatus) { } @Override public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Version version, IndexId indexId, ShardId shardId) { return null; } @Override public void verify(String verificationToken, DiscoveryNode localNode) { } } public void testIsSearchIdle() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(primary); indexDoc(primary, "_doc", "0", "{\"foo\" : \"bar\"}"); assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); assertFalse(primary.isSearchIdle()); IndexScopedSettings scopedSettings = primary.indexSettings().getScopedSettings(); settings = Settings.builder().put(settings).put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.ZERO).build(); scopedSettings.applySettings(settings); assertTrue(primary.isSearchIdle()); settings = Settings.builder().put(settings).put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.timeValueMinutes(1)) .build(); scopedSettings.applySettings(settings); assertFalse(primary.isSearchIdle()); settings = Settings.builder().put(settings).put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.timeValueMillis(10)) .build(); scopedSettings.applySettings(settings); assertBusy(() -> assertTrue(primary.isSearchIdle())); do { // now loop until we are fast enough... 
shouldn't take long primary.awaitShardSearchActive(aBoolean -> {}); } while (primary.isSearchIdle()); closeShards(primary); } public void testScheduledRefresh() throws IOException, InterruptedException { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(primary); indexDoc(primary, "_doc", "0", "{\"foo\" : \"bar\"}"); assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); IndexScopedSettings scopedSettings = primary.indexSettings().getScopedSettings(); settings = Settings.builder().put(settings).put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.ZERO).build(); scopedSettings.applySettings(settings); assertFalse(primary.getEngine().refreshNeeded()); indexDoc(primary, "_doc", "1", "{\"foo\" : \"bar\"}"); assertTrue(primary.getEngine().refreshNeeded()); long lastSearchAccess = primary.getLastSearcherAccess(); assertFalse(primary.scheduledRefresh()); assertEquals(lastSearchAccess, primary.getLastSearcherAccess()); // wait until the thread-pool has moved the timestamp otherwise we can't assert on this below awaitBusy(() -> primary.getThreadPool().relativeTimeInMillis() > lastSearchAccess); CountDownLatch latch = new CountDownLatch(10); for (int i = 0; i < 10; i++) { primary.awaitShardSearchActive(refreshed -> { assertTrue(refreshed); try (Engine.Searcher searcher = primary.acquireSearcher("test")) { assertEquals(2, searcher.reader().numDocs()); } finally { latch.countDown(); } }); } assertNotEquals("awaitShardSearchActive must access a searcher to remove search idle state", 
lastSearchAccess, primary.getLastSearcherAccess()); assertTrue(lastSearchAccess < primary.getLastSearcherAccess()); try (Engine.Searcher searcher = primary.acquireSearcher("test")) { assertEquals(1, searcher.reader().numDocs()); } assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); latch.await(); CountDownLatch latch1 = new CountDownLatch(1); primary.awaitShardSearchActive(refreshed -> { assertFalse(refreshed); try (Engine.Searcher searcher = primary.acquireSearcher("test")) { assertEquals(2, searcher.reader().numDocs()); } finally { latch1.countDown(); } }); latch1.await(); indexDoc(primary, "_doc", "2", "{\"foo\" : \"bar\"}"); assertFalse(primary.scheduledRefresh()); assertTrue(primary.isSearchIdle()); primary.checkIdle(0); assertTrue(primary.scheduledRefresh()); // make sure we refresh once the shard is inactive try (Engine.Searcher searcher = primary.acquireSearcher("test")) { assertEquals(3, searcher.reader().numDocs()); } closeShards(primary); } public void testRefreshIsNeededWithRefreshListeners() throws IOException, InterruptedException { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(primary); indexDoc(primary, "_doc", "0", "{\"foo\" : \"bar\"}"); assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); Engine.IndexResult doc = indexDoc(primary, "_doc", "1", "{\"foo\" : \"bar\"}"); CountDownLatch latch = new CountDownLatch(1); primary.addRefreshListener(doc.getTranslogLocation(), r -> latch.countDown()); assertEquals(1, latch.getCount()); 
assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); latch.await(); IndexScopedSettings scopedSettings = primary.indexSettings().getScopedSettings(); settings = Settings.builder().put(settings).put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.ZERO).build(); scopedSettings.applySettings(settings); doc = indexDoc(primary, "_doc", "2", "{\"foo\" : \"bar\"}"); CountDownLatch latch1 = new CountDownLatch(1); primary.addRefreshListener(doc.getTranslogLocation(), r -> latch1.countDown()); assertEquals(1, latch1.getCount()); assertTrue(primary.getEngine().refreshNeeded()); assertTrue(primary.scheduledRefresh()); latch1.await(); closeShards(primary); } public void testSegmentMemoryTrackedInBreaker() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(primary); indexDoc(primary, "_doc", "0", "{\"foo\" : \"foo\"}"); primary.refresh("forced refresh"); SegmentsStats ss = primary.segmentStats(randomBoolean()); CircuitBreaker breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(ss.getMemoryInBytes(), equalTo(breaker.getUsed())); final long preRefreshBytes = ss.getMemoryInBytes(); indexDoc(primary, "_doc", "1", "{\"foo\" : \"bar\"}"); indexDoc(primary, "_doc", "2", "{\"foo\" : \"baz\"}"); indexDoc(primary, "_doc", "3", "{\"foo\" : \"eggplant\"}"); ss = primary.segmentStats(randomBoolean()); breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(preRefreshBytes, equalTo(breaker.getUsed())); 
primary.refresh("refresh"); ss = primary.segmentStats(randomBoolean()); breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(breaker.getUsed(), equalTo(ss.getMemoryInBytes())); assertThat(breaker.getUsed(), greaterThan(preRefreshBytes)); indexDoc(primary, "_doc", "4", "{\"foo\": \"potato\"}"); // Forces a refresh with the INTERNAL scope ((InternalEngine) primary.getEngine()).writeIndexingBuffer(); ss = primary.segmentStats(randomBoolean()); breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(breaker.getUsed(), equalTo(ss.getMemoryInBytes())); assertThat(breaker.getUsed(), greaterThan(preRefreshBytes)); final long postRefreshBytes = ss.getMemoryInBytes(); // Deleting a doc causes its memory to be freed from the breaker deleteDoc(primary, "_doc", "0"); // Here we are testing that a fully deleted segment should be dropped and its memory usage is freed. // In order to instruct the merge policy not to keep a fully deleted segment, // we need to flush and make that commit safe so that the SoftDeletesPolicy can drop everything. 
if (IndexSettings.INDEX_SOFT_DELETES_SETTING.get(settings)) { primary.sync(); flushShard(primary); } primary.refresh("force refresh"); ss = primary.segmentStats(randomBoolean()); breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(breaker.getUsed(), lessThan(postRefreshBytes)); closeShards(primary); breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(breaker.getUsed(), equalTo(0L)); } public void testSegmentMemoryTrackedWithRandomSearchers() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(primary); int threadCount = randomIntBetween(2, 6); List<Thread> threads = new ArrayList<>(threadCount); int iterations = randomIntBetween(50, 100); List<Engine.Searcher> searchers = Collections.synchronizedList(new ArrayList<>()); logger.info("--> running with {} threads and {} iterations each", threadCount, iterations); for (int threadId = 0; threadId < threadCount; threadId++) { final String threadName = "thread-" + threadId; Runnable r = () -> { for (int i = 0; i < iterations; i++) { try { if (randomBoolean()) { String id = "id-" + threadName + "-" + i; logger.debug("--> {} indexing {}", threadName, id); indexDoc(primary, "_doc", id, "{\"foo\" : \"" + randomAlphaOfLength(10) + "\"}"); } if (randomBoolean() && i > 10) { String id = "id-" + threadName + "-" + randomIntBetween(0, i - 1); logger.debug("--> {}, deleting {}", threadName, id); deleteDoc(primary, "_doc", id); } if (randomBoolean()) { logger.debug("--> {} refreshing", 
threadName); primary.refresh("forced refresh"); } if (randomBoolean()) { String searcherName = "searcher-" + threadName + "-" + i; logger.debug("--> {} acquiring new searcher {}", threadName, searcherName); // Acquire a new searcher, adding it to the list searchers.add(primary.acquireSearcher(searcherName)); } if (randomBoolean() && searchers.size() > 1) { // Close one of the searchers at random synchronized (searchers) { // re-check because it could have decremented after the check if (searchers.size() > 1) { Engine.Searcher searcher = searchers.remove(0); logger.debug("--> {} closing searcher {}", threadName, searcher.source()); IOUtils.close(searcher); } } } } catch (Exception e) { logger.warn("--> got exception: ", e); fail("got an exception we didn't expect"); } } }; threads.add(new Thread(r, threadName)); } threads.stream().forEach(t -> t.start()); for (Thread t : threads) { t.join(); } // We need to wait for all ongoing merges to complete. The reason is that during a merge the // IndexWriter holds the core cache key open and causes the memory to be registered in the breaker primary.forceMerge(new ForceMergeRequest().maxNumSegments(1).flush(true)); // Close remaining searchers IOUtils.close(searchers); primary.refresh("test"); SegmentsStats ss = primary.segmentStats(randomBoolean()); CircuitBreaker breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); long segmentMem = ss.getMemoryInBytes(); long breakerMem = breaker.getUsed(); logger.info("--> comparing segmentMem: {} - breaker: {} => {}", segmentMem, breakerMem, segmentMem == breakerMem); assertThat(segmentMem, equalTo(breakerMem)); // Close shard closeShards(primary); // Check that the breaker was successfully reset to 0, meaning that all the accounting was correctly applied breaker = primary.circuitBreakerService.getBreaker(CircuitBreaker.ACCOUNTING); assertThat(breaker.getUsed(), equalTo(0L)); } public void testFlushOnInactive() throws Exception { Settings settings = 
Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("_doc", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(metaData.getIndex(), 0), "n1", true, ShardRoutingState .INITIALIZING, RecoverySource.StoreRecoverySource.EMPTY_STORE_INSTANCE); final ShardId shardId = shardRouting.shardId(); final NodeEnvironment.NodePath nodePath = new NodeEnvironment.NodePath(createTempDir()); ShardPath shardPath = new ShardPath(false, nodePath.resolve(shardId), nodePath.resolve(shardId), shardId); AtomicBoolean markedInactive = new AtomicBoolean(); AtomicReference<IndexShard> primaryRef = new AtomicReference<>(); IndexShard primary = newShard(shardRouting, shardPath, metaData, null, null, new InternalEngineFactory(), () -> { }, new IndexEventListener() { @Override public void onShardInactive(IndexShard indexShard) { markedInactive.set(true); primaryRef.get().flush(new FlushRequest()); } }); primaryRef.set(primary); recoverShardFromStore(primary); for (int i = 0; i < 3; i++) { indexDoc(primary, "_doc", "" + i, "{\"foo\" : \"" + randomAlphaOfLength(10) + "\"}"); primary.refresh("test"); // produce segments } List<Segment> segments = primary.segments(false); Set<String> names = new HashSet<>(); for (Segment segment : segments) { assertFalse(segment.committed); assertTrue(segment.search); names.add(segment.getName()); } assertEquals(3, segments.size()); primary.flush(new FlushRequest()); primary.forceMerge(new ForceMergeRequest().maxNumSegments(1).flush(false)); primary.refresh("test"); segments = primary.segments(false); for (Segment segment : segments) { if (names.contains(segment.getName())) { assertTrue(segment.committed); assertFalse(segment.search); } else { 
assertFalse(segment.committed); assertTrue(segment.search); } } assertEquals(4, segments.size()); assertFalse(markedInactive.get()); assertBusy(() -> { primary.checkIdle(0); assertFalse(primary.isActive()); }); assertTrue(markedInactive.get()); segments = primary.segments(false); assertEquals(1, segments.size()); for (Segment segment : segments) { assertTrue(segment.committed); assertTrue(segment.search); } closeShards(primary); } public void testOnCloseStats() throws IOException { final IndexShard indexShard = newStartedShard(true); for (int i = 0; i < 3; i++) { indexDoc(indexShard, "_doc", "" + i, "{\"foo\" : \"" + randomAlphaOfLength(10) + "\"}"); indexShard.refresh("test"); // produce segments } // check stats on closed and on opened shard if (randomBoolean()) { closeShards(indexShard); expectThrows(AlreadyClosedException.class, () -> indexShard.seqNoStats()); expectThrows(AlreadyClosedException.class, () -> indexShard.commitStats()); expectThrows(AlreadyClosedException.class, () -> indexShard.storeStats()); } else { final SeqNoStats seqNoStats = indexShard.seqNoStats(); assertThat(seqNoStats.getLocalCheckpoint(), equalTo(2L)); final CommitStats commitStats = indexShard.commitStats(); assertThat(commitStats.getGeneration(), equalTo(2L)); final StoreStats storeStats = indexShard.storeStats(); assertThat(storeStats.sizeInBytes(), greaterThan(0L)); closeShards(indexShard); } } public void testSupplyTombstoneDoc() throws Exception { IndexShard shard = newStartedShard(); String id = randomRealisticUnicodeOfLengthBetween(1, 10); ParsedDocument deleteTombstone = shard.getEngine().config().getTombstoneDocSupplier().newDeleteTombstoneDoc("doc", id); assertThat(deleteTombstone.docs(), hasSize(1)); ParseContext.Document deleteDoc = deleteTombstone.docs().get(0); assertThat(deleteDoc.getFields().stream().map(IndexableField::name).collect(Collectors.toList()), containsInAnyOrder(IdFieldMapper.NAME, VersionFieldMapper.NAME, SeqNoFieldMapper.NAME, SeqNoFieldMapper.NAME, 
SeqNoFieldMapper.PRIMARY_TERM_NAME, SeqNoFieldMapper.TOMBSTONE_NAME)); assertThat(deleteDoc.getField(IdFieldMapper.NAME).binaryValue(), equalTo(Uid.encodeId(id))); assertThat(deleteDoc.getField(SeqNoFieldMapper.TOMBSTONE_NAME).numericValue().longValue(), equalTo(1L)); final String reason = randomUnicodeOfLength(200); ParsedDocument noopTombstone = shard.getEngine().config().getTombstoneDocSupplier().newNoopTombstoneDoc(reason); assertThat(noopTombstone.docs(), hasSize(1)); ParseContext.Document noopDoc = noopTombstone.docs().get(0); assertThat(noopDoc.getFields().stream().map(IndexableField::name).collect(Collectors.toList()), containsInAnyOrder(VersionFieldMapper.NAME, SourceFieldMapper.NAME, SeqNoFieldMapper.TOMBSTONE_NAME, SeqNoFieldMapper.NAME, SeqNoFieldMapper.NAME, SeqNoFieldMapper.PRIMARY_TERM_NAME)); assertThat(noopDoc.getField(SeqNoFieldMapper.TOMBSTONE_NAME).numericValue().longValue(), equalTo(1L)); assertThat(noopDoc.getField(SourceFieldMapper.NAME).binaryValue(), equalTo(new BytesRef(reason))); closeShards(shard); } public void testSearcherIncludesSoftDeletes() throws Exception { Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true) .build(); IndexMetaData metaData = IndexMetaData.builder("test") .putMapping("test", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") .settings(settings) .primaryTerm(0, 1).build(); IndexShard shard = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); recoverShardFromStore(shard); indexDoc(shard, "test", "0", "{\"foo\" : \"bar\"}"); indexDoc(shard, "test", "1", "{\"foo\" : \"baz\"}"); deleteDoc(shard, "test", "0"); shard.refresh("test"); try (Engine.Searcher searcher = shard.acquireSearcher("test")) { IndexSearcher searchWithSoftDeletes = new 
IndexSearcher(Lucene.wrapAllDocsLive(searcher.getDirectoryReader())); assertThat(searcher.searcher().search(new TermQuery(new Term("foo", "bar")), 10).totalHits, equalTo(0L)); assertThat(searchWithSoftDeletes.search(new TermQuery(new Term("foo", "bar")), 10).totalHits, equalTo(1L)); assertThat(searcher.searcher().search(new TermQuery(new Term("foo", "baz")), 10).totalHits, equalTo(1L)); assertThat(searchWithSoftDeletes.search(new TermQuery(new Term("foo", "baz")), 10).totalHits, equalTo(1L)); } closeShards(shard); } }
TEST: avoid merge in testSegmentMemoryTrackedInBreaker This commit indexes an extra document to avoid triggering merges. Relates LUCENE-8263
server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java
TEST: avoid merge in testSegmentMemoryTrackedInBreaker
<ide><path>erver/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java <ide> assertThat(breaker.getUsed(), greaterThan(preRefreshBytes)); <ide> <ide> indexDoc(primary, "_doc", "4", "{\"foo\": \"potato\"}"); <add> indexDoc(primary, "_doc", "5", "{\"foo\": \"potato\"}"); <ide> // Forces a refresh with the INTERNAL scope <ide> ((InternalEngine) primary.getEngine()).writeIndexingBuffer(); <ide>