lang
stringclasses
2 values
license
stringclasses
13 values
stderr
stringlengths
0
343
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
6
87.7k
new_contents
stringlengths
0
6.23M
new_file
stringlengths
3
311
old_contents
stringlengths
0
6.23M
message
stringlengths
6
9.1k
old_file
stringlengths
3
311
subject
stringlengths
0
4k
git_diff
stringlengths
0
6.31M
Java
apache-2.0
314274a6b55980f67511bb8159095cbd60fb5e47
0
consulo/consulo,amith01994/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,salguarnieri/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,ernestp/consulo,robovm/robovm-studio,FHannes/intellij-community,caot/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,holmes/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,ahb0327/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,jagguli/intellij-community,caot/intellij-community,adedayo/intellij-community,adedayo/intellij-community,ernestp/consulo,fitermay/intellij-community,ivan-fedorov/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,ThiagoGarciaAlves/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,dslomov/intellij-community,clumsy/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,supersven/intellij-community,akosyakov/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,ryano144/intellij-community,salguarnieri/intellij-community,nicolargo/intellij-community,fnouama/intellij-community,slisson/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,ahb0327/intellij-community,izonder/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,muntasirsyed/intellij-community,retomerz/intell
ij-community,jagguli/intellij-community,diorcety/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,allotria/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,asedunov/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,da1z/intellij-community,caot/intellij-community,TangHao1987/intellij-community,consulo/consulo,youdonghai/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,adedayo/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,semonte/intellij-community,gnuhub/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,izonder/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,TangHao1987/intellij-community,fitermay/intellij-community,fitermay/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,allotria/intellij-community,ernestp/consulo,fitermay/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,ibinti/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,orekyuu/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,ernestp/consulo,samthor/intellij-community,ol-loginov/intellij-community,kool79/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,hurricup/intellij-commu
nity,diorcety/intellij-community,fitermay/intellij-community,xfournet/intellij-community,asedunov/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,supersven/intellij-community,vladmm/intellij-community,signed/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,michaelgallacher/intellij-community,fitermay/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,allotria/intellij-community,amith01994/intellij-community,ivan-fedorov/intellij-community,ryano144/intellij-community,retomerz/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,supersven/intellij-community,holmes/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,ol-loginov/intellij-community,fnouama/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,FHannes/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,holmes/intellij-community,apixandru/intellij-community,signed/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,holmes/intellij-community,Lekanich/intellij-community,caot/intellij-community,jagguli/intellij-community
,asedunov/intellij-community,apixandru/intellij-community,izonder/intellij-community,amith01994/intellij-community,ryano144/intellij-community,ibinti/intellij-community,holmes/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,izonder/intellij-community,FHannes/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,allotria/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,samthor/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,izonder/intellij-community,izonder/intellij-community,TangHao1987/intellij-community,signed/intellij-community,caot/intellij-community,petteyg/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,slisson/intellij-community,signed/intellij-community,blademainer/intellij-community,ibinti/intellij-community,caot/intellij-community,signed/intellij-community,mglukhikh/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,MER-GROUP/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,supersven/intellij-community,Lekanich/intellij-community,xfournet/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,vladmm/intellij-community,MER-GROUP/in
tellij-community,Lekanich/intellij-community,petteyg/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,da1z/intellij-community,asedunov/intellij-community,blademainer/intellij-community,salguarnieri/intellij-community,samthor/intellij-community,tmpgit/intellij-community,jagguli/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,ibinti/intellij-community,semonte/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,vladmm/intellij-community,apixandru/intellij-community,holmes/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,amith01994/intellij-community,robovm/robovm-studio,slisson/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,vladmm/intellij-community,xfournet/intellij-community,asedunov/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,TangHao1987/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,allotria/intellij-community,allotria/intellij-community,samthor/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,youdonghai/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,clumsy/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,blademainer/intellij-community,slisson/intellij-community,wreckJ/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,supersven/intellij-community,TangHao1987/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,fiterma
y/intellij-community,mglukhikh/intellij-community,signed/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,da1z/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,consulo/consulo,samthor/intellij-community,holmes/intellij-community,FHannes/intellij-community,samthor/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,fengbaicanhe/intellij-community,fnouama/intellij-community,adedayo/intellij-community,vladmm/intellij-community,allotria/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,salguarnieri/intellij-community,ibinti/intellij-community,holmes/intellij-community,robovm/robovm-studio,retomerz/intellij-community,jagguli/intellij-community,robovm/robovm-studio,kdwink/intellij-community,diorcety/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,Lekanich/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,caot/intellij-community,ivan-fedorov/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,supersven/intellij-community,Distrotech/intellij-community,samthor/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,semonte/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,amith01994/intellij-community,ibinti/intellij-community,retomerz/intellij-community,samthor/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,hurricup/intellij-
community,caot/intellij-community,suncycheng/intellij-community,tmpgit/intellij-community,petteyg/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,da1z/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,kdwink/intellij-community,orekyuu/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,slisson/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,holmes/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,da1z/intellij-community,dslomov/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,fengbaicanhe/intellij-community,Distrotech/intellij-community,kool79/intellij-community,clumsy/intellij-community,xfournet/intellij-community,consulo/consulo,ryano144/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,allotria/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,semonte/intellij-community,suncycheng/intellij-community,akosyakov/intellij-community,caot/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,supersven/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,dslomov/intellij-community,da1z/intellij-community,apixandru/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,robovm/robovm-studio,signed/intellij-community,lucafavatella/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,supersven/intellij-communit
y,lucafavatella/intellij-community,tmpgit/intellij-community,kool79/intellij-community,allotria/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,alphafoobar/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,fnouama/intellij-community,blademainer/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,fengbaicanhe/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,kdwink/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,MichaelNedzelsky/intellij-community,SerCeMan/intellij-community,wreckJ/intellij-community,izonder/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,nicolargo/intellij-community,wreckJ/intellij-community,izonder/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,vladmm/intellij-community,signed/intellij-community,ibinti/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,ftomassetti/intellij-community,clumsy/intellij-community,muntasirsyed/intellij-community,vvv1559/intellij-community,kool79/intellij-community,ryano144/intellij-community,vladmm/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,izonder/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,signed/intellij-community,nicolargo/intellij-community,retomerz/intellij-community,consulo/consulo,kdwink/intellij-community,hurricup/intellij-community,kdwink/intellij-community,slisson/intellij-community,slisson/intellij-community,asedunov/i
ntellij-community,pwoodworth/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,blademainer/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,xfournet/intellij-community,pwoodworth/intellij-community,ernestp/consulo,signed/intellij-community,ahb0327/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,da1z/intellij-community,slisson/intellij-community,vvv1559/intellij-community,izonder/intellij-community,idea4bsd/idea4bsd,ernestp/consulo,ivan-fedorov/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,hurricup/intellij-community,clumsy/intellij-community,Distrotech/intellij-community,dslomov/intellij-community,fitermay/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,MER-GROUP/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,ahb0327/intellij-community,holmes/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,supersven/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,fitermay/intellij-community,petteyg/intellij-community,kdwink/intellij-community,diorcety/intellij-community,apixandru/intellij-community,muntasirsyed/intellij-community,dslomov/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-commu
nity,diorcety/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,alphafoobar/intellij-community,tmpgit/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,da1z/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,youdonghai/intellij-community,blademainer/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,ibinti/intellij-community,clumsy/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,xfournet/intellij-community,samthor/intellij-community,blademainer/intellij-community,slisson/intellij-community,adedayo/intellij-community,semonte/intellij-community,suncycheng/intellij-community,caot/intellij-community,adedayo/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,consulo/consulo,supersven/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,asedunov/intellij-community,izonder/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,muntasirsyed/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,youdonghai/intellij-community,kool79/intellij-community,allotria/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,gnuhub/intellij-community,clumsy/intellij-community,kool79/intellij-community,slisson/intellij-community,semonte/intellij-community,ryano144/intellij-community,asedunov/intellij-
community,signed/intellij-community,retomerz/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,kool79/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,orekyuu/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,hurricup/intellij-community,retomerz/intellij-community,gnuhub/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,MichaelNedzelsky/intellij-community,MER-GROUP/intellij-community,wreckJ/intellij-community,samthor/intellij-community,da1z/intellij-community,nicolargo/intellij-community,kdwink/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,pwoodworth/intellij-community,jagguli/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,diorcety/intellij-community,fnouama/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,michaelgallacher/intellij-community,mglukhikh/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.ui; import com.intellij.CommonBundle; import com.intellij.icons.AllIcons; import com.intellij.ide.ui.UISettings; import com.intellij.idea.ActionsBundle; import com.intellij.openapi.Disposable; import com.intellij.openapi.MnemonicHelper; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationInfo; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.help.HelpManager; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.popup.StackingPopupDispatcher; import com.intellij.openapi.util.*; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.openapi.wm.IdeGlassPaneUtil; import com.intellij.ui.IdeBorderFactory; import com.intellij.ui.UIBundle; import com.intellij.ui.components.JBOptionButton; import com.intellij.ui.components.JBScrollPane; import com.intellij.util.Alarm; import com.intellij.util.ArrayUtil; import com.intellij.util.ui.AwtVisitor; import com.intellij.util.ui.DialogUtil; import com.intellij.util.ui.UIUtil; import org.intellij.lang.annotations.MagicConstant; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; 
import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.Border; import javax.swing.border.EmptyBorder; import javax.swing.plaf.UIResource; import java.awt.*; import java.awt.event.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Set; /** * The dialog wrapper. The dialog wrapper could be used only on event dispatch thread. * In case when the dialog must be created from other threads use * {@link EventQueue#invokeLater(Runnable)} or {@link EventQueue#invokeAndWait(Runnable)}. */ @SuppressWarnings({"SSBasedInspection", "MethodMayBeStatic", "UnusedDeclaration"}) public abstract class DialogWrapper { /** * The default exit code for "OK" action. */ public static final int OK_EXIT_CODE = 0; /** * The default exit code for "Cancel" action. */ public static final int CANCEL_EXIT_CODE = 1; /** * The default exit code for "Close" action. Equal to cancel. */ public static final int CLOSE_EXIT_CODE = CANCEL_EXIT_CODE; /** * If you use your custom exit codes you have have to start them with * this constant. */ public static final int NEXT_USER_EXIT_CODE = 2; /** * If your action returned by <code>createActions</code> method has non * <code>null</code> value for this key, then the button that corresponds to the action will be the * default button for the dialog. It's true if you don't change this behaviour * of <code>createJButtonForAction(Action)</code> method. */ @NonNls public static final String DEFAULT_ACTION = "DefaultAction"; @NonNls public static final String FOCUSED_ACTION = "FocusedAction"; private static final KeyStroke SHOW_OPTION_KEYSTROKE = KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, InputEvent.ALT_MASK | InputEvent.SHIFT_MASK); private final DialogWrapperPeer myPeer; private int myExitCode = CANCEL_EXIT_CODE; /** * The shared instance of default border for dialog's content pane. 
*/ public static final Border ourDefaultBorder = new EmptyBorder(UIUtil.PANEL_REGULAR_INSETS); private float myHorizontalStretch = 1.0f; private float myVerticalStretch = 1.0f; /** * Defines horizontal alignment of buttons. */ private int myButtonAlignment = SwingConstants.RIGHT; private boolean myCrossClosesWindow = true; private Insets myButtonMargins = new Insets(2, 16, 2, 16); protected Action myOKAction; protected Action myCancelAction; protected Action myHelpAction; private JButton[] myButtons; private boolean myClosed = false; protected boolean myPerformAction = false; private Action myYesAction = null; private Action myNoAction = null; protected JCheckBox myCheckBoxDoNotShowDialog; @Nullable private DoNotAskOption myDoNotAsk; private JComponent myPreferredFocusedComponent; private Computable<Point> myInitialLocationCallback; protected final Disposable myDisposable = new Disposable() { public String toString() { return DialogWrapper.this.toString(); } public void dispose() { DialogWrapper.this.dispose(); } }; private List<JBOptionButton> myOptionsButtons = new ArrayList<JBOptionButton>(); private int myCurrentOptionsButtonIndex = -1; private boolean myResizeInProgress = false; private ComponentAdapter myResizeListener; protected String getDoNotShowMessage() { return CommonBundle.message("dialog.options.do.not.show"); } public void setDoNotAskOption(@Nullable DoNotAskOption doNotAsk) { myDoNotAsk = doNotAsk; } private ErrorText myErrorText; private int myMaxErrorTextLength; private final Alarm myErrorTextAlarm = new Alarm(); /** * Creates modal <code>DialogWrapper</code>. The currently active window will be the dialog's parent. * * @param project parent window for the dialog will be calculated based on focused window for the * specified <code>project</code>. This parameter can be <code>null</code>. In this case parent window * will be suggested based on current focused window. * @param canBeParent specifies whether the dialog can be parent for other windows. 
This parameter is used * by <code>WindowManager</code>. * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ protected DialogWrapper(@Nullable Project project, boolean canBeParent) { myPeer = createPeer(project, canBeParent); final Window window = myPeer.getWindow(); if (window != null) { myResizeListener = new ComponentAdapter() { @Override public void componentResized(ComponentEvent e) { if (!myResizeInProgress) { myActualSize = myPeer.getSize(); if (myErrorText.isVisible()) { myActualSize.height -= myErrorText.getHeight() + 10; } } } }; window.addComponentListener(myResizeListener); } createDefaultActions(); } /** * Creates modal <code>DialogWrapper</code> that can be parent for other windows. * The currently active window will be the dialog's parent. * * @param project parent window for the dialog will be calculated based on focused window for the * specified <code>project</code>. This parameter can be <code>null</code>. In this case parent window * will be suggested based on current focused window. * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread * @see com.intellij.openapi.ui.DialogWrapper#DialogWrapper(com.intellij.openapi.project.Project, boolean) */ protected DialogWrapper(@Nullable Project project) { this(project, true); } /** * Creates modal <code>DialogWrapper</code>. The currently active window will be the dialog's parent. * * @param canBeParent specifies whether the dialog can be parent for other windows. This parameter is used * by <code>WindowManager</code>. 
* @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ protected DialogWrapper(boolean canBeParent) { this((Project)null, canBeParent); } protected DialogWrapper(boolean canBeParent, boolean toolkitModalIfPossible) { ensureEventDispatchThread(); myPeer = createPeer(canBeParent, toolkitModalIfPossible); createDefaultActions(); } /** * @param parent parent component which is used to calculate heavy weight window ancestor. * <code>parent</code> cannot be <code>null</code> and must be showing. * @param canBeParent can be parent * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ protected DialogWrapper(@NotNull Component parent, boolean canBeParent) { ensureEventDispatchThread(); myPeer = createPeer(parent, canBeParent); createDefaultActions(); } //validation private final Alarm myValidationAlarm = new Alarm(getValidationThreadToUse(), myDisposable); protected Alarm.ThreadToUse getValidationThreadToUse() { return Alarm.ThreadToUse.SWING_THREAD; } private int myValidationDelay = 300; private boolean myDisposed = false; private boolean myValidationStarted = false; private final ErrorPainter myErrorPainter = new ErrorPainter(); private JComponent myErrorPane; private boolean myErrorPainterInstalled = false; /** * Allows to postpone first start of validation * * @return <code>false</code> if start validation in <code>init()</code> method */ protected boolean postponeValidation() { return true; } /** * Validates a user input and returns <code>null</code> if everything is fine * or returns a problem description with component where is the problem has been found. * * @return <code>null</code> if everything is OK or a problem descriptor */ @Nullable protected ValidationInfo doValidate() { return null; } public void setValidationDelay(int delay) { myValidationDelay = delay; } private void reportProblem(final ValidationInfo info) { installErrorPainter(); myErrorPainter.setValidationInfo(info); if (! 
myErrorText.isTextSet(info.message)) { SwingUtilities.invokeLater(new Runnable() { public void run() { if (myDisposed) return; setErrorText(info.message); myPeer.getRootPane().getGlassPane().repaint(); getOKAction().setEnabled(false); } }); } } private void installErrorPainter() { if (myErrorPainterInstalled) return; myErrorPainterInstalled = true; UIUtil.invokeLaterIfNeeded(new Runnable() { @Override public void run() { IdeGlassPaneUtil.installPainter(myErrorPane, myErrorPainter, myDisposable); } }); } private void clearProblems() { myErrorPainter.setValidationInfo(null); if (! myErrorText.isTextSet(null)) { SwingUtilities.invokeLater(new Runnable() { public void run() { if (myDisposed) return; setErrorText(null); myPeer.getRootPane().getGlassPane().repaint(); getOKAction().setEnabled(true); } }); } } protected void createDefaultActions() { myOKAction = new OkAction(); myCancelAction = new CancelAction(); myHelpAction = new HelpAction(); } public void setUndecorated(boolean undecorated) { myPeer.setUndecorated(undecorated); } public final void addMouseListener(MouseListener listener) { myPeer.addMouseListener(listener); } public final void addMouseListener(MouseMotionListener listener) { myPeer.addMouseListener(listener); } public final void addKeyListener(KeyListener listener) { myPeer.addKeyListener(listener); } /** * Closes and disposes the dialog and sets the specified exit code. 
* * @param exitCode exit code * @param isOk is OK * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ public final void close(int exitCode, boolean isOk) { ensureEventDispatchThread(); if (myClosed) return; myClosed = true; myExitCode = exitCode; Window window = getWindow(); if (window != null && myResizeListener != null) { window.removeComponentListener(myResizeListener); myResizeListener = null; } if (isOk) { processDoNotAskOnOk(exitCode); } else { processDoNotAskOnCancel(); } Disposer.dispose(myDisposable); } public final void close(int exitCode) { close(exitCode, exitCode != CANCEL_EXIT_CODE); } /** * Factory method. It creates border for dialog's content pane. By default content * pane has has empty border with <code>(8,12,8,12)</code> insets. The subclasses can * return <code>null</code> in overridden methods. In this case there will be no * any border in the content pane. * * @return content pane border */ @Nullable protected Border createContentPaneBorder() { return ourDefaultBorder; } /** * This is factory method. It creates the panel located at the south of the content pane. By default that * panel contains dialog's buttons. This default implementation uses <code>createActions()</code> * and <code>createJButtonForAction(Action)</code> methods to construct the panel. 
* * @return south panel */ @Nullable protected JComponent createSouthPanel() { Action[] actions = filter(createActions()); Action[] leftSideActions = createLeftSideActions(); List<JButton> buttons = new ArrayList<JButton>(); boolean hasHelpToMoveToLeftSide = false; if (UIUtil.isUnderAquaLookAndFeel() && Arrays.asList(actions).contains(getHelpAction())) { hasHelpToMoveToLeftSide = true; actions = ArrayUtil.remove(actions, getHelpAction()); } if (SystemInfo.isMac) { for (Action action : actions) { if (action instanceof MacOtherAction) { leftSideActions = ArrayUtil.append(leftSideActions, action); actions = ArrayUtil.remove(actions, action); break; } } } else if (UIUtil.isUnderGTKLookAndFeel() && Arrays.asList(actions).contains(getHelpAction())) { leftSideActions = ArrayUtil.append(leftSideActions, getHelpAction()); actions = ArrayUtil.remove(actions, getHelpAction()); } JPanel panel = new JPanel(new BorderLayout()); final JPanel lrButtonsPanel = new JPanel(new GridBagLayout()); final Insets insets = SystemInfo.isMacOSLeopard ? 
new Insets(0, 0, 0, 0) : new Insets(8, 0, 0, 0); if (actions.length > 0 || leftSideActions.length > 0) { int gridX = 0; if (leftSideActions.length > 0) { JPanel buttonsPanel = createButtons(leftSideActions, buttons); if (actions.length > 0) { buttonsPanel.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 20)); // leave some space between button groups } lrButtonsPanel.add(buttonsPanel, new GridBagConstraints(gridX++, 0, 1, 1, 0, 0, GridBagConstraints.CENTER, GridBagConstraints.NONE, insets, 0, 0)); } lrButtonsPanel.add(Box.createHorizontalGlue(), // left strut new GridBagConstraints(gridX++, 0, 1, 1, 1, 0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, insets, 0, 0)); if (actions.length > 0) { if (SystemInfo.isMac) { // move ok action to the right int okNdx = ArrayUtil.indexOf(actions, getOKAction()); if (okNdx >= 0 && okNdx != actions.length - 1) { actions = ArrayUtil.append(ArrayUtil.remove(actions, getOKAction()), getOKAction()); } // move cancel action to the left int cancelNdx = ArrayUtil.indexOf(actions, getCancelAction()); if (cancelNdx > 0) { actions = ArrayUtil.mergeArrays(new Action[] {getCancelAction()}, ArrayUtil.remove(actions, getCancelAction())); } /*if (!hasFocusedAction(actions)) { int ndx = ArrayUtil.find(actions, getCancelAction()); if (ndx >= 0) { actions[ndx].putValue(FOCUSED_ACTION, Boolean.TRUE); } }*/ } JPanel buttonsPanel = createButtons(actions, buttons); lrButtonsPanel.add(buttonsPanel, new GridBagConstraints(gridX++, 0, 1, 1, 0, 0, GridBagConstraints.CENTER, GridBagConstraints.NONE, insets, 0, 0)); } if (SwingConstants.CENTER == myButtonAlignment) { lrButtonsPanel.add(Box.createHorizontalGlue(), // right strut new GridBagConstraints(gridX, 0, 1, 1, 1, 0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, insets, 0, 0)); } myButtons = buttons.toArray(new JButton[buttons.size()]); } if (hasHelpToMoveToLeftSide) { JButton helpButton = new JButton(getHelpAction()); if (!UIUtil.isUnderDarcula()) { 
helpButton.putClientProperty("JButton.buttonType", "help"); helpButton.setText(""); } helpButton.setMargin(insets); helpButton.setToolTipText(ActionsBundle.actionDescription("HelpTopics")); panel.add(helpButton, BorderLayout.WEST); } panel.add(lrButtonsPanel, BorderLayout.CENTER); final DoNotAskOption askOption = myDoNotAsk; if (askOption != null) { myCheckBoxDoNotShowDialog = new JCheckBox(askOption.getDoNotShowMessage()); JComponent southPanel = panel; if (!askOption.canBeHidden()) { return southPanel; } final JPanel withCB = addDoNotShowCheckBox(southPanel, myCheckBoxDoNotShowDialog); myCheckBoxDoNotShowDialog.setSelected(!askOption.isToBeShown()); DialogUtil.registerMnemonic(myCheckBoxDoNotShowDialog, '&'); panel = withCB; } panel.setBorder(IdeBorderFactory.createEmptyBorder(new Insets(8, 0, 0, 0))); return panel; } private Action[] filter(Action[] actions) { ArrayList<Action> answer = new ArrayList<Action>(); for (Action action : actions) { if (action != null && (ApplicationInfo.contextHelpAvailable() || action != getHelpAction())) { answer.add(action); } } return answer.toArray(new Action[answer.size()]); } protected boolean toBeShown() { return !myCheckBoxDoNotShowDialog.isSelected(); } public boolean isTypeAheadEnabled() { return false; } public static JPanel addDoNotShowCheckBox(JComponent southPanel, JCheckBox checkBox) { final JPanel panel = new JPanel(new BorderLayout()); JPanel wrapper = new JPanel(new GridBagLayout()); wrapper.add(checkBox); panel.add(wrapper, BorderLayout.WEST); panel.add(southPanel, BorderLayout.EAST); checkBox.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 20)); return panel; } private boolean hasFocusedAction(Action[] actions) { for (Action action : actions) { if (action.getValue(FOCUSED_ACTION) != null && (Boolean)action.getValue(FOCUSED_ACTION)) { return true; } } return false; } private JPanel createButtons(Action[] actions, List<JButton> buttons) { if (!UISettings.getShadowInstance().ALLOW_MERGE_BUTTONS) { final 
List<Action> actionList = new ArrayList<Action>(); for (Action action : actions) { actionList.add(action); if (action instanceof OptionAction) { final Action[] options = ((OptionAction)action).getOptions(); actionList.addAll(Arrays.asList(options)); } } if (actionList.size() != actions.length) { actions = actionList.toArray(actionList.toArray(new Action[actionList.size()])); } } JPanel buttonsPanel = new JPanel(new GridLayout(1, actions.length, SystemInfo.isMacOSLeopard ? 0 : 5, 0)); for (final Action action : actions) { JButton button = createJButtonForAction(action); final Object value = action.getValue(Action.MNEMONIC_KEY); if (value instanceof Integer) { final int mnemonic = ((Integer)value).intValue(); final Object name = action.getValue(Action.NAME); if (mnemonic == 'Y' && "Yes".equals(name)) { myYesAction = action; } else if (mnemonic == 'N' && "No".equals(name)) { myNoAction = action; } button.setMnemonic(mnemonic); } if (action.getValue(FOCUSED_ACTION) != null) { myPreferredFocusedComponent = button; } buttons.add(button); buttonsPanel.add(button); } return buttonsPanel; } /** * Creates <code>JButton</code> for the specified action. If the button has not <code>null</code> * value for <code>DialogWrapper.DEFAULT_ACTION</code> key then the created button will be the * default one for the dialog. 
* * @param action action for the button * @return button with action specified * * @see com.intellij.openapi.ui.DialogWrapper#DEFAULT_ACTION */ protected JButton createJButtonForAction(Action action) { JButton button; if (action instanceof OptionAction && UISettings.getShadowInstance().ALLOW_MERGE_BUTTONS) { final Action[] options = ((OptionAction)action).getOptions(); button = new JBOptionButton(action, options); final JBOptionButton eachOptionsButton = (JBOptionButton)button; eachOptionsButton.setOkToProcessDefaultMnemonics(false); eachOptionsButton.setOptionTooltipText( "Press " + KeymapUtil.getKeystrokeText(SHOW_OPTION_KEYSTROKE) + " to expand or use a mnemonic of a contained action"); myOptionsButtons.add(eachOptionsButton); final Set<JBOptionButton.OptionInfo> infos = eachOptionsButton.getOptionInfos(); for (final JBOptionButton.OptionInfo eachInfo : infos) { if (eachInfo.getMnemonic() >=0) { final CustomShortcutSet sc = new CustomShortcutSet(KeyStroke.getKeyStroke("alt pressed " + Character.valueOf((char)eachInfo.getMnemonic()))); new AnAction() { @Override public void actionPerformed(AnActionEvent e) { final JBOptionButton buttonToActivate = eachInfo.getButton(); buttonToActivate.showPopup(eachInfo.getAction(), true); } }.registerCustomShortcutSet(sc, getPeer().getRootPane()); } } } else { button = new JButton(action); } String text = button.getText(); if (SystemInfo.isMac) { button.putClientProperty("JButton.buttonType", "text"); } if (text != null) { int mnemonic = 0; StringBuilder plainText = new StringBuilder(); for (int i = 0; i < text.length(); i++) { char ch = text.charAt(i); if (ch == '_' || ch == '&') { i++; if (i >= text.length()) { break; } ch = text.charAt(i); if (ch != '_' && ch != '&') { // Mnemonic is case insensitive. 
int vk = ch; if (vk >= 'a' && vk <= 'z') { vk -= 'a' - 'A'; } mnemonic = vk; } } plainText.append(ch); } button.setText(plainText.toString()); final Object name = action.getValue(Action.NAME); if (mnemonic == KeyEvent.VK_Y && "Yes".equals(name)) { myYesAction = action; } else if (mnemonic == KeyEvent.VK_N && "No".equals(name)) { myNoAction = action; } button.setMnemonic(mnemonic); } setMargin(button); if (action.getValue(DEFAULT_ACTION) != null) { if (myPeer != null && !myPeer.isHeadless()) { getRootPane().setDefaultButton(button); } } return button; } private void setMargin(JButton button) { // Aqua LnF does a good job of setting proper margin between buttons. Setting them specifically causes them be 'square' style instead of // 'rounded', which is expected by apple users. if (!SystemInfo.isMac) { if (myButtonMargins == null) { return; } button.setMargin(myButtonMargins); } } protected DialogWrapperPeer createPeer(final Component parent, final boolean canBeParent) { return DialogWrapperPeerFactory.getInstance().createPeer(this, parent, canBeParent); } protected DialogWrapperPeer createPeer(boolean canBeParent, boolean toolkitModalIfPossible) { return DialogWrapperPeerFactory.getInstance().createPeer(this, canBeParent, toolkitModalIfPossible); } protected DialogWrapperPeer createPeer(final Project project, final boolean canBeParent) { return DialogWrapperPeerFactory.getInstance().createPeer(this, project, canBeParent); } @Nullable protected JComponent createTitlePane() { return null; } /** * Factory method. It creates the panel located at the * north of the dialog's content pane. The implementation can return <code>null</code> * value. In this case there will be no input panel. * * @return north panel */ @Nullable protected JComponent createNorthPanel() { return null; } /** * Factory method. It creates panel with dialog options. Options panel is located at the * center of the dialog's content pane. The implementation can return <code>null</code> * value. 
In this case there will be no options panel. * * @return center panel */ @Nullable protected abstract JComponent createCenterPanel(); /** * @see java.awt.Window#toFront() */ public void toFront() { myPeer.toFront(); } /** * @see java.awt.Window#toBack() */ public void toBack() { myPeer.toBack(); } /** * Dispose the wrapped and releases all resources allocated be the wrapper to help * more effecient garbage collection. You should never invoke this method twice or * invoke any method of the wrapper after invocation of <code>dispose</code>. * * @throws IllegalStateException if the dialog is disposed not on the event dispatch thread */ protected void dispose() { ensureEventDispatchThread(); myErrorTextAlarm.cancelAllRequests(); myValidationAlarm.cancelAllRequests(); myDisposed = true; if (myButtons != null) { for (JButton button : myButtons) { button.setAction(null); // avoid memory leak via KeyboardManager } } final JRootPane rootPane = getRootPane(); // if rootPane = null, dialog has already been disposed if (rootPane != null) { unregisterKeyboardActions(rootPane); if (myActualSize != null) { setSize(myActualSize.width, myActualSize.height); } myPeer.dispose(); } } public static void unregisterKeyboardActions(final JRootPane rootPane) { new AwtVisitor(rootPane) { public boolean visit(final Component component) { if (component instanceof JComponent) { final JComponent eachComp = (JComponent)component; final ActionMap actionMap = eachComp.getActionMap(); final KeyStroke[] strokes = eachComp.getRegisteredKeyStrokes(); for (KeyStroke eachStroke : strokes) { boolean remove = true; if (actionMap != null) { for (int i = 0; i < 3; i++) { final InputMap inputMap = eachComp.getInputMap(i); final Object key = inputMap.get(eachStroke); if (key != null) { final Action action = actionMap.get(key); if (action instanceof UIResource) remove = false; } } } if (remove) eachComp.unregisterKeyboardAction(eachStroke); } } return false; } }; } /** * This method is invoked by default 
implementation of "Cancel" action. It just closes dialog * with <code>CANCEL_EXIT_CODE</code>. This is convenient place to override functionality of "Cancel" action. * Note that the method does nothing if "Cancel" action isn't enabled. */ public void doCancelAction() { processDoNotAskOnCancel(); if (getCancelAction().isEnabled()) { close(CANCEL_EXIT_CODE); } } private void processDoNotAskOnCancel() { if (myDoNotAsk != null) { if (myDoNotAsk.shouldSaveOptionsOnCancel() && myDoNotAsk.canBeHidden()) { myDoNotAsk.setToBeShown(toBeShown(), CANCEL_EXIT_CODE); } } } /** * You can use this method if you want to know by which event this actions got triggered. It is called only if * the cancel action was triggered by some input event, <code>doCancelAction</code> is called otherwise. * * @param source AWT event * @see #doCancelAction */ public void doCancelAction(AWTEvent source) { doCancelAction(); } /** * Programmatically perform a "click" of default dialog's button. The method does * nothing if the dialog has no default button. */ public void clickDefaultButton() { JButton button = getRootPane().getDefaultButton(); if (button != null) { button.doClick(); } } /** * This method is invoked by default implementation of "OK" action. It just closes dialog * with <code>OK_EXIT_CODE</code>. This is convenient place to override functionality of "OK" action. * Note that the method does nothing if "OK" action isn't enabled. */ protected void doOKAction() { processDoNotAskOnOk(OK_EXIT_CODE); if (getOKAction().isEnabled()) { close(OK_EXIT_CODE); } } protected void processDoNotAskOnOk(int exitCode) { if (myDoNotAsk != null) { if (myDoNotAsk.canBeHidden()) { myDoNotAsk.setToBeShown(toBeShown(), exitCode); } } } /** * @return whether the native window cross button closes the window or not. * <code>true</code> means that cross performs hide or dispose of the dialog. 
*/ public boolean shouldCloseOnCross() { return myCrossClosesWindow; } /** * This is factory method which creates action of dialog. Each action is represented * by <code>JButton</code> which is created by <code>createJButtonForAction(Action)</code> * method. These buttons are places into panel which is created by <code>createButtonsPanel</code> * method. Therefore you have enough ways to customise the dialog by overriding of * <code>createActions()</code>, <code>createButtonsPanel()</code> and * </code>createJButtonForAction(Action)</code> methods. By default the <code>createActions()</code> * method returns "OK" and "Cancel" action. The help action is automatically added is if * {@link #getHelpId()} returns non null value. * * @return dialog actions * * @see #createSouthPanel * @see #createJButtonForAction */ protected Action[] createActions() { if (getHelpId() == null) { if (SystemInfo.isMac) { return new Action[]{getCancelAction(), getOKAction()}; } return new Action[]{getOKAction(), getCancelAction()}; } else { if (SystemInfo.isMac) { return new Action[]{getHelpAction(), getCancelAction(), getOKAction()}; } return new Action[]{getOKAction(), getCancelAction(), getHelpAction()}; } } protected Action[] createLeftSideActions() { return new Action[0]; } /** * @return default implementation of "OK" action. This action just invokes * <code>doOKAction()</code> method. * @see #doOKAction */ protected Action getOKAction() { return myOKAction; } /** * @return default implementation of "Cancel" action. This action just invokes * <code>doCancelAction()</code> method. * @see #doCancelAction */ protected Action getCancelAction() { return myCancelAction; } /** * @return default implementation of "Help" action. This action just invokes * <code>doHelpAction()</code> method. 
* @see #doHelpAction */ protected Action getHelpAction() { return myHelpAction; } protected boolean isProgressDialog() { return false; } public final boolean isModalProgress() { return isProgressDialog(); } /** * Returns content pane * * @return content pane * @see javax.swing.JDialog#getContentPane */ public Container getContentPane() { assert myPeer != null; return myPeer.getContentPane(); } /** * @see javax.swing.JDialog#validate */ public void validate() { myPeer.validate(); } /** * @see javax.swing.JDialog#repaint */ public void repaint() { myPeer.repaint(); } /** * This is factory method. It returns key for installation into the dimension service. * If this method returns <code>null</code> then the component does not require installation * into dimension service. This default implementation returns <code>null</code>. * * @return dimension service key */ @Nullable @NonNls protected String getDimensionServiceKey() { return null; } @Nullable public final String getDimensionKey() { return getDimensionServiceKey(); } public int getExitCode() { return myExitCode; } /** * @return component which should be focused when the dialog appears * on the screen. */ @Nullable public JComponent getPreferredFocusedComponent() { return SystemInfo.isMac ? myPreferredFocusedComponent : null; } /** * @return horizontal stretch of the dialog. It means that the dialog's horizontal size is * the product of horizontal stretch by horizontal size of packed dialog. The default value * is <code>1.0f</code> */ public final float getHorizontalStretch() { return myHorizontalStretch; } /** * @return vertical stretch of the dialog. It means that the dialog's vertical size is * the product of vertical stretch by vertical size of packed dialog. 
The default value * is <code>1.0f</code> */ public final float getVerticalStretch() { return myVerticalStretch; } protected final void setHorizontalStretch(float hStretch) { myHorizontalStretch = hStretch; } protected final void setVerticalStretch(float vStretch) { myVerticalStretch = vStretch; } /** * @see java.awt.Window#getOwner * @return window owner */ public Window getOwner() { return myPeer.getOwner(); } public Window getWindow() { return myPeer.getWindow(); } /** * @see javax.swing.JDialog#getRootPane * @return root pane */ public JRootPane getRootPane() { return myPeer.getRootPane(); } /** * @see java.awt.Window#getSize * @return dialog size */ public Dimension getSize() { return myPeer.getSize(); } /** * @see java.awt.Dialog#getTitle * @return dialog title */ public String getTitle() { return myPeer.getTitle(); } protected void init() { myErrorText = new ErrorText(); myErrorText.setVisible(false); final JPanel root = new JPanel(new BorderLayout()); //{ // @Override // public void paint(Graphics g) { // if (ApplicationManager.getApplication() != null) { // UISettings.setupAntialiasing(g); // } // super.paint(g); // } //}; myPeer.setContentPane(root); final CustomShortcutSet sc = new CustomShortcutSet(SHOW_OPTION_KEYSTROKE); final AnAction toggleShowOptions = new AnAction() { @Override public void actionPerformed(AnActionEvent e) { expandNextOptionButton(); } }; toggleShowOptions.registerCustomShortcutSet(sc, root); final JPanel northSection = new JPanel(new BorderLayout()); root.add(northSection, BorderLayout.NORTH); JComponent titlePane = createTitlePane(); if (titlePane != null) { northSection.add(titlePane, BorderLayout.CENTER); } JComponent centerSection = new JPanel(new BorderLayout()); root.add(centerSection, BorderLayout.CENTER); root.setBorder(createContentPaneBorder()); final JComponent n = createNorthPanel(); if (n != null) { centerSection.add(n, BorderLayout.NORTH); } final JComponent c = createCenterPanel(); if (c != null) { 
centerSection.add(c, BorderLayout.CENTER); myErrorPane = c; } if (myErrorPane == null) { myErrorPane = root; } final JPanel southSection = new JPanel(new BorderLayout()); root.add(southSection, BorderLayout.SOUTH); southSection.add(myErrorText, BorderLayout.CENTER); final JComponent south = createSouthPanel(); if (south != null) { southSection.add(south, BorderLayout.SOUTH); } new MnemonicHelper().register(root); if (!postponeValidation()) { startTrackingValidation(); } if (SystemInfo.isWindows) { installEnterHook(root); } } private static void installEnterHook(JComponent root) { new AnAction() { @Override public void actionPerformed(AnActionEvent e) { final Component owner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner(); if (owner instanceof JButton && owner.isEnabled()) { ((JButton)owner).doClick(); } } @Override public void update(AnActionEvent e) { final Component owner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner(); e.getPresentation().setEnabled((owner instanceof JButton && owner.isEnabled())); } }.registerCustomShortcutSet(CustomShortcutSet.fromString("ENTER"), root); } private void expandNextOptionButton() { if (myCurrentOptionsButtonIndex > 0) { myOptionsButtons.get(myCurrentOptionsButtonIndex).closePopup(); myCurrentOptionsButtonIndex++; } else if (myOptionsButtons.size() > 0) { myCurrentOptionsButtonIndex = 0; } if (myCurrentOptionsButtonIndex >= 0 && myCurrentOptionsButtonIndex < myOptionsButtons.size()) { myOptionsButtons.get(myCurrentOptionsButtonIndex).showPopup(null, true); } } void startTrackingValidation() { SwingUtilities.invokeLater(new Runnable() { public void run() { if (!myValidationStarted && !myDisposed) { myValidationStarted = true; initValidation(); } } }); } protected final void initValidation() { myValidationAlarm.cancelAllRequests(); final Runnable validateRequest = new Runnable() { public void run() { if (myDisposed) return; final ValidationInfo result = doValidate(); if (result == 
null) { clearProblems(); } else { reportProblem(result); } if (!myDisposed) { initValidation(); } } }; if (getValidationThreadToUse() == Alarm.ThreadToUse.SWING_THREAD) { myValidationAlarm.addRequest(validateRequest, myValidationDelay, ModalityState.current()); } else { myValidationAlarm.addRequest(validateRequest, myValidationDelay); } } protected boolean isNorthStrictedToPreferredSize() { return true; } protected boolean isCenterStrictedToPreferredSize() { return false; } protected boolean isSouthStrictedToPreferredSize() { return true; } protected JComponent createContentPane() { return new JPanel(); } /** * @see java.awt.Window#pack */ public void pack() { myPeer.pack(); } public Dimension getPreferredSize() { return myPeer.getPreferredSize(); } /** * Sets horizontal alignment of dialog's the buttons. * * @param alignment alignment of the buttons. Acceptable values are * <code>SwingConstants.CENTER</code> and <code>SwingConstants.RIGHT</code>. * The <code>SwingConstants.RIGHT</code> is the default value. * @throws java.lang.IllegalArgumentException * if <code>alignment</code> isn't acceptable */ protected final void setButtonsAlignment(@MagicConstant(intValues = {SwingConstants.CENTER, SwingConstants.RIGHT}) int alignment) { if (SwingConstants.CENTER != alignment && SwingConstants.RIGHT != alignment) { throw new IllegalArgumentException("unknown alignment: " + alignment); } myButtonAlignment = alignment; } /** * Sets margin for command buttons ("OK", "Cancel", "Help"). * @param insets buttons margin */ public final void setButtonsMargin(@Nullable Insets insets) { myButtonMargins = insets; } public final void setCrossClosesWindow(boolean crossClosesWindow) { myCrossClosesWindow = crossClosesWindow; } protected final void setCancelButtonIcon(Icon icon) { // Setting icons causes buttons be 'square' style instead of // 'rounded', which is expected by apple users. 
if (!SystemInfo.isMac) { myCancelAction.putValue(Action.SMALL_ICON, icon); } } protected final void setCancelButtonText(String text) { myCancelAction.putValue(Action.NAME, text); } public void setModal(boolean modal) { myPeer.setModal(modal); } public boolean isModal() { return myPeer.isModal(); } protected void setOKActionEnabled(boolean isEnabled) { myOKAction.setEnabled(isEnabled); } protected final void setOKButtonIcon(Icon icon) { // Setting icons causes buttons be 'square' style instead of // 'rounded', which is expected by apple users. if (!SystemInfo.isMac) { myOKAction.putValue(Action.SMALL_ICON, icon); } } protected final void setOKButtonText(String text) { myOKAction.putValue(Action.NAME, text); } protected final void setOKButtonMnemonic(int c) { myOKAction.putValue(Action.MNEMONIC_KEY, c); } /** * @return the help identifier or null if no help is available. */ @Nullable protected String getHelpId() { return null; } /** * This method is invoked by default implementation of "Help" action. * This is convenient place to override functionality of "Help" action. * Note that the method does nothing if "Help" action isn't enabled. * <p/> * The default implementation shows the help page with id returned * by the method {@link #getHelpId()}. If that method returns null, * the message box with message "no help available" is shown. 
*/ protected void doHelpAction() { if (myHelpAction.isEnabled()) { String helpId = getHelpId(); if (helpId != null) { HelpManager.getInstance().invokeHelp(helpId); } else { Messages.showMessageDialog(getContentPane(), UIBundle.message("there.is.no.help.for.this.dialog.error.message"), UIBundle.message("no.help.available.dialog.title"), Messages.getInformationIcon()); } } } public boolean isOK() { return getExitCode() == OK_EXIT_CODE; } public boolean isOKActionEnabled() { return myOKAction.isEnabled(); } /** * @see java.awt.Component#isVisible * @return <code>true</code> if and only if visible */ public boolean isVisible() { return myPeer.isVisible(); } /** * @see java.awt.Window#isShowing * @return <code>true</code> if and only if showing */ public boolean isShowing() { return myPeer.isShowing(); } /** * @param width width * @param height height * @see javax.swing.JDialog#setSize */ public void setSize(int width, int height) { myPeer.setSize(width, height); } /** * @param title title * @see javax.swing.JDialog#setTitle */ public void setTitle(String title) { myPeer.setTitle(title); } /** * @see javax.swing.JDialog#isResizable */ public void isResizable() { myPeer.isResizable(); } /** * @param resizable is resizable * @see javax.swing.JDialog#setResizable */ public void setResizable(boolean resizable) { myPeer.setResizable(resizable); } /** * @see javax.swing.JDialog#getLocation * @return dialog location */ public Point getLocation() { return myPeer.getLocation(); } /** * @param p new dialog location * @see javax.swing.JDialog#setLocation(Point) */ public void setLocation(Point p) { myPeer.setLocation(p); } /** * @param x x * @param y y * @see javax.swing.JDialog#setLocation(int,int) */ public void setLocation(int x, int y) { myPeer.setLocation(x, y); } public void centerRelativeToParent() { myPeer.centerInParent(); } /** * Show the dialog * * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ public void show() { 
showAndGetOk(); } public boolean showAndGet() { show(); return isOK(); } /** * You need this method ONLY for NON-MODAL dialogs. Otherwise, use {@link #show()} or {@link #showAndGet()}. * @return result callback */ public AsyncResult<Boolean> showAndGetOk() { final AsyncResult<Boolean> result = new AsyncResult<Boolean>(); ensureEventDispatchThread(); registerKeyboardShortcuts(); final Disposable uiParent = Disposer.get("ui"); if (uiParent != null) { // may be null if no app yet (license agreement) Disposer.register(uiParent, myDisposable); // ensure everything is disposed on app quit } myPeer.show().doWhenProcessed(new Runnable() { public void run() { result.setDone(isOK()); } }); return result; } /** * @return Location in absolute coordinates which is used when dialog has no dimension service key or no position was stored yet. * Can return null. In that case dialog will be centered relative to its owner. */ @Nullable public Point getInitialLocation() { return myInitialLocationCallback == null ? 
null : myInitialLocationCallback.compute(); } public void setInitialLocationCallback(Computable<Point> callback) { myInitialLocationCallback = callback; } private void registerKeyboardShortcuts() { ActionListener cancelKeyboardAction = new ActionListener() { public void actionPerformed(ActionEvent e) { MenuSelectionManager menuSelectionManager = MenuSelectionManager.defaultManager(); MenuElement[] selectedPath = menuSelectionManager.getSelectedPath(); if (selectedPath.length > 0) { // hide popup menu if any menuSelectionManager.clearSelectedPath(); } else { final StackingPopupDispatcher popupDispatcher = StackingPopupDispatcher.getInstance(); if (ApplicationManager.getApplication() == null || (popupDispatcher != null && !popupDispatcher.isPopupFocused())) { doCancelAction(e); } } } }; final JRootPane rootPane = getRootPane(); if (rootPane == null) return; rootPane.registerKeyboardAction(cancelKeyboardAction, KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), JComponent.WHEN_IN_FOCUSED_WINDOW); registerForEveryKeyboardShortcut(cancelKeyboardAction, CommonShortcuts.getCloseActiveWindow()); if (ApplicationInfo.contextHelpAvailable()) { ActionListener helpAction = new ActionListener() { public void actionPerformed(ActionEvent e) { doHelpAction(); } }; registerForEveryKeyboardShortcut(helpAction, CommonShortcuts.getContextHelp()); rootPane.registerKeyboardAction(helpAction, KeyStroke.getKeyStroke(KeyEvent.VK_HELP, 0), JComponent.WHEN_IN_FOCUSED_WINDOW); } if (myButtons != null) { rootPane.registerKeyboardAction(new AbstractAction() { public void actionPerformed(ActionEvent e) { focusPreviousButton(); } }, KeyStroke.getKeyStroke(KeyEvent.VK_LEFT, 0), JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT); rootPane.registerKeyboardAction(new AbstractAction() { public void actionPerformed(ActionEvent e) { focusNextButton(); } }, KeyStroke.getKeyStroke(KeyEvent.VK_RIGHT, 0), JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT); } if (myYesAction != null) { 
rootPane.registerKeyboardAction(myYesAction, KeyStroke.getKeyStroke(KeyEvent.VK_Y, 0), JComponent.WHEN_IN_FOCUSED_WINDOW); } if (myNoAction != null) { rootPane.registerKeyboardAction(myNoAction, KeyStroke.getKeyStroke(KeyEvent.VK_N, 0), JComponent.WHEN_IN_FOCUSED_WINDOW); } } private void registerForEveryKeyboardShortcut(ActionListener action, ShortcutSet shortcuts) { for (Shortcut shortcut : shortcuts.getShortcuts()){ if (shortcut instanceof KeyboardShortcut) { KeyboardShortcut ks = (KeyboardShortcut)shortcut; KeyStroke first = ks.getFirstKeyStroke(); KeyStroke second = ks.getSecondKeyStroke(); if (second == null) { getRootPane().registerKeyboardAction(action, first, JComponent.WHEN_IN_FOCUSED_WINDOW); } } } } private void focusPreviousButton() { for (int i = 0; i < myButtons.length; i++) { if (myButtons[i].hasFocus()) { if (i == 0) { myButtons[myButtons.length - 1].requestFocus(); return; } myButtons[i - 1].requestFocus(); return; } } } private void focusNextButton() { for (int i = 0; i < myButtons.length; i++) { if (myButtons[i].hasFocus()) { if (i == myButtons.length - 1) { myButtons[0].requestFocus(); return; } myButtons[i + 1].requestFocus(); return; } } } public long getTypeAheadTimeoutMs() { return 0l; } public boolean isToDispatchTypeAhead() { return isOK(); } public static boolean isMultipleModalDialogs() { final Component c = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner(); if (c != null) { final DialogWrapper wrapper = findInstance(c); return wrapper != null && wrapper.getPeer().getCurrentModalEntities().length > 1; } return false; } /** * Base class for dialog wrapper actions that need to ensure that only * one action for the dialog is running. 
*/ protected abstract class DialogWrapperAction extends AbstractAction { /** * The constructor * * @param name the action name (see {@link Action#NAME}) */ protected DialogWrapperAction(String name) { putValue(NAME, name); } /** * {@inheritDoc} */ public void actionPerformed(ActionEvent e) { if (myClosed) return; if (myPerformAction) return; try { myPerformAction = true; doAction(e); } finally { myPerformAction = false; } } /** * Do actual work for the action. This method is called only if no other action * is performed in parallel (checked using {@link com.intellij.openapi.ui.DialogWrapper#myPerformAction}), * and dialog is active (checked using {@link com.intellij.openapi.ui.DialogWrapper#myClosed}) * * @param e action */ protected abstract void doAction(ActionEvent e); } protected class OkAction extends DialogWrapperAction { protected OkAction() { super(CommonBundle.getOkButtonText()); putValue(DEFAULT_ACTION, Boolean.TRUE); } @Override protected void doAction(ActionEvent e) { ValidationInfo info = doValidate(); if (info != null) { if (info.component != null && info.component.isVisible()) { IdeFocusManager.getInstance(null).requestFocus(info.component, true); } startTrackingValidation(); return; } doOKAction(); } } protected class CancelAction extends DialogWrapperAction { private CancelAction() { super(CommonBundle.getCancelButtonText()); } @Override protected void doAction(ActionEvent e) { doCancelAction(); } } /** * The action that just closes dialog with the specified exit code * (like the default behavior of the actions "Ok" and "Cancel"). 
*/ protected class DialogWrapperExitAction extends DialogWrapperAction { /** * The exit code for the action */ protected final int myExitCode; /** * The constructor * * @param name the action name * @param exitCode the exit code for dialog */ public DialogWrapperExitAction(String name, int exitCode) { super(name); myExitCode = exitCode; } @Override protected void doAction(ActionEvent e) { if (isEnabled()) { close(myExitCode); } } } private class HelpAction extends AbstractAction { private HelpAction() { putValue(NAME, CommonBundle.getHelpButtonText()); } public void actionPerformed(ActionEvent e) { doHelpAction(); } } private Dimension myActualSize = null; private String myLastErrorText = null; protected final void setErrorText(@Nullable final String text) { if (Comparing.equal(myLastErrorText, text)) { return; } myLastErrorText = text; if (myActualSize == null && !StringUtil.isEmpty(text)) { myActualSize = getSize(); } myErrorTextAlarm.cancelAllRequests(); myErrorTextAlarm.addRequest(new Runnable() { public void run() { final String text = myLastErrorText; myErrorText.setError(text); if (text != null && text.length() > myMaxErrorTextLength) { // during the first update, resize only for growing. during a subsequent update, // if error text becomes longer, the min size calculation may not calculate enough size, // so we pack() even though it could cause the dialog to become smaller. 
if (myMaxErrorTextLength == 0) { updateHeightForErrorText(); } else { //if (getRootPane() != null) myPeer.pack(); } myMaxErrorTextLength = text.length(); updateHeightForErrorText(); } myErrorText.repaint(); if (StringUtil.isEmpty(text) && myActualSize != null) { resizeWithAnimation(myActualSize); myMaxErrorTextLength = 0; } } }, 300, null); } @Nullable public static DialogWrapper findInstance(Component c) { while (c != null){ if (c instanceof DialogWrapperDialog) { return ((DialogWrapperDialog)c).getDialogWrapper(); } c = c.getParent(); } return null; } private void resizeWithAnimation(final Dimension size) { //todo[kb]: fix this PITA myResizeInProgress = true; if (!Registry.is("enable.animation.on.dialogs")) { setSize(size.width, size.height); myResizeInProgress = false; return; } new Thread("DialogWrapper resizer") { int time = 200; int steps = 7; @Override public void run() { int step = 0; final Dimension cur = getSize(); int h = (size.height - cur.height) / steps; int w = (size.width - cur.width) / steps; while (step++ < steps) { setSize(cur.width + w * step, cur.height + h*step); try { //noinspection BusyWait sleep(time / steps); } catch (InterruptedException ignore) {} } setSize(size.width, size.height); //repaint(); if (myErrorText.shouldBeVisible()) { myErrorText.setVisible(true); } myResizeInProgress = false; } }.start(); } private void updateHeightForErrorText() { Dimension errorSize = myErrorText.getPreferredSize(); resizeWithAnimation(new Dimension(Math.max(myActualSize.width, errorSize.width + 40), myActualSize.height + errorSize.height + 10)); } private static class ErrorText extends JPanel { private final JLabel myLabel = new JLabel(); private Dimension myPrefSize; private String myText; private ErrorText() { setLayout(new BorderLayout()); JBScrollPane pane = new JBScrollPane(myLabel, ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); pane.setBorder(IdeBorderFactory.createEmptyBorder(0)); 
pane.setBackground(null); pane.getViewport().setBackground(null); pane.setOpaque(false); add(pane, BorderLayout.CENTER); } public void setError(String text) { final Dimension oldSize = getPreferredSize(); myText = text; if (text == null) { myLabel.setText(""); myLabel.setIcon(null); setVisible(false); setBorder(null); } else { myLabel.setText("<html><body><font color=red><left>" + text + "</left></b></font></body></html>"); myLabel.setIcon(AllIcons.Actions.Lightning); myLabel.setBorder(new EmptyBorder(4, 10, 0, 2)); setVisible(true); } final Dimension size = getPreferredSize(); if (oldSize.height < size.height) { revalidate(); } } public boolean shouldBeVisible() { return !StringUtil.isEmpty(myText); } public boolean isTextSet(@Nullable String text) { return StringUtil.equals(text, myText); } public Dimension getPreferredSize() { return myPrefSize == null ? myLabel.getPreferredSize() : myPrefSize; } } public final DialogWrapperPeer getPeer() { return myPeer; } /** * Ensure that dialog is used from even dispatch thread. * * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ private static void ensureEventDispatchThread() { if (!EventQueue.isDispatchThread()) { throw new IllegalStateException("The DialogWrapper can be used only on event dispatch thread."); } } public final Disposable getDisposable() { return myDisposable; } public interface DoNotAskOption { boolean isToBeShown(); void setToBeShown(boolean value, int exitCode); /** * Should be 'true' for checkbox to be visible. 
*/ boolean canBeHidden(); boolean shouldSaveOptionsOnCancel(); String getDoNotShowMessage(); } private ErrorPaintingType getErrorPaintingType() { return ErrorPaintingType.SIGN; } private class ErrorPainter extends AbstractPainter { private ValidationInfo myInfo; @Override public void executePaint(Component component, Graphics2D g) { if (myInfo != null && myInfo.component != null) { final JComponent comp = myInfo.component; final int w = comp.getWidth(); final int h = comp.getHeight(); Point p; switch (getErrorPaintingType()) { case DOT: p = SwingUtilities.convertPoint(comp, 2, h/2 , component); AllIcons.Ide.ErrorPoint.paintIcon(component, g, p.x, p.y); break; case SIGN: p = SwingUtilities.convertPoint(comp, w, 0, component); AllIcons.Ide.ErrorSign.paintIcon(component, g, p.x - 8, p.y - 8); break; case LINE: p = SwingUtilities.convertPoint(comp, 0, h, component); final GraphicsConfig config = new GraphicsConfig(g); g.setColor(new Color(255, 0, 0 , 100)); g.fillRoundRect(p.x, p.y-2, w, 4, 2, 2); config.restore(); break; } } } @Override public boolean needsRepaint() { return true; } public void setValidationInfo(@Nullable ValidationInfo info) { myInfo = info; } } private static enum ErrorPaintingType {DOT, SIGN, LINE} }
platform/platform-api/src/com/intellij/openapi/ui/DialogWrapper.java
/* * Copyright 2000-2012 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.ui; import com.intellij.CommonBundle; import com.intellij.icons.AllIcons; import com.intellij.ide.ui.UISettings; import com.intellij.idea.ActionsBundle; import com.intellij.openapi.Disposable; import com.intellij.openapi.MnemonicHelper; import com.intellij.openapi.actionSystem.*; import com.intellij.openapi.application.ApplicationInfo; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.help.HelpManager; import com.intellij.openapi.keymap.KeymapUtil; import com.intellij.openapi.project.Project; import com.intellij.openapi.ui.popup.StackingPopupDispatcher; import com.intellij.openapi.util.*; import com.intellij.openapi.util.registry.Registry; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.wm.IdeFocusManager; import com.intellij.openapi.wm.IdeGlassPaneUtil; import com.intellij.ui.IdeBorderFactory; import com.intellij.ui.UIBundle; import com.intellij.ui.components.JBOptionButton; import com.intellij.ui.components.JBScrollPane; import com.intellij.util.Alarm; import com.intellij.util.ArrayUtil; import com.intellij.util.ui.AwtVisitor; import com.intellij.util.ui.DialogUtil; import com.intellij.util.ui.UIUtil; import org.intellij.lang.annotations.MagicConstant; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; 
import org.jetbrains.annotations.Nullable; import javax.swing.*; import javax.swing.border.Border; import javax.swing.border.EmptyBorder; import javax.swing.plaf.UIResource; import java.awt.*; import java.awt.event.*; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Set; /** * The dialog wrapper. The dialog wrapper could be used only on event dispatch thread. * In case when the dialog must be created from other threads use * {@link EventQueue#invokeLater(Runnable)} or {@link EventQueue#invokeAndWait(Runnable)}. */ @SuppressWarnings({"SSBasedInspection", "MethodMayBeStatic", "UnusedDeclaration"}) public abstract class DialogWrapper { /** * The default exit code for "OK" action. */ public static final int OK_EXIT_CODE = 0; /** * The default exit code for "Cancel" action. */ public static final int CANCEL_EXIT_CODE = 1; /** * The default exit code for "Close" action. Equal to cancel. */ public static final int CLOSE_EXIT_CODE = CANCEL_EXIT_CODE; /** * If you use your custom exit codes you have have to start them with * this constant. */ public static final int NEXT_USER_EXIT_CODE = 2; /** * If your action returned by <code>createActions</code> method has non * <code>null</code> value for this key, then the button that corresponds to the action will be the * default button for the dialog. It's true if you don't change this behaviour * of <code>createJButtonForAction(Action)</code> method. */ @NonNls public static final String DEFAULT_ACTION = "DefaultAction"; @NonNls public static final String FOCUSED_ACTION = "FocusedAction"; private static final KeyStroke SHOW_OPTION_KEYSTROKE = KeyStroke.getKeyStroke(KeyEvent.VK_ENTER, InputEvent.ALT_MASK | InputEvent.SHIFT_MASK); private final DialogWrapperPeer myPeer; private int myExitCode = CANCEL_EXIT_CODE; /** * The shared instance of default border for dialog's content pane. 
*/ public static final Border ourDefaultBorder = new EmptyBorder(UIUtil.PANEL_REGULAR_INSETS); private float myHorizontalStretch = 1.0f; private float myVerticalStretch = 1.0f; /** * Defines horizontal alignment of buttons. */ private int myButtonAlignment = SwingConstants.RIGHT; private boolean myCrossClosesWindow = true; private Insets myButtonMargins = new Insets(2, 16, 2, 16); protected Action myOKAction; protected Action myCancelAction; protected Action myHelpAction; private JButton[] myButtons; private boolean myClosed = false; protected boolean myPerformAction = false; private Action myYesAction = null; private Action myNoAction = null; protected JCheckBox myCheckBoxDoNotShowDialog; @Nullable private DoNotAskOption myDoNotAsk; private JComponent myPreferredFocusedComponent; private Computable<Point> myInitialLocationCallback; protected final Disposable myDisposable = new Disposable() { public String toString() { return DialogWrapper.this.toString(); } public void dispose() { DialogWrapper.this.dispose(); } }; private List<JBOptionButton> myOptionsButtons = new ArrayList<JBOptionButton>(); private int myCurrentOptionsButtonIndex = -1; private boolean myResizeInProgress = false; private ComponentAdapter myResizeListener; protected String getDoNotShowMessage() { return CommonBundle.message("dialog.options.do.not.show"); } public void setDoNotAskOption(@Nullable DoNotAskOption doNotAsk) { myDoNotAsk = doNotAsk; } private ErrorText myErrorText; private int myMaxErrorTextLength; private final Alarm myErrorTextAlarm = new Alarm(); /** * Creates modal <code>DialogWrapper</code>. The currently active window will be the dialog's parent. * * @param project parent window for the dialog will be calculated based on focused window for the * specified <code>project</code>. This parameter can be <code>null</code>. In this case parent window * will be suggested based on current focused window. * @param canBeParent specifies whether the dialog can be parent for other windows. 
This parameter is used * by <code>WindowManager</code>. * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ protected DialogWrapper(@Nullable Project project, boolean canBeParent) { myPeer = createPeer(project, canBeParent); final Window window = myPeer.getWindow(); if (window != null) { myResizeListener = new ComponentAdapter() { @Override public void componentResized(ComponentEvent e) { if (!myResizeInProgress) { myActualSize = myPeer.getSize(); if (myErrorText.isVisible()) { myActualSize.height -= myErrorText.getHeight() + 10; } } } }; window.addComponentListener(myResizeListener); } createDefaultActions(); } /** * Creates modal <code>DialogWrapper</code> that can be parent for other windows. * The currently active window will be the dialog's parent. * * @param project parent window for the dialog will be calculated based on focused window for the * specified <code>project</code>. This parameter can be <code>null</code>. In this case parent window * will be suggested based on current focused window. * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread * @see com.intellij.openapi.ui.DialogWrapper#DialogWrapper(com.intellij.openapi.project.Project, boolean) */ protected DialogWrapper(@Nullable Project project) { this(project, true); } /** * Creates modal <code>DialogWrapper</code>. The currently active window will be the dialog's parent. * * @param canBeParent specifies whether the dialog can be parent for other windows. This parameter is used * by <code>WindowManager</code>. 
* @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ protected DialogWrapper(boolean canBeParent) { this((Project)null, canBeParent); } protected DialogWrapper(boolean canBeParent, boolean toolkitModalIfPossible) { ensureEventDispatchThread(); myPeer = createPeer(canBeParent, toolkitModalIfPossible); createDefaultActions(); } /** * @param parent parent component which is used to calculate heavy weight window ancestor. * <code>parent</code> cannot be <code>null</code> and must be showing. * @param canBeParent can be parent * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ protected DialogWrapper(@NotNull Component parent, boolean canBeParent) { ensureEventDispatchThread(); myPeer = createPeer(parent, canBeParent); createDefaultActions(); } //validation private final Alarm myValidationAlarm = new Alarm(getValidationThreadToUse(), myDisposable); protected Alarm.ThreadToUse getValidationThreadToUse() { return Alarm.ThreadToUse.SWING_THREAD; } private int myValidationDelay = 300; private boolean myDisposed = false; private boolean myValidationStarted = false; private final ErrorPainter myErrorPainter = new ErrorPainter(); private JComponent myErrorPane; private boolean myErrorPainterInstalled = false; /** * Allows to postpone first start of validation * * @return <code>false</code> if start validation in <code>init()</code> method */ protected boolean postponeValidation() { return true; } /** * Validates a user input and returns <code>null</code> if everything is fine * or returns a problem description with component where is the problem has been found. * * @return <code>null</code> if everything is OK or a problem descriptor */ @Nullable protected ValidationInfo doValidate() { return null; } public void setValidationDelay(int delay) { myValidationDelay = delay; } private void reportProblem(final ValidationInfo info) { installErrorPainter(); myErrorPainter.setValidationInfo(info); if (! 
myErrorText.isTextSet(info.message)) { SwingUtilities.invokeLater(new Runnable() { public void run() { if (myDisposed) return; setErrorText(info.message); myPeer.getRootPane().getGlassPane().repaint(); getOKAction().setEnabled(false); } }); } } private void installErrorPainter() { if (myErrorPainterInstalled) return; myErrorPainterInstalled = true; UIUtil.invokeLaterIfNeeded(new Runnable() { @Override public void run() { IdeGlassPaneUtil.installPainter(myErrorPane, myErrorPainter, myDisposable); } }); } private void clearProblems() { myErrorPainter.setValidationInfo(null); if (! myErrorText.isTextSet(null)) { SwingUtilities.invokeLater(new Runnable() { public void run() { if (myDisposed) return; setErrorText(null); myPeer.getRootPane().getGlassPane().repaint(); getOKAction().setEnabled(true); } }); } } protected void createDefaultActions() { myOKAction = new OkAction(); myCancelAction = new CancelAction(); myHelpAction = new HelpAction(); } public void setUndecorated(boolean undecorated) { myPeer.setUndecorated(undecorated); } public final void addMouseListener(MouseListener listener) { myPeer.addMouseListener(listener); } public final void addMouseListener(MouseMotionListener listener) { myPeer.addMouseListener(listener); } public final void addKeyListener(KeyListener listener) { myPeer.addKeyListener(listener); } /** * Closes and disposes the dialog and sets the specified exit code. 
* * @param exitCode exit code * @param isOk is OK * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ public final void close(int exitCode, boolean isOk) { ensureEventDispatchThread(); if (myClosed) return; myClosed = true; myExitCode = exitCode; Window window = getWindow(); if (window != null && myResizeListener != null) { window.removeComponentListener(myResizeListener); myResizeListener = null; } if (isOk) { processDoNotAskOnOk(exitCode); } else { processDoNotAskOnCancel(); } Disposer.dispose(myDisposable); } public final void close(int exitCode) { close(exitCode, exitCode != CANCEL_EXIT_CODE); } /** * Factory method. It creates border for dialog's content pane. By default content * pane has has empty border with <code>(8,12,8,12)</code> insets. The subclasses can * return <code>null</code> in overridden methods. In this case there will be no * any border in the content pane. * * @return content pane border */ @Nullable protected Border createContentPaneBorder() { return ourDefaultBorder; } /** * This is factory method. It creates the panel located at the south of the content pane. By default that * panel contains dialog's buttons. This default implementation uses <code>createActions()</code> * and <code>createJButtonForAction(Action)</code> methods to construct the panel. 
* * @return south panel */ @Nullable protected JComponent createSouthPanel() { Action[] actions = filter(createActions()); Action[] leftSideActions = createLeftSideActions(); List<JButton> buttons = new ArrayList<JButton>(); boolean hasHelpToMoveToLeftSide = false; if (UIUtil.isUnderAquaLookAndFeel() && Arrays.asList(actions).contains(getHelpAction())) { hasHelpToMoveToLeftSide = true; actions = ArrayUtil.remove(actions, getHelpAction()); } if (SystemInfo.isMac) { for (Action action : actions) { if (action instanceof MacOtherAction) { leftSideActions = ArrayUtil.append(leftSideActions, action); actions = ArrayUtil.remove(actions, action); break; } } } else if (UIUtil.isUnderGTKLookAndFeel() && Arrays.asList(actions).contains(getHelpAction())) { leftSideActions = ArrayUtil.append(leftSideActions, getHelpAction()); actions = ArrayUtil.remove(actions, getHelpAction()); } JPanel panel = new JPanel(new BorderLayout()); final JPanel lrButtonsPanel = new JPanel(new GridBagLayout()); final Insets insets = SystemInfo.isMacOSLeopard ? 
new Insets(0, 0, 0, 0) : new Insets(8, 0, 0, 0); if (actions.length > 0 || leftSideActions.length > 0) { int gridX = 0; if (leftSideActions.length > 0) { JPanel buttonsPanel = createButtons(leftSideActions, buttons); if (actions.length > 0) { buttonsPanel.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 20)); // leave some space between button groups } lrButtonsPanel.add(buttonsPanel, new GridBagConstraints(gridX++, 0, 1, 1, 0, 0, GridBagConstraints.CENTER, GridBagConstraints.NONE, insets, 0, 0)); } lrButtonsPanel.add(Box.createHorizontalGlue(), // left strut new GridBagConstraints(gridX++, 0, 1, 1, 1, 0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, insets, 0, 0)); if (actions.length > 0) { if (SystemInfo.isMac) { // move ok action to the right int okNdx = ArrayUtil.indexOf(actions, getOKAction()); if (okNdx >= 0 && okNdx != actions.length - 1) { actions = ArrayUtil.append(ArrayUtil.remove(actions, getOKAction()), getOKAction()); } // move cancel action to the left int cancelNdx = ArrayUtil.indexOf(actions, getCancelAction()); if (cancelNdx > 0) { actions = ArrayUtil.mergeArrays(new Action[] {getCancelAction()}, ArrayUtil.remove(actions, getCancelAction())); } /*if (!hasFocusedAction(actions)) { int ndx = ArrayUtil.find(actions, getCancelAction()); if (ndx >= 0) { actions[ndx].putValue(FOCUSED_ACTION, Boolean.TRUE); } }*/ } JPanel buttonsPanel = createButtons(actions, buttons); lrButtonsPanel.add(buttonsPanel, new GridBagConstraints(gridX++, 0, 1, 1, 0, 0, GridBagConstraints.CENTER, GridBagConstraints.NONE, insets, 0, 0)); } if (SwingConstants.CENTER == myButtonAlignment) { lrButtonsPanel.add(Box.createHorizontalGlue(), // right strut new GridBagConstraints(gridX, 0, 1, 1, 1, 0, GridBagConstraints.CENTER, GridBagConstraints.HORIZONTAL, insets, 0, 0)); } myButtons = buttons.toArray(new JButton[buttons.size()]); } if (hasHelpToMoveToLeftSide) { JButton helpButton = new JButton(getHelpAction()); if (!UIUtil.isUnderDarcula()) { 
helpButton.putClientProperty("JButton.buttonType", "help"); helpButton.setText(""); } helpButton.setMargin(insets); helpButton.setToolTipText(ActionsBundle.actionDescription("HelpTopics")); panel.add(helpButton, BorderLayout.WEST); } panel.add(lrButtonsPanel, BorderLayout.CENTER); final DoNotAskOption askOption = myDoNotAsk; if (askOption != null) { myCheckBoxDoNotShowDialog = new JCheckBox(askOption.getDoNotShowMessage()); JComponent southPanel = panel; if (!askOption.canBeHidden()) { return southPanel; } final JPanel withCB = addDoNotShowCheckBox(southPanel, myCheckBoxDoNotShowDialog); myCheckBoxDoNotShowDialog.setSelected(!askOption.isToBeShown()); DialogUtil.registerMnemonic(myCheckBoxDoNotShowDialog, '&'); panel = withCB; } panel.setBorder(IdeBorderFactory.createEmptyBorder(new Insets(8, 0, 0, 0))); return panel; } private Action[] filter(Action[] actions) { ArrayList<Action> answer = new ArrayList<Action>(); for (Action action : actions) { if (action != null && (ApplicationInfo.contextHelpAvailable() || action != getHelpAction())) { answer.add(action); } } return answer.toArray(new Action[answer.size()]); } protected boolean toBeShown() { return !myCheckBoxDoNotShowDialog.isSelected(); } public boolean isTypeAheadEnabled() { return false; } public static JPanel addDoNotShowCheckBox(JComponent southPanel, JCheckBox checkBox) { final JPanel panel = new JPanel(new BorderLayout()); JPanel wrapper = new JPanel(new GridBagLayout()); wrapper.add(checkBox); panel.add(wrapper, BorderLayout.WEST); panel.add(southPanel, BorderLayout.EAST); checkBox.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 20)); return panel; } private boolean hasFocusedAction(Action[] actions) { for (Action action : actions) { if (action.getValue(FOCUSED_ACTION) != null && (Boolean)action.getValue(FOCUSED_ACTION)) { return true; } } return false; } private JPanel createButtons(Action[] actions, List<JButton> buttons) { if (!UISettings.getShadowInstance().ALLOW_MERGE_BUTTONS) { final 
List<Action> actionList = new ArrayList<Action>(); for (Action action : actions) { actionList.add(action); if (action instanceof OptionAction) { final Action[] options = ((OptionAction)action).getOptions(); actionList.addAll(Arrays.asList(options)); } } if (actionList.size() != actions.length) { actions = actionList.toArray(actionList.toArray(new Action[actionList.size()])); } } JPanel buttonsPanel = new JPanel(new GridLayout(1, actions.length, SystemInfo.isMacOSLeopard ? 0 : 5, 0)); for (final Action action : actions) { JButton button = createJButtonForAction(action); final Object value = action.getValue(Action.MNEMONIC_KEY); if (value instanceof Integer) { final int mnemonic = ((Integer)value).intValue(); final Object name = action.getValue(Action.NAME); if (mnemonic == 'Y' && "Yes".equals(name)) { myYesAction = action; } else if (mnemonic == 'N' && "No".equals(name)) { myNoAction = action; } button.setMnemonic(mnemonic); } if (action.getValue(FOCUSED_ACTION) != null) { myPreferredFocusedComponent = button; } buttons.add(button); buttonsPanel.add(button); } return buttonsPanel; } /** * Creates <code>JButton</code> for the specified action. If the button has not <code>null</code> * value for <code>DialogWrapper.DEFAULT_ACTION</code> key then the created button will be the * default one for the dialog. 
* * @param action action for the button * @return button with action specified * * @see com.intellij.openapi.ui.DialogWrapper#DEFAULT_ACTION */ protected JButton createJButtonForAction(Action action) { JButton button; if (action instanceof OptionAction && UISettings.getShadowInstance().ALLOW_MERGE_BUTTONS) { final Action[] options = ((OptionAction)action).getOptions(); button = new JBOptionButton(action, options); final JBOptionButton eachOptionsButton = (JBOptionButton)button; eachOptionsButton.setOkToProcessDefaultMnemonics(false); eachOptionsButton.setOptionTooltipText( "Press " + KeymapUtil.getKeystrokeText(SHOW_OPTION_KEYSTROKE) + " to expand or use a mnemonic of a contained action"); myOptionsButtons.add(eachOptionsButton); final Set<JBOptionButton.OptionInfo> infos = eachOptionsButton.getOptionInfos(); for (final JBOptionButton.OptionInfo eachInfo : infos) { if (eachInfo.getMnemonic() >=0) { final CustomShortcutSet sc = new CustomShortcutSet(KeyStroke.getKeyStroke("alt pressed " + Character.valueOf((char)eachInfo.getMnemonic()))); new AnAction() { @Override public void actionPerformed(AnActionEvent e) { final JBOptionButton buttonToActivate = eachInfo.getButton(); buttonToActivate.showPopup(eachInfo.getAction(), true); } }.registerCustomShortcutSet(sc, getPeer().getRootPane()); } } } else { button = new JButton(action); } String text = button.getText(); if (SystemInfo.isMac) { button.putClientProperty("JButton.buttonType", "text"); } if (text != null) { int mnemonic = 0; StringBuilder plainText = new StringBuilder(); for (int i = 0; i < text.length(); i++) { char ch = text.charAt(i); if (ch == '_' || ch == '&') { i++; if (i >= text.length()) { break; } ch = text.charAt(i); if (ch != '_' && ch != '&') { // Mnemonic is case insensitive. 
int vk = ch; if (vk >= 'a' && vk <= 'z') { vk -= 'a' - 'A'; } mnemonic = vk; } } plainText.append(ch); } button.setText(plainText.toString()); final Object name = action.getValue(Action.NAME); if (mnemonic == KeyEvent.VK_Y && "Yes".equals(name)) { myYesAction = action; } else if (mnemonic == KeyEvent.VK_N && "No".equals(name)) { myNoAction = action; } button.setMnemonic(mnemonic); } setMargin(button); if (action.getValue(DEFAULT_ACTION) != null) { if (myPeer != null && !myPeer.isHeadless()) { getRootPane().setDefaultButton(button); } } return button; } private void setMargin(JButton button) { // Aqua LnF does a good job of setting proper margin between buttons. Setting them specifically causes them be 'square' style instead of // 'rounded', which is expected by apple users. if (!SystemInfo.isMac) { if (myButtonMargins == null) { return; } button.setMargin(myButtonMargins); } } protected DialogWrapperPeer createPeer(final Component parent, final boolean canBeParent) { return DialogWrapperPeerFactory.getInstance().createPeer(this, parent, canBeParent); } protected DialogWrapperPeer createPeer(boolean canBeParent, boolean toolkitModalIfPossible) { return DialogWrapperPeerFactory.getInstance().createPeer(this, canBeParent, toolkitModalIfPossible); } protected DialogWrapperPeer createPeer(final Project project, final boolean canBeParent) { return DialogWrapperPeerFactory.getInstance().createPeer(this, project, canBeParent); } @Nullable protected JComponent createTitlePane() { return null; } /** * Factory method. It creates the panel located at the * north of the dialog's content pane. The implementation can return <code>null</code> * value. In this case there will be no input panel. * * @return north panel */ @Nullable protected JComponent createNorthPanel() { return null; } /** * Factory method. It creates panel with dialog options. Options panel is located at the * center of the dialog's content pane. The implementation can return <code>null</code> * value. 
In this case there will be no options panel. * * @return center panel */ @Nullable protected abstract JComponent createCenterPanel(); /** * @see java.awt.Window#toFront() */ public void toFront() { myPeer.toFront(); } /** * @see java.awt.Window#toBack() */ public void toBack() { myPeer.toBack(); } /** * Dispose the wrapped and releases all resources allocated be the wrapper to help * more effecient garbage collection. You should never invoke this method twice or * invoke any method of the wrapper after invocation of <code>dispose</code>. * * @throws IllegalStateException if the dialog is disposed not on the event dispatch thread */ protected void dispose() { ensureEventDispatchThread(); myErrorTextAlarm.cancelAllRequests(); myValidationAlarm.cancelAllRequests(); myDisposed = true; if (myButtons != null) { for (JButton button : myButtons) { button.setAction(null); // avoid memory leak via KeyboardManager } } final JRootPane rootPane = getRootPane(); // if rootPane = null, dialog has already been disposed if (rootPane != null) { unregisterKeyboardActions(rootPane); if (myActualSize != null) { setSize(myActualSize.width, myActualSize.height); } myPeer.dispose(); } } public static void unregisterKeyboardActions(final JRootPane rootPane) { new AwtVisitor(rootPane) { public boolean visit(final Component component) { if (component instanceof JComponent) { final JComponent eachComp = (JComponent)component; final ActionMap actionMap = eachComp.getActionMap(); final KeyStroke[] strokes = eachComp.getRegisteredKeyStrokes(); for (KeyStroke eachStroke : strokes) { boolean remove = true; if (actionMap != null) { for (int i = 0; i < 3; i++) { final InputMap inputMap = eachComp.getInputMap(i); final Object key = inputMap.get(eachStroke); if (key != null) { final Action action = actionMap.get(key); if (action instanceof UIResource) remove = false; } } } if (remove) eachComp.unregisterKeyboardAction(eachStroke); } } return false; } }; } /** * This method is invoked by default 
implementation of "Cancel" action. It just closes dialog * with <code>CANCEL_EXIT_CODE</code>. This is convenient place to override functionality of "Cancel" action. * Note that the method does nothing if "Cancel" action isn't enabled. */ public void doCancelAction() { processDoNotAskOnCancel(); if (getCancelAction().isEnabled()) { close(CANCEL_EXIT_CODE); } } private void processDoNotAskOnCancel() { if (myDoNotAsk != null) { if (myDoNotAsk.shouldSaveOptionsOnCancel() && myDoNotAsk.canBeHidden()) { myDoNotAsk.setToBeShown(toBeShown(), CANCEL_EXIT_CODE); } } } /** * You can use this method if you want to know by which event this actions got triggered. It is called only if * the cancel action was triggered by some input event, <code>doCancelAction</code> is called otherwise. * * @param source AWT event * @see #doCancelAction */ public void doCancelAction(AWTEvent source) { doCancelAction(); } /** * Programmatically perform a "click" of default dialog's button. The method does * nothing if the dialog has no default button. */ public void clickDefaultButton() { JButton button = getRootPane().getDefaultButton(); if (button != null) { button.doClick(); } } /** * This method is invoked by default implementation of "OK" action. It just closes dialog * with <code>OK_EXIT_CODE</code>. This is convenient place to override functionality of "OK" action. * Note that the method does nothing if "OK" action isn't enabled. */ protected void doOKAction() { processDoNotAskOnOk(OK_EXIT_CODE); if (getOKAction().isEnabled()) { close(OK_EXIT_CODE); } } protected void processDoNotAskOnOk(int exitCode) { if (myDoNotAsk != null) { if (myDoNotAsk.canBeHidden()) { myDoNotAsk.setToBeShown(toBeShown(), exitCode); } } } /** * @return whether the native window cross button closes the window or not. * <code>true</code> means that cross performs hide or dispose of the dialog. 
*/ public boolean shouldCloseOnCross() { return myCrossClosesWindow; } /** * This is factory method which creates action of dialog. Each action is represented * by <code>JButton</code> which is created by <code>createJButtonForAction(Action)</code> * method. These buttons are places into panel which is created by <code>createButtonsPanel</code> * method. Therefore you have enough ways to customise the dialog by overriding of * <code>createActions()</code>, <code>createButtonsPanel()</code> and * </code>createJButtonForAction(Action)</code> methods. By default the <code>createActions()</code> * method returns "OK" and "Cancel" action. The help action is automatically added is if * {@link #getHelpId()} returns non null value. * * @return dialog actions * * @see #createSouthPanel * @see #createJButtonForAction */ protected Action[] createActions() { if (getHelpId() == null) { if (SystemInfo.isMac) { return new Action[]{getCancelAction(), getOKAction()}; } return new Action[]{getOKAction(), getCancelAction()}; } else { if (SystemInfo.isMac) { return new Action[]{getHelpAction(), getCancelAction(), getOKAction()}; } return new Action[]{getOKAction(), getCancelAction(), getHelpAction()}; } } protected Action[] createLeftSideActions() { return new Action[0]; } /** * @return default implementation of "OK" action. This action just invokes * <code>doOKAction()</code> method. * @see #doOKAction */ protected Action getOKAction() { return myOKAction; } /** * @return default implementation of "Cancel" action. This action just invokes * <code>doCancelAction()</code> method. * @see #doCancelAction */ protected Action getCancelAction() { return myCancelAction; } /** * @return default implementation of "Help" action. This action just invokes * <code>doHelpAction()</code> method. 
* @see #doHelpAction */ protected Action getHelpAction() { return myHelpAction; } protected boolean isProgressDialog() { return false; } public final boolean isModalProgress() { return isProgressDialog(); } /** * Returns content pane * * @return content pane * @see javax.swing.JDialog#getContentPane */ public Container getContentPane() { assert myPeer != null; return myPeer.getContentPane(); } /** * @see javax.swing.JDialog#validate */ public void validate() { myPeer.validate(); } /** * @see javax.swing.JDialog#repaint */ public void repaint() { myPeer.repaint(); } /** * This is factory method. It returns key for installation into the dimension service. * If this method returns <code>null</code> then the component does not require installation * into dimension service. This default implementation returns <code>null</code>. * * @return dimension service key */ @Nullable @NonNls protected String getDimensionServiceKey() { return null; } @Nullable public final String getDimensionKey() { return getDimensionServiceKey(); } public int getExitCode() { return myExitCode; } /** * @return component which should be focused when the dialog appears * on the screen. */ @Nullable public JComponent getPreferredFocusedComponent() { return SystemInfo.isMac ? myPreferredFocusedComponent : null; } /** * @return horizontal stretch of the dialog. It means that the dialog's horizontal size is * the product of horizontal stretch by horizontal size of packed dialog. The default value * is <code>1.0f</code> */ public final float getHorizontalStretch() { return myHorizontalStretch; } /** * @return vertical stretch of the dialog. It means that the dialog's vertical size is * the product of vertical stretch by vertical size of packed dialog. 
The default value * is <code>1.0f</code> */ public final float getVerticalStretch() { return myVerticalStretch; } protected final void setHorizontalStretch(float hStretch) { myHorizontalStretch = hStretch; } protected final void setVerticalStretch(float vStretch) { myVerticalStretch = vStretch; } /** * @see java.awt.Window#getOwner * @return window owner */ public Window getOwner() { return myPeer.getOwner(); } public Window getWindow() { return myPeer.getWindow(); } /** * @see javax.swing.JDialog#getRootPane * @return root pane */ public JRootPane getRootPane() { return myPeer.getRootPane(); } /** * @see java.awt.Window#getSize * @return dialog size */ public Dimension getSize() { return myPeer.getSize(); } /** * @see java.awt.Dialog#getTitle * @return dialog title */ public String getTitle() { return myPeer.getTitle(); } protected void init() { myErrorText = new ErrorText(); myErrorText.setVisible(false); final JPanel root = new JPanel(new BorderLayout()); //{ // @Override // public void paint(Graphics g) { // if (ApplicationManager.getApplication() != null) { // UISettings.setupAntialiasing(g); // } // super.paint(g); // } //}; myPeer.setContentPane(root); final CustomShortcutSet sc = new CustomShortcutSet(SHOW_OPTION_KEYSTROKE); final AnAction toggleShowOptions = new AnAction() { @Override public void actionPerformed(AnActionEvent e) { expandNextOptionButton(); } }; toggleShowOptions.registerCustomShortcutSet(sc, root); final JPanel northSection = new JPanel(new BorderLayout()); root.add(northSection, BorderLayout.NORTH); JComponent titlePane = createTitlePane(); if (titlePane != null) { northSection.add(titlePane, BorderLayout.CENTER); } JComponent centerSection = new JPanel(new BorderLayout()); root.add(centerSection, BorderLayout.CENTER); root.setBorder(createContentPaneBorder()); final JComponent n = createNorthPanel(); if (n != null) { centerSection.add(n, BorderLayout.NORTH); } final JComponent c = createCenterPanel(); if (c != null) { 
centerSection.add(c, BorderLayout.CENTER); myErrorPane = c; } if (myErrorPane == null) { myErrorPane = root; } final JPanel southSection = new JPanel(new BorderLayout()); root.add(southSection, BorderLayout.SOUTH); southSection.add(myErrorText, BorderLayout.CENTER); final JComponent south = createSouthPanel(); if (south != null) { southSection.add(south, BorderLayout.SOUTH); } new MnemonicHelper().register(root); if (!postponeValidation()) { startTrackingValidation(); } if (SystemInfo.isWindows) { installEnterHook(root); } } private static void installEnterHook(JComponent root) { new AnAction() { @Override public void actionPerformed(AnActionEvent e) { final Component owner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner(); if (owner instanceof JButton && owner.isEnabled()) { ((JButton)owner).doClick(); } } @Override public void update(AnActionEvent e) { final Component owner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner(); e.getPresentation().setEnabled((owner instanceof JButton && owner.isEnabled())); } }.registerCustomShortcutSet(CustomShortcutSet.fromString("ENTER"), root); } private void expandNextOptionButton() { if (myCurrentOptionsButtonIndex > 0) { myOptionsButtons.get(myCurrentOptionsButtonIndex).closePopup(); myCurrentOptionsButtonIndex++; } else if (myOptionsButtons.size() > 0) { myCurrentOptionsButtonIndex = 0; } if (myCurrentOptionsButtonIndex >= 0 && myCurrentOptionsButtonIndex < myOptionsButtons.size()) { myOptionsButtons.get(myCurrentOptionsButtonIndex).showPopup(null, true); } } void startTrackingValidation() { SwingUtilities.invokeLater(new Runnable() { public void run() { if (!myValidationStarted && !myDisposed) { myValidationStarted = true; initValidation(); } } }); } protected final void initValidation() { myValidationAlarm.cancelAllRequests(); final Runnable validateRequest = new Runnable() { public void run() { if (myDisposed) return; final ValidationInfo result = doValidate(); if (result == 
null) { clearProblems(); } else { reportProblem(result); } if (!myDisposed) { initValidation(); } } }; if (getValidationThreadToUse() == Alarm.ThreadToUse.SWING_THREAD) { myValidationAlarm.addRequest(validateRequest, myValidationDelay, ModalityState.current()); } else { myValidationAlarm.addRequest(validateRequest, myValidationDelay); } } protected boolean isNorthStrictedToPreferredSize() { return true; } protected boolean isCenterStrictedToPreferredSize() { return false; } protected boolean isSouthStrictedToPreferredSize() { return true; } protected JComponent createContentPane() { return new JPanel(); } /** * @see java.awt.Window#pack */ public void pack() { myPeer.pack(); } public Dimension getPreferredSize() { return myPeer.getPreferredSize(); } /** * Sets horizontal alignment of dialog's the buttons. * * @param alignment alignment of the buttons. Acceptable values are * <code>SwingConstants.CENTER</code> and <code>SwingConstants.RIGHT</code>. * The <code>SwingConstants.RIGHT</code> is the default value. * @throws java.lang.IllegalArgumentException * if <code>alignment</code> isn't acceptable */ protected final void setButtonsAlignment(@MagicConstant(intValues = {SwingConstants.CENTER, SwingConstants.RIGHT}) int alignment) { if (SwingConstants.CENTER != alignment && SwingConstants.RIGHT != alignment) { throw new IllegalArgumentException("unknown alignment: " + alignment); } myButtonAlignment = alignment; } /** * Sets margin for command buttons ("OK", "Cancel", "Help"). * @param insets buttons margin */ public final void setButtonsMargin(@Nullable Insets insets) { myButtonMargins = insets; } public final void setCrossClosesWindow(boolean crossClosesWindow) { myCrossClosesWindow = crossClosesWindow; } protected final void setCancelButtonIcon(Icon icon) { // Setting icons causes buttons be 'square' style instead of // 'rounded', which is expected by apple users. 
if (!SystemInfo.isMac) { myCancelAction.putValue(Action.SMALL_ICON, icon); } } protected final void setCancelButtonText(String text) { myCancelAction.putValue(Action.NAME, text); } public void setModal(boolean modal) { myPeer.setModal(modal); } public boolean isModal() { return myPeer.isModal(); } protected void setOKActionEnabled(boolean isEnabled) { myOKAction.setEnabled(isEnabled); } protected final void setOKButtonIcon(Icon icon) { // Setting icons causes buttons be 'square' style instead of // 'rounded', which is expected by apple users. if (!SystemInfo.isMac) { myOKAction.putValue(Action.SMALL_ICON, icon); } } protected final void setOKButtonText(String text) { myOKAction.putValue(Action.NAME, text); } protected final void setOKButtonMnemonic(int c) { myOKAction.putValue(Action.MNEMONIC_KEY, c); } /** * @return the help identifier or null if no help is available. */ @Nullable protected String getHelpId() { return null; } /** * This method is invoked by default implementation of "Help" action. * This is convenient place to override functionality of "Help" action. * Note that the method does nothing if "Help" action isn't enabled. * <p/> * The default implementation shows the help page with id returned * by the method {@link #getHelpId()}. If that method returns null, * the message box with message "no help available" is shown. 
*/ protected void doHelpAction() { if (myHelpAction.isEnabled()) { String helpId = getHelpId(); if (helpId != null) { HelpManager.getInstance().invokeHelp(helpId); } else { Messages.showMessageDialog(getContentPane(), UIBundle.message("there.is.no.help.for.this.dialog.error.message"), UIBundle.message("no.help.available.dialog.title"), Messages.getInformationIcon()); } } } public boolean isOK() { return getExitCode() == OK_EXIT_CODE; } public boolean isOKActionEnabled() { return myOKAction.isEnabled(); } /** * @see java.awt.Component#isVisible * @return <code>true</code> if and only if visible */ public boolean isVisible() { return myPeer.isVisible(); } /** * @see java.awt.Window#isShowing * @return <code>true</code> if and only if showing */ public boolean isShowing() { return myPeer.isShowing(); } /** * @param width width * @param height height * @see javax.swing.JDialog#setSize */ public void setSize(int width, int height) { myPeer.setSize(width, height); } /** * @param title title * @see javax.swing.JDialog#setTitle */ public void setTitle(String title) { myPeer.setTitle(title); } /** * @see javax.swing.JDialog#isResizable */ public void isResizable() { myPeer.isResizable(); } /** * @param resizable is resizable * @see javax.swing.JDialog#setResizable */ public void setResizable(boolean resizable) { myPeer.setResizable(resizable); } /** * @see javax.swing.JDialog#getLocation * @return dialog location */ public Point getLocation() { return myPeer.getLocation(); } /** * @param p new dialog location * @see javax.swing.JDialog#setLocation(Point) */ public void setLocation(Point p) { myPeer.setLocation(p); } /** * @param x x * @param y y * @see javax.swing.JDialog#setLocation(int,int) */ public void setLocation(int x, int y) { myPeer.setLocation(x, y); } public void centerRelativeToParent() { myPeer.centerInParent(); } /** * Show the dialog * * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ public void show() { 
showAndGetOk(); } public AsyncResult<Boolean> showAndGetOk() { final AsyncResult<Boolean> result = new AsyncResult<Boolean>(); ensureEventDispatchThread(); registerKeyboardShortcuts(); final Disposable uiParent = Disposer.get("ui"); if (uiParent != null) { // may be null if no app yet (license agreement) Disposer.register(uiParent, myDisposable); // ensure everything is disposed on app quit } myPeer.show().doWhenProcessed(new Runnable() { public void run() { result.setDone(isOK()); } }); return result; } /** * @return Location in absolute coordinates which is used when dialog has no dimension service key or no position was stored yet. * Can return null. In that case dialog will be centered relative to its owner. */ @Nullable public Point getInitialLocation() { return myInitialLocationCallback == null ? null : myInitialLocationCallback.compute(); } public void setInitialLocationCallback(Computable<Point> callback) { myInitialLocationCallback = callback; } private void registerKeyboardShortcuts() { ActionListener cancelKeyboardAction = new ActionListener() { public void actionPerformed(ActionEvent e) { MenuSelectionManager menuSelectionManager = MenuSelectionManager.defaultManager(); MenuElement[] selectedPath = menuSelectionManager.getSelectedPath(); if (selectedPath.length > 0) { // hide popup menu if any menuSelectionManager.clearSelectedPath(); } else { final StackingPopupDispatcher popupDispatcher = StackingPopupDispatcher.getInstance(); if (ApplicationManager.getApplication() == null || (popupDispatcher != null && !popupDispatcher.isPopupFocused())) { doCancelAction(e); } } } }; final JRootPane rootPane = getRootPane(); if (rootPane == null) return; rootPane.registerKeyboardAction(cancelKeyboardAction, KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), JComponent.WHEN_IN_FOCUSED_WINDOW); registerForEveryKeyboardShortcut(cancelKeyboardAction, CommonShortcuts.getCloseActiveWindow()); if (ApplicationInfo.contextHelpAvailable()) { ActionListener helpAction = new 
ActionListener() { public void actionPerformed(ActionEvent e) { doHelpAction(); } }; registerForEveryKeyboardShortcut(helpAction, CommonShortcuts.getContextHelp()); rootPane.registerKeyboardAction(helpAction, KeyStroke.getKeyStroke(KeyEvent.VK_HELP, 0), JComponent.WHEN_IN_FOCUSED_WINDOW); } if (myButtons != null) { rootPane.registerKeyboardAction(new AbstractAction() { public void actionPerformed(ActionEvent e) { focusPreviousButton(); } }, KeyStroke.getKeyStroke(KeyEvent.VK_LEFT, 0), JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT); rootPane.registerKeyboardAction(new AbstractAction() { public void actionPerformed(ActionEvent e) { focusNextButton(); } }, KeyStroke.getKeyStroke(KeyEvent.VK_RIGHT, 0), JComponent.WHEN_ANCESTOR_OF_FOCUSED_COMPONENT); } if (myYesAction != null) { rootPane.registerKeyboardAction(myYesAction, KeyStroke.getKeyStroke(KeyEvent.VK_Y, 0), JComponent.WHEN_IN_FOCUSED_WINDOW); } if (myNoAction != null) { rootPane.registerKeyboardAction(myNoAction, KeyStroke.getKeyStroke(KeyEvent.VK_N, 0), JComponent.WHEN_IN_FOCUSED_WINDOW); } } private void registerForEveryKeyboardShortcut(ActionListener action, ShortcutSet shortcuts) { for (Shortcut shortcut : shortcuts.getShortcuts()){ if (shortcut instanceof KeyboardShortcut) { KeyboardShortcut ks = (KeyboardShortcut)shortcut; KeyStroke first = ks.getFirstKeyStroke(); KeyStroke second = ks.getSecondKeyStroke(); if (second == null) { getRootPane().registerKeyboardAction(action, first, JComponent.WHEN_IN_FOCUSED_WINDOW); } } } } private void focusPreviousButton() { for (int i = 0; i < myButtons.length; i++) { if (myButtons[i].hasFocus()) { if (i == 0) { myButtons[myButtons.length - 1].requestFocus(); return; } myButtons[i - 1].requestFocus(); return; } } } private void focusNextButton() { for (int i = 0; i < myButtons.length; i++) { if (myButtons[i].hasFocus()) { if (i == myButtons.length - 1) { myButtons[0].requestFocus(); return; } myButtons[i + 1].requestFocus(); return; } } } public long 
getTypeAheadTimeoutMs() { return 0l; } public boolean isToDispatchTypeAhead() { return isOK(); } public static boolean isMultipleModalDialogs() { final Component c = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner(); if (c != null) { final DialogWrapper wrapper = findInstance(c); return wrapper != null && wrapper.getPeer().getCurrentModalEntities().length > 1; } return false; } /** * Base class for dialog wrapper actions that need to ensure that only * one action for the dialog is running. */ protected abstract class DialogWrapperAction extends AbstractAction { /** * The constructor * * @param name the action name (see {@link Action#NAME}) */ protected DialogWrapperAction(String name) { putValue(NAME, name); } /** * {@inheritDoc} */ public void actionPerformed(ActionEvent e) { if (myClosed) return; if (myPerformAction) return; try { myPerformAction = true; doAction(e); } finally { myPerformAction = false; } } /** * Do actual work for the action. This method is called only if no other action * is performed in parallel (checked using {@link com.intellij.openapi.ui.DialogWrapper#myPerformAction}), * and dialog is active (checked using {@link com.intellij.openapi.ui.DialogWrapper#myClosed}) * * @param e action */ protected abstract void doAction(ActionEvent e); } protected class OkAction extends DialogWrapperAction { protected OkAction() { super(CommonBundle.getOkButtonText()); putValue(DEFAULT_ACTION, Boolean.TRUE); } @Override protected void doAction(ActionEvent e) { ValidationInfo info = doValidate(); if (info != null) { if (info.component != null && info.component.isVisible()) { IdeFocusManager.getInstance(null).requestFocus(info.component, true); } startTrackingValidation(); return; } doOKAction(); } } protected class CancelAction extends DialogWrapperAction { private CancelAction() { super(CommonBundle.getCancelButtonText()); } @Override protected void doAction(ActionEvent e) { doCancelAction(); } } /** * The action that just closes dialog 
with the specified exit code * (like the default behavior of the actions "Ok" and "Cancel"). */ protected class DialogWrapperExitAction extends DialogWrapperAction { /** * The exit code for the action */ protected final int myExitCode; /** * The constructor * * @param name the action name * @param exitCode the exit code for dialog */ public DialogWrapperExitAction(String name, int exitCode) { super(name); myExitCode = exitCode; } @Override protected void doAction(ActionEvent e) { if (isEnabled()) { close(myExitCode); } } } private class HelpAction extends AbstractAction { private HelpAction() { putValue(NAME, CommonBundle.getHelpButtonText()); } public void actionPerformed(ActionEvent e) { doHelpAction(); } } private Dimension myActualSize = null; private String myLastErrorText = null; protected final void setErrorText(@Nullable final String text) { if (Comparing.equal(myLastErrorText, text)) { return; } myLastErrorText = text; if (myActualSize == null && !StringUtil.isEmpty(text)) { myActualSize = getSize(); } myErrorTextAlarm.cancelAllRequests(); myErrorTextAlarm.addRequest(new Runnable() { public void run() { final String text = myLastErrorText; myErrorText.setError(text); if (text != null && text.length() > myMaxErrorTextLength) { // during the first update, resize only for growing. during a subsequent update, // if error text becomes longer, the min size calculation may not calculate enough size, // so we pack() even though it could cause the dialog to become smaller. 
if (myMaxErrorTextLength == 0) { updateHeightForErrorText(); } else { //if (getRootPane() != null) myPeer.pack(); } myMaxErrorTextLength = text.length(); updateHeightForErrorText(); } myErrorText.repaint(); if (StringUtil.isEmpty(text) && myActualSize != null) { resizeWithAnimation(myActualSize); myMaxErrorTextLength = 0; } } }, 300, null); } @Nullable public static DialogWrapper findInstance(Component c) { while (c != null){ if (c instanceof DialogWrapperDialog) { return ((DialogWrapperDialog)c).getDialogWrapper(); } c = c.getParent(); } return null; } private void resizeWithAnimation(final Dimension size) { //todo[kb]: fix this PITA myResizeInProgress = true; if (!Registry.is("enable.animation.on.dialogs")) { setSize(size.width, size.height); myResizeInProgress = false; return; } new Thread("DialogWrapper resizer") { int time = 200; int steps = 7; @Override public void run() { int step = 0; final Dimension cur = getSize(); int h = (size.height - cur.height) / steps; int w = (size.width - cur.width) / steps; while (step++ < steps) { setSize(cur.width + w * step, cur.height + h*step); try { //noinspection BusyWait sleep(time / steps); } catch (InterruptedException ignore) {} } setSize(size.width, size.height); //repaint(); if (myErrorText.shouldBeVisible()) { myErrorText.setVisible(true); } myResizeInProgress = false; } }.start(); } private void updateHeightForErrorText() { Dimension errorSize = myErrorText.getPreferredSize(); resizeWithAnimation(new Dimension(Math.max(myActualSize.width, errorSize.width + 40), myActualSize.height + errorSize.height + 10)); } private static class ErrorText extends JPanel { private final JLabel myLabel = new JLabel(); private Dimension myPrefSize; private String myText; private ErrorText() { setLayout(new BorderLayout()); JBScrollPane pane = new JBScrollPane(myLabel, ScrollPaneConstants.VERTICAL_SCROLLBAR_NEVER, ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); pane.setBorder(IdeBorderFactory.createEmptyBorder(0)); 
pane.setBackground(null); pane.getViewport().setBackground(null); pane.setOpaque(false); add(pane, BorderLayout.CENTER); } public void setError(String text) { final Dimension oldSize = getPreferredSize(); myText = text; if (text == null) { myLabel.setText(""); myLabel.setIcon(null); setVisible(false); setBorder(null); } else { myLabel.setText("<html><body><font color=red><left>" + text + "</left></b></font></body></html>"); myLabel.setIcon(AllIcons.Actions.Lightning); myLabel.setBorder(new EmptyBorder(4, 10, 0, 2)); setVisible(true); } final Dimension size = getPreferredSize(); if (oldSize.height < size.height) { revalidate(); } } public boolean shouldBeVisible() { return !StringUtil.isEmpty(myText); } public boolean isTextSet(@Nullable String text) { return StringUtil.equals(text, myText); } public Dimension getPreferredSize() { return myPrefSize == null ? myLabel.getPreferredSize() : myPrefSize; } } public final DialogWrapperPeer getPeer() { return myPeer; } /** * Ensure that dialog is used from even dispatch thread. * * @throws IllegalStateException if the dialog is invoked not on the event dispatch thread */ private static void ensureEventDispatchThread() { if (!EventQueue.isDispatchThread()) { throw new IllegalStateException("The DialogWrapper can be used only on event dispatch thread."); } } public final Disposable getDisposable() { return myDisposable; } public interface DoNotAskOption { boolean isToBeShown(); void setToBeShown(boolean value, int exitCode); /** * Should be 'true' for checkbox to be visible. 
*/ boolean canBeHidden(); boolean shouldSaveOptionsOnCancel(); String getDoNotShowMessage(); } private ErrorPaintingType getErrorPaintingType() { return ErrorPaintingType.SIGN; } private class ErrorPainter extends AbstractPainter { private ValidationInfo myInfo; @Override public void executePaint(Component component, Graphics2D g) { if (myInfo != null && myInfo.component != null) { final JComponent comp = myInfo.component; final int w = comp.getWidth(); final int h = comp.getHeight(); Point p; switch (getErrorPaintingType()) { case DOT: p = SwingUtilities.convertPoint(comp, 2, h/2 , component); AllIcons.Ide.ErrorPoint.paintIcon(component, g, p.x, p.y); break; case SIGN: p = SwingUtilities.convertPoint(comp, w, 0, component); AllIcons.Ide.ErrorSign.paintIcon(component, g, p.x - 8, p.y - 8); break; case LINE: p = SwingUtilities.convertPoint(comp, 0, h, component); final GraphicsConfig config = new GraphicsConfig(g); g.setColor(new Color(255, 0, 0 , 100)); g.fillRoundRect(p.x, p.y-2, w, 4, 2, 2); config.restore(); break; } } } @Override public boolean needsRepaint() { return true; } public void setValidationInfo(@Nullable ValidationInfo info) { myInfo = info; } } private static enum ErrorPaintingType {DOT, SIGN, LINE} }
do not use showAndGet() for modal dialogs
platform/platform-api/src/com/intellij/openapi/ui/DialogWrapper.java
do not use showAndGet() for modal dialogs
<ide><path>latform/platform-api/src/com/intellij/openapi/ui/DialogWrapper.java <ide> showAndGetOk(); <ide> } <ide> <add> public boolean showAndGet() { <add> show(); <add> return isOK(); <add> } <add> <add> /** <add> * You need this method ONLY for NON-MODAL dialogs. Otherwise, use {@link #show()} or {@link #showAndGet()}. <add> * @return result callback <add> */ <ide> public AsyncResult<Boolean> showAndGetOk() { <ide> final AsyncResult<Boolean> result = new AsyncResult<Boolean>(); <ide>
Java
apache-2.0
88ba04e909de442c4ed04a9ace8accf3ac2e839f
0
fengjx/ttwx,fengjx/ttwx,fengjx/ttwx,fengjx/ttwx
package com.fengjx.ttwx.common.plugin.db; import com.fengjx.ttwx.common.utils.CommonUtils; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.BatchPreparedStatementSetter; import org.springframework.jdbc.core.JdbcTemplate; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * @author fengjx. * @date:2015/5/8 0008 */ public abstract class Model { // 注入jdbcTemplate @Autowired private JdbcTemplate jdbcTemplate; public boolean insert(Class<? extends Model> cls, Map<String, Object> attrs) { Table table = TableUtil.getTable(cls); String pk = table.getPrimaryKey(); if (StringUtils.isBlank((String) attrs.get(pk))) { attrs.put(pk, CommonUtils.getPrimaryKey()); } StringBuilder sql = new StringBuilder(); List<Object> paras = new ArrayList(); Config.dialect.forModelSave(table, attrs, sql, paras); return jdbcTemplate.update(sql.toString(), paras.toArray()) >= 1; } public boolean insert(Map<String, Object> attrs) { return insert(getClass(), attrs); } /** * Delete model by id. * * @param id the id value of the model * @return true if delete succeed otherwise false */ public boolean deleteById(Object id) { if (id == null) { throw new IllegalArgumentException("id can not be null"); } return deleteById(this.getClass(), id); } public boolean deleteById(Class<? extends Model> cls, Object id) { Table table = TableUtil.getTable(cls); String sql = Config.dialect.forModelDeleteById(table); return jdbcTemplate.update(sql, id) >= 1; } /** * Update model. */ public boolean update(Map<String, Object> attrs) { return update(getClass(), attrs); } /** * Update model. */ public boolean update(Class<? 
extends Model> cls, Map<String, Object> attrs) { Table table = TableUtil.getTable(cls); String pKey = table.getPrimaryKey(); Object id = attrs.get(pKey); if (id == null) { throw new MyDbException("You can't update model without Primary Key."); } StringBuilder sql = new StringBuilder(); List<Object> paras = new ArrayList(); Config.dialect.forModelUpdate(table, attrs, pKey, id, sql, paras); if (paras.size() <= 1) { // Needn't update return false; } int result = jdbcTemplate.update(sql.toString(), paras.toArray()); return result >= 1; } public Record findById(Object id) { return findById(id, "*"); } /** * Find model by id. Fetch the specific columns only. Example: User user = * findById(15, "name, age"); * * @param id the id value of the model * @param columns the specific columns separate with comma character ==> "," */ public Record findById(Object id, String columns) { return findById(this.getClass(), id, columns); } public Record findById(Class<? extends Model> cls, Object id, String columns) { Table table = TableUtil.getTable(cls); String sql = Config.dialect.forModelFindById(table, columns); return findOne(sql, id); } /** * 查询单条记录 * * @param attrs 查询条件及参数 * @return */ public Record findOne(Map<String, Object> attrs) { return findOne(this.getClass(), attrs); } /** * 根据Model查询单条记录 * * @param cls 映射的class * @param attrs 查询条件及参数 * @return */ public Record findOne(Class<? extends Model> cls, Map<String, Object> attrs) { Table table = TableUtil.getTable(cls); StringBuilder sql = new StringBuilder(); List<Object> paras = new ArrayList(); Config.dialect.forModelFind(table, sql, "*", null, attrs, paras); return findOne(sql.toString(), paras.toArray()); } /** * 根据sql查询单条记录 * * @param sql * @param params * @return */ public Record findOne(String sql, Object... 
params) { List<Map<String, Object>> list = jdbcTemplate.queryForList(sql, params); if (CollectionUtils.isEmpty(list)) { return new Record(); } else if (list.size() > 1) { throw new MyDbException("Incorrect result size: expected 1, actual " + list.size()); } Map<String, Object> map = list.get(0); return new Record(map); } /** * 查询多条记录 * * @param attrs 查询条件及参数 * @return */ public List<Map<String, Object>> findList(Map<String, Object> attrs) { return findList(this.getClass(), attrs); } /** * 根据Model查询多条记录 * * @param cls 映射的class * @param attrs 查询条件及参数 * @return */ public List<Map<String, Object>> findList(Class<? extends Model> cls, Map<String, Object> attrs) { Table table = TableUtil.getTable(cls); StringBuilder sql = new StringBuilder(); List<Object> paras = new ArrayList(); Config.dialect.forModelFind(table, sql, "*", null, attrs, paras); return findList(sql.toString(), paras.toArray()); } /** * 根据sql查询多条记录 * * @param sql * @param params * @return */ public List<Map<String, Object>> findList(String sql, Object... params) { return jdbcTemplate.queryForList(sql, params); } /** * 单表查询,查询当前Model关联的表,此查询依赖PageContext * * @param attrs * @return */ public Page<Map<String, Object>> paginate(Map<String, Object> attrs) { return paginate(attrs, null); } public Page<Map<String, Object>> paginate(Map<String, Object> attrs, String orderby) { return paginate(this.getClass(), attrs, orderby); } /** * 单表查询,此查询依赖PageContext * * @param cls * @param attrs * @return */ public Page<Map<String, Object>> paginate(Class<? extends Model> cls, Map<String, Object> attrs) { return paginate(cls, attrs, null); } /** * paginate(User.class,attrs,"order by in_time") * * @param cls * @param attrs * @param orderby * @return */ public Page<Map<String, Object>> paginate(Class<? 
extends Model> cls, Map<String, Object> attrs, String orderby) { Table table = TableUtil.getTable(cls); StringBuilder sql = new StringBuilder(); List<Object> paras = new ArrayList(); Config.dialect.forModelFind(table, sql, "*", orderby, attrs, paras); return paginate(PageContext.getPageNumber(), PageContext.getPageSize(), sql.toString(), paras.toArray()); } /** * 分页查询,此查询依赖PageContext * * @param sql * @param paras * @return */ public Page<Map<String, Object>> paginate(String sql, Object... paras) { return paginate(PageContext.getPageNumber(), PageContext.getPageSize(), sql, paras); } /** * 分页查询 * * @param pageNumber * @param pageSize * @param sql * @param paras * @return */ public Page<Map<String, Object>> paginate(int pageNumber, int pageSize, String sql, Object... paras) { if (pageNumber < 1 || pageSize < 1) { throw new MyDbException("pageNumber and pageSize must be more than 0"); } int totalRow = 0; int totalPage = 0; totalRow = getCount(sql, paras); if (totalRow < 1) { return new Page(new ArrayList<Map<String, Object>>(0), pageNumber, pageSize, 0, 0); } totalPage = totalRow / pageSize; if (totalRow % pageSize != 0) { totalPage++; } StringBuilder pageSql = new StringBuilder(); Config.dialect.forPaginate(pageSql, pageNumber, pageSize, sql); List<Map<String, Object>> list = findList(pageSql.toString(), paras); return new Page(list, pageNumber, pageSize, totalPage, totalRow); } /** * 获得总记录数 * * @param sql * @param paras * @return */ public int getCount(String sql, Object... paras) { StringBuilder countSql = new StringBuilder(); Config.dialect.forCount(countSql, sql); return jdbcTemplate.queryForObject(countSql.toString(), paras, Integer.class); } /** * 执行新增、更新、删除语句 * * @param sql * @param args * @return */ public int execute(String sql, Object... 
args) { return jdbcTemplate.update(sql, args); } /** * 批量执行新增、更新、删除语句 * * @param sql * @param bpss * @return */ public int[] batchExecute(String sql, BatchPreparedStatementSetter bpss) { return jdbcTemplate.batchUpdate(sql, bpss); } /** * 批量执行新增、更新、删除语句 * * @param sqls * @return */ public int[] batchExecute(String... sqls) { return jdbcTemplate.batchUpdate(sqls); } /** * 获得当前Model全部字段名 * * @return */ public String getColumnsStr() { return getColumnsStr(this.getClass()); } /** * 通过class获得映射table的字段(如:id,name,age) * * @param cls * @return */ public String getColumnsStr(Class<? extends Model> cls) { Table t = TableUtil.getTable(cls); return t.getColumnsStr(); } /** * 通过class获得映射table的字段(如:id,name,age) * * @param cls * @return */ public String getColumnsStr(Class<? extends Model> cls, String alias) { Table t = TableUtil.getTable(cls); StringBuilder columnsStr = new StringBuilder(); String[] columns = t.getColumns(); for (String col : columns) { columnsStr.append(" ,").append(alias).append(".").append(col); } columnsStr.delete(0, 2); return columnsStr.toString(); } /** * 通过class获得映射表明 * * @param cls * @return */ public String getTableName(Class<? extends Model> cls) { Table t = TableUtil.getTable(cls); return t.getName(); } /** * 获得当前Model表名 * * @return */ public String getTableName() { return getTableName(this.getClass()); } /** * 获得单表查询sql(如:select id, name, age from user) * * @param cls * @return */ public String getSelectSql(Class<? extends Model> cls) { StringBuilder sql = new StringBuilder(); sql.append("select ").append(getColumnsStr(cls)); sql.append(" from ").append(getTableName(cls)); return sql.toString(); } /** * 获得带别名的单表查询sql(如:select u.id, u.name, u.age from user u) * * @param cls * @param alias * @return */ public String getSelectSql(Class<? 
extends Model> cls, String alias) { StringBuilder sql = new StringBuilder(); sql.append("select ").append(getColumnsStr(cls, alias)); sql.append(" from ").append(getTableName(cls)).append(" ").append(alias); return sql.toString(); } /** * 获得带别名的单表查询sql(如:select u.id, u.name, u.age from user u) * * @return */ public String getSelectSql() { return getSelectSql(this.getClass()); } /** * 获得单表查询sql(如:select id, name, age from user) * * @return */ public String getSelectSql(String alias) { return getSelectSql(this.getClass(), alias); } public JdbcTemplate getJdbcTemplate() { return jdbcTemplate; } public void setJdbcTemplate(JdbcTemplate jdbcTemplate) { this.jdbcTemplate = jdbcTemplate; } }
src/main/java/com/fengjx/ttwx/common/plugin/db/Model.java
package com.fengjx.ttwx.common.plugin.db; import com.fengjx.ttwx.common.utils.CommonUtils; import org.apache.commons.collections.CollectionUtils; import org.apache.commons.lang3.StringUtils; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.jdbc.core.BatchPreparedStatementSetter; import org.springframework.jdbc.core.JdbcTemplate; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * @author fengjx. * @date:2015/5/8 0008 */ public abstract class Model { // 注入jdbcTemplate @Autowired private JdbcTemplate jdbcTemplate; public boolean insert(Class<? extends Model> cls, Map<String, Object> attrs) { Table table = TableUtil.getTable(cls); String pk = table.getPrimaryKey(); if (StringUtils.isBlank((String) attrs.get(pk))) { attrs.put(pk, CommonUtils.getPrimaryKey()); } StringBuilder sql = new StringBuilder(); List<Object> paras = new ArrayList(); Config.dialect.forModelSave(table, attrs, sql, paras); return jdbcTemplate.update(sql.toString(), paras.toArray()) >= 1; } public boolean insert(Map<String, Object> attrs) { return insert(getClass(), attrs); } /** * Delete model by id. * * @param id the id value of the model * @return true if delete succeed otherwise false */ public boolean deleteById(Object id) { if (id == null) { throw new IllegalArgumentException("id can not be null"); } return deleteById(this.getClass(), id); } public boolean deleteById(Class<? extends Model> cls, Object id) { Table table = TableUtil.getTable(cls); String sql = Config.dialect.forModelDeleteById(table); return jdbcTemplate.update(sql, id) >= 1; } /** * Update model. */ public boolean update(Map<String, Object> attrs) { return update(getClass(), attrs); } /** * Update model. */ public boolean update(Class<? 
extends Model> cls, Map<String, Object> attrs) { Table table = TableUtil.getTable(cls); String pKey = table.getPrimaryKey(); Object id = attrs.get(pKey); if (id == null) { throw new MyDbException("You can't update model without Primary Key."); } StringBuilder sql = new StringBuilder(); List<Object> paras = new ArrayList(); Config.dialect.forModelUpdate(table, attrs, pKey, id, sql, paras); if (paras.size() <= 1) { // Needn't update return false; } int result = jdbcTemplate.update(sql.toString(), paras.toArray()); return result >= 1; } /** * 新增或者更新 * * @param cls * @param attrs * @return */ public boolean insertOrUpdate(Class<? extends Model> cls, Map<String, Object> attrs) { Table table = TableUtil.getTable(cls); String pKey = table.getPrimaryKey(); Object id = attrs.get(pKey); if (null == id || "".equals(id)) { return insert(cls, attrs); } else { return update(cls, attrs); } } /** * 新增或者更新 * * @param attrs * @return */ public boolean insertOrUpdate(Map<String, Object> attrs) { return insertOrUpdate(this.getClass(), attrs); } public Record findById(Object id) { return findById(id, "*"); } /** * Find model by id. Fetch the specific columns only. Example: User user = * findById(15, "name, age"); * * @param id the id value of the model * @param columns the specific columns separate with comma character ==> "," */ public Record findById(Object id, String columns) { return findById(this.getClass(), id, columns); } public Record findById(Class<? extends Model> cls, Object id, String columns) { Table table = TableUtil.getTable(cls); String sql = Config.dialect.forModelFindById(table, columns); return findOne(sql, id); } /** * 查询单条记录 * * @param attrs 查询条件及参数 * @return */ public Record findOne(Map<String, Object> attrs) { return findOne(this.getClass(), attrs); } /** * 根据Model查询单条记录 * * @param cls 映射的class * @param attrs 查询条件及参数 * @return */ public Record findOne(Class<? 
extends Model> cls, Map<String, Object> attrs) { Table table = TableUtil.getTable(cls); StringBuilder sql = new StringBuilder(); List<Object> paras = new ArrayList(); Config.dialect.forModelFind(table, sql, "*", null, attrs, paras); return findOne(sql.toString(), paras.toArray()); } /** * 根据sql查询单条记录 * * @param sql * @param params * @return */ public Record findOne(String sql, Object... params) { List<Map<String, Object>> list = jdbcTemplate.queryForList(sql, params); if (CollectionUtils.isEmpty(list)) { return new Record(); } else if (list.size() > 1) { throw new MyDbException("Incorrect result size: expected 1, actual " + list.size()); } Map<String, Object> map = list.get(0); return new Record(map); } /** * 查询多条记录 * * @param attrs 查询条件及参数 * @return */ public List<Map<String, Object>> findList(Map<String, Object> attrs) { return findList(this.getClass(), attrs); } /** * 根据Model查询多条记录 * * @param cls 映射的class * @param attrs 查询条件及参数 * @return */ public List<Map<String, Object>> findList(Class<? extends Model> cls, Map<String, Object> attrs) { Table table = TableUtil.getTable(cls); StringBuilder sql = new StringBuilder(); List<Object> paras = new ArrayList(); Config.dialect.forModelFind(table, sql, "*", null, attrs, paras); return findList(sql.toString(), paras.toArray()); } /** * 根据sql查询多条记录 * * @param sql * @param params * @return */ public List<Map<String, Object>> findList(String sql, Object... params) { return jdbcTemplate.queryForList(sql, params); } /** * 单表查询,查询当前Model关联的表,此查询依赖PageContext * * @param attrs * @return */ public Page<Map<String, Object>> paginate(Map<String, Object> attrs) { return paginate(attrs, null); } public Page<Map<String, Object>> paginate(Map<String, Object> attrs, String orderby) { return paginate(this.getClass(), attrs, orderby); } /** * 单表查询,此查询依赖PageContext * * @param cls * @param attrs * @return */ public Page<Map<String, Object>> paginate(Class<? 
extends Model> cls, Map<String, Object> attrs) { return paginate(cls, attrs, null); } /** * paginate(User.class,attrs,"order by in_time") * * @param cls * @param attrs * @param orderby * @return */ public Page<Map<String, Object>> paginate(Class<? extends Model> cls, Map<String, Object> attrs, String orderby) { Table table = TableUtil.getTable(cls); StringBuilder sql = new StringBuilder(); List<Object> paras = new ArrayList(); Config.dialect.forModelFind(table, sql, "*", orderby, attrs, paras); return paginate(PageContext.getPageNumber(), PageContext.getPageSize(), sql.toString(), paras.toArray()); } /** * 分页查询,此查询依赖PageContext * * @param sql * @param paras * @return */ public Page<Map<String, Object>> paginate(String sql, Object... paras) { return paginate(PageContext.getPageNumber(), PageContext.getPageSize(), sql, paras); } /** * 分页查询 * * @param pageNumber * @param pageSize * @param sql * @param paras * @return */ public Page<Map<String, Object>> paginate(int pageNumber, int pageSize, String sql, Object... paras) { if (pageNumber < 1 || pageSize < 1) { throw new MyDbException("pageNumber and pageSize must be more than 0"); } int totalRow = 0; int totalPage = 0; totalRow = getCount(sql, paras); if (totalRow < 1) { return new Page(new ArrayList<Map<String, Object>>(0), pageNumber, pageSize, 0, 0); } totalPage = totalRow / pageSize; if (totalRow % pageSize != 0) { totalPage++; } StringBuilder pageSql = new StringBuilder(); Config.dialect.forPaginate(pageSql, pageNumber, pageSize, sql); List<Map<String, Object>> list = findList(pageSql.toString(), paras); return new Page(list, pageNumber, pageSize, totalPage, totalRow); } /** * 获得总记录数 * * @param sql * @param paras * @return */ public int getCount(String sql, Object... 
paras) { StringBuilder countSql = new StringBuilder(); Config.dialect.forCount(countSql, sql); return jdbcTemplate.queryForObject(countSql.toString(), paras, Integer.class); } /** * 执行新增、更新、删除语句 * * @param sql * @param args * @return */ public int execute(String sql, Object... args) { return jdbcTemplate.update(sql, args); } /** * 批量执行新增、更新、删除语句 * * @param sql * @param bpss * @return */ public int[] batchExecute(String sql, BatchPreparedStatementSetter bpss) { return jdbcTemplate.batchUpdate(sql, bpss); } /** * 批量执行新增、更新、删除语句 * * @param sqls * @return */ public int[] batchExecute(String... sqls) { return jdbcTemplate.batchUpdate(sqls); } /** * 获得当前Model全部字段名 * * @return */ public String getColumnsStr() { return getColumnsStr(this.getClass()); } /** * 通过class获得映射table的字段(如:id,name,age) * * @param cls * @return */ public String getColumnsStr(Class<? extends Model> cls) { Table t = TableUtil.getTable(cls); return t.getColumnsStr(); } /** * 通过class获得映射table的字段(如:id,name,age) * * @param cls * @return */ public String getColumnsStr(Class<? extends Model> cls, String alias) { Table t = TableUtil.getTable(cls); StringBuilder columnsStr = new StringBuilder(); String[] columns = t.getColumns(); for (String col : columns) { columnsStr.append(" ,").append(alias).append(".").append(col); } columnsStr.delete(0, 2); return columnsStr.toString(); } /** * 通过class获得映射表明 * * @param cls * @return */ public String getTableName(Class<? extends Model> cls) { Table t = TableUtil.getTable(cls); return t.getName(); } /** * 获得当前Model表名 * * @return */ public String getTableName() { return getTableName(this.getClass()); } /** * 获得单表查询sql(如:select id, name, age from user) * * @param cls * @return */ public String getSelectSql(Class<? 
extends Model> cls) { StringBuilder sql = new StringBuilder(); sql.append("select ").append(getColumnsStr(cls)); sql.append(" from ").append(getTableName(cls)); return sql.toString(); } /** * 获得带别名的单表查询sql(如:select u.id, u.name, u.age from user u) * * @param cls * @param alias * @return */ public String getSelectSql(Class<? extends Model> cls, String alias) { StringBuilder sql = new StringBuilder(); sql.append("select ").append(getColumnsStr(cls, alias)); sql.append(" from ").append(getTableName(cls)).append(" ").append(alias); return sql.toString(); } /** * 获得带别名的单表查询sql(如:select u.id, u.name, u.age from user u) * * @return */ public String getSelectSql() { return getSelectSql(this.getClass()); } /** * 获得单表查询sql(如:select id, name, age from user) * * @return */ public String getSelectSql(String alias) { return getSelectSql(this.getClass(), alias); } public JdbcTemplate getJdbcTemplate() { return jdbcTemplate; } public void setJdbcTemplate(JdbcTemplate jdbcTemplate) { this.jdbcTemplate = jdbcTemplate; } }
删除方法insertOrUpdate
src/main/java/com/fengjx/ttwx/common/plugin/db/Model.java
删除方法insertOrUpdate
<ide><path>rc/main/java/com/fengjx/ttwx/common/plugin/db/Model.java <ide> } <ide> int result = jdbcTemplate.update(sql.toString(), paras.toArray()); <ide> return result >= 1; <del> } <del> <del> /** <del> * 新增或者更新 <del> * <del> * @param cls <del> * @param attrs <del> * @return <del> */ <del> public boolean insertOrUpdate(Class<? extends Model> cls, Map<String, Object> attrs) { <del> Table table = TableUtil.getTable(cls); <del> String pKey = table.getPrimaryKey(); <del> Object id = attrs.get(pKey); <del> if (null == id || "".equals(id)) { <del> return insert(cls, attrs); <del> } else { <del> return update(cls, attrs); <del> } <del> } <del> <del> /** <del> * 新增或者更新 <del> * <del> * @param attrs <del> * @return <del> */ <del> public boolean insertOrUpdate(Map<String, Object> attrs) { <del> return insertOrUpdate(this.getClass(), attrs); <ide> } <ide> <ide> public Record findById(Object id) {
Java
apache-2.0
f95518a379d670dea5d86a74761be2b1d0c45996
0
tapglue/android_sdk,tapglue/android_sdk
package com.tapglue.sdk; import android.app.Application; import android.test.ApplicationTestCase; import com.tapglue.sdk.entities.Connection; import com.tapglue.sdk.entities.ConnectionList; import com.tapglue.sdk.entities.Friend; import com.tapglue.sdk.entities.User; import com.tapglue.sdk.http.payloads.SocialConnections; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.HashMap; import static com.tapglue.sdk.entities.Connection.Type; import static org.hamcrest.core.IsCollectionContaining.hasItems; import static org.hamcrest.core.IsEqual.equalTo; import static org.hamcrest.core.IsNot.not; import static org.hamcrest.core.IsNull.nullValue; import static org.junit.Assert.assertThat; public class ConnectionIntegrationTest extends ApplicationTestCase<Application>{ private static final String PASSWORD = "superSecretPassword"; private static final String USER_1 = "user1"; private static final String USER_2 = "user2"; Configuration configuration; Tapglue tapglue; User user1 = new User(USER_1, PASSWORD); User user2 = new User(USER_2, PASSWORD); public ConnectionIntegrationTest() { super(Application.class); configuration = new Configuration(TestData.URL, TestData.TOKEN); configuration.setLogging(true); } @Override protected void setUp() throws Exception { super.setUp(); createApplication(); tapglue = new Tapglue(configuration, getContext()); user1 = tapglue.createUser(user1); user2 = tapglue.createUser(user2); } @Override protected void tearDown() throws Exception { tapglue.loginWithUsername(USER_1, PASSWORD); tapglue.deleteCurrentUser(); tapglue.loginWithUsername(USER_2, PASSWORD); tapglue.deleteCurrentUser(); super.tearDown(); } public void testRetrieveUser() throws IOException { user1 = tapglue.loginWithUsername(USER_1, PASSWORD); assertThat(tapglue.retrieveUser(user2.getId()), equalTo(user2)); } public void testRetrieveFollowings() throws IOException { tapglue.loginWithUsername("john", 
PasswordHasher.hashPassword("qwert")); List<User> followings = tapglue.retrieveFollowings(); assertThat(followings.size(), equalTo(5)); } public void testRetrieveFollowers() throws IOException { tapglue.loginWithUsername("john", PasswordHasher.hashPassword("qwert")); List<User> followers = tapglue.retrieveFollowers(); assertThat(followers.size(), equalTo(6)); } public void testRetrieveFriends() throws IOException { tapglue.loginWithUsername("john", PasswordHasher.hashPassword("qwert")); List<User> friends = tapglue.retrieveFriends(); assertThat(friends.size(), equalTo(0)); } public void testCreateConnection() throws IOException { User user1 = new User("createConnectionUser1", PASSWORD); user1 = tapglue.createUser(user1); User user2 = new User("createConnectionUser2", PASSWORD); tapglue.createUser(user2); tapglue.loginWithUsername("createConnectionUser2", PASSWORD); Connection connection = new Connection(user1, Connection.Type.FOLLOW, Connection.State.CONFIRMED); Connection createdConnection = tapglue.createConnection(connection); tapglue.deleteCurrentUser(); tapglue.loginWithUsername("createConnectionUser1", PASSWORD); tapglue.deleteCurrentUser(); } public void testRetrievePendingOutgoingConnections() throws IOException { User user1 = new User("retrievePending1", PASSWORD); tapglue.createUser(user1); user1 = tapglue.loginWithUsername("retrievePending1", PASSWORD); User user2 = new User("retrievePending2", PASSWORD); tapglue.createUser(user2); tapglue.loginWithUsername("retrievePending2", PASSWORD); tapglue.createConnection(new Friend(user1)); ConnectionList connectionList = tapglue.retrievePendingConnections(); assertThat(connectionList.getOutgoingConnections().get(0).getUserTo(), equalTo(user1)); tapglue.deleteCurrentUser(); tapglue.loginWithUsername("retrievePending1", PASSWORD); tapglue.deleteCurrentUser(); } public void testRetrievePendingIncomingConnections() throws IOException { User user1 = new User("retrievePendingInc1", PASSWORD); 
tapglue.createUser(user1); user1 = tapglue.loginWithUsername("retrievePendingInc1", PASSWORD); User user2 = new User("retrievePendingInc2", PASSWORD); tapglue.createUser(user2); user2 = tapglue.loginWithUsername("retrievePendingInc2", PASSWORD); tapglue.createConnection(new Friend(user1)); tapglue.loginWithUsername("retrievePendingInc1", PASSWORD); ConnectionList connectionList = tapglue.retrievePendingConnections(); assertThat(connectionList.getIncomingConnections().get(0).getUserFrom(), equalTo(user2)); tapglue.deleteCurrentUser(); tapglue.loginWithUsername("retrievePendingInc2", PASSWORD); tapglue.deleteCurrentUser(); } public void testRetrieveRejectedConnections() throws IOException { //create user 1 User user1 = new User("retrieveRejected1", PASSWORD); user1 = tapglue.createUser(user1); //create and login user 2 User user2 = new User("retrieveRejected2", PASSWORD); tapglue.createUser(user2); user2 = tapglue.loginWithUsername("retrieveRejected2", PASSWORD); //user 2 sends friend request to user 1 tapglue.createConnection(new Friend(user1)); //login user 1 and retrieve pending connections tapglue.loginWithUsername("retrieveRejected1", PASSWORD); ConnectionList pending = tapglue.retrievePendingConnections(); User pendingUser = pending.getIncomingConnections().get(0).getUserFrom(); //user 1 rejects user 2 friend request tapglue.createConnection(new Connection(pendingUser, Connection.Type.FRIEND, Connection.State.REJECTED)); //login with user 2 tapglue.loginWithUsername("retrieveRejected2", PASSWORD); ConnectionList rejected = tapglue.retrieveRejectedConnections(); assertThat(rejected.getOutgoingConnections().get(0).getUserTo(), equalTo(user1)); tapglue.deleteCurrentUser(); tapglue.loginWithUsername("retrieveRejected1", PASSWORD); tapglue.deleteCurrentUser(); } public void testSocialConnections () throws Exception { Map<String, String> socialIds = new HashMap<>(); String platform = "platformName"; socialIds.put(platform, "id1"); user1.setSocialIds(socialIds); 
tapglue.loginWithUsername(USER_1, PASSWORD); user1 = tapglue.updateCurrentUser(user1); socialIds.put(platform, "id2"); user2.setSocialIds(socialIds); tapglue.loginWithUsername(USER_2, PASSWORD); user2 = tapglue.updateCurrentUser(user2); List<String> socialIdsArray = Arrays.asList("id1"); SocialConnections connections = new SocialConnections(platform, Type.FOLLOW, user2.getSocialIds().get(platform), socialIdsArray); assertThat(tapglue.createSocialConnections(connections), hasItems(user1)); } public void testRefresh() throws Exception { tapglue.loginWithUsername(USER_1, PASSWORD); user1 = tapglue.refreshCurrentUser(); assertThat(user1.getSessionToken(), not(nullValue())); } public void testUserSearch() throws Exception { tapglue.loginWithUsername(USER_1, PASSWORD); List<User> users = tapglue.searchUsers(USER_2); assertThat(users, hasItems(user2)); } public void testUserEmailSearch() throws Exception { user2 = tapglue.loginWithUsername(USER_2, PASSWORD); user2.setEmail("[email protected]"); tapglue.updateCurrentUser(user2); tapglue.loginWithUsername(USER_1, PASSWORD); List<String> emails = Arrays.asList("[email protected]"); List<User> users = tapglue.searchUsersByEmail(emails); assertThat(users, hasItems(user2)); } public void testUserSocialSearch() throws Exception { user2 = tapglue.loginWithUsername(USER_2, PASSWORD); Map<String, String> socialIds = new HashMap<>(); String platform = "facebook"; socialIds.put(platform, "id24"); user2.setSocialIds(socialIds); user2 = tapglue.updateCurrentUser(user2); tapglue.loginWithUsername(USER_1, PASSWORD); List<String> socialIdsArray = Arrays.asList("id24"); List<User> users = tapglue.searchUsersBySocialIds(platform, socialIdsArray); assertThat(users, hasItems(user2)); } }
sdk/src/androidTest/java/com/tapglue/sdk/ConnectionIntegrationTest.java
package com.tapglue.sdk; import android.app.Application; import android.test.ApplicationTestCase; import com.tapglue.sdk.entities.Connection; import com.tapglue.sdk.entities.ConnectionList; import com.tapglue.sdk.entities.Friend; import com.tapglue.sdk.entities.User; import com.tapglue.sdk.http.payloads.SocialConnections; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.HashMap; import static com.tapglue.sdk.entities.Connection.Type; import static org.hamcrest.core.IsCollectionContaining.hasItems; import static org.hamcrest.core.IsEqual.equalTo; import static org.hamcrest.core.IsNot.not; import static org.hamcrest.core.IsNull.nullValue; import static org.junit.Assert.assertThat; public class ConnectionIntegrationTest extends ApplicationTestCase<Application>{ private static final String PASSWORD = "superSecretPassword"; private static final String USER_1 = "user1"; private static final String USER_2 = "user2"; Configuration configuration; Tapglue tapglue; User user1 = new User(USER_1, PASSWORD); User user2 = new User(USER_2, PASSWORD); public ConnectionIntegrationTest() { super(Application.class); configuration = new Configuration(TestData.URL, TestData.TOKEN); configuration.setLogging(true); } @Override protected void setUp() throws Exception { super.setUp(); createApplication(); tapglue = new Tapglue(configuration, getContext()); user1 = tapglue.createUser(user1); user2 = tapglue.createUser(user2); } @Override protected void tearDown() throws Exception { tapglue.loginWithUsername(USER_1, PASSWORD); tapglue.deleteCurrentUser(); tapglue.loginWithUsername(USER_2, PASSWORD); tapglue.deleteCurrentUser(); super.tearDown(); } public void testRetrieveUser() throws IOException { User user = new User("retrieveUserTest", PASSWORD); User createdUser = tapglue.createUser(user); tapglue.loginWithUsername("retrieveUserTest", PASSWORD); assertThat(tapglue.retrieveUser(createdUser.getId()), equalTo(createdUser)); 
tapglue.deleteCurrentUser(); } public void testRetrieveFollowings() throws IOException { tapglue.loginWithUsername("john", PasswordHasher.hashPassword("qwert")); List<User> followings = tapglue.retrieveFollowings(); assertThat(followings.size(), equalTo(5)); } public void testRetrieveFollowers() throws IOException { tapglue.loginWithUsername("john", PasswordHasher.hashPassword("qwert")); List<User> followers = tapglue.retrieveFollowers(); assertThat(followers.size(), equalTo(6)); } public void testRetrieveFriends() throws IOException { tapglue.loginWithUsername("john", PasswordHasher.hashPassword("qwert")); List<User> friends = tapglue.retrieveFriends(); assertThat(friends.size(), equalTo(0)); } public void testCreateConnection() throws IOException { User user1 = new User("createConnectionUser1", PASSWORD); user1 = tapglue.createUser(user1); User user2 = new User("createConnectionUser2", PASSWORD); tapglue.createUser(user2); tapglue.loginWithUsername("createConnectionUser2", PASSWORD); Connection connection = new Connection(user1, Connection.Type.FOLLOW, Connection.State.CONFIRMED); Connection createdConnection = tapglue.createConnection(connection); tapglue.deleteCurrentUser(); tapglue.loginWithUsername("createConnectionUser1", PASSWORD); tapglue.deleteCurrentUser(); } public void testRetrievePendingOutgoingConnections() throws IOException { User user1 = new User("retrievePending1", PASSWORD); tapglue.createUser(user1); user1 = tapglue.loginWithUsername("retrievePending1", PASSWORD); User user2 = new User("retrievePending2", PASSWORD); tapglue.createUser(user2); tapglue.loginWithUsername("retrievePending2", PASSWORD); tapglue.createConnection(new Friend(user1)); ConnectionList connectionList = tapglue.retrievePendingConnections(); assertThat(connectionList.getOutgoingConnections().get(0).getUserTo(), equalTo(user1)); tapglue.deleteCurrentUser(); tapglue.loginWithUsername("retrievePending1", PASSWORD); tapglue.deleteCurrentUser(); } public void 
testRetrievePendingIncomingConnections() throws IOException { User user1 = new User("retrievePendingInc1", PASSWORD); tapglue.createUser(user1); user1 = tapglue.loginWithUsername("retrievePendingInc1", PASSWORD); User user2 = new User("retrievePendingInc2", PASSWORD); tapglue.createUser(user2); user2 = tapglue.loginWithUsername("retrievePendingInc2", PASSWORD); tapglue.createConnection(new Friend(user1)); tapglue.loginWithUsername("retrievePendingInc1", PASSWORD); ConnectionList connectionList = tapglue.retrievePendingConnections(); assertThat(connectionList.getIncomingConnections().get(0).getUserFrom(), equalTo(user2)); tapglue.deleteCurrentUser(); tapglue.loginWithUsername("retrievePendingInc2", PASSWORD); tapglue.deleteCurrentUser(); } public void testRetrieveRejectedConnections() throws IOException { //create user 1 User user1 = new User("retrieveRejected1", PASSWORD); user1 = tapglue.createUser(user1); //create and login user 2 User user2 = new User("retrieveRejected2", PASSWORD); tapglue.createUser(user2); user2 = tapglue.loginWithUsername("retrieveRejected2", PASSWORD); //user 2 sends friend request to user 1 tapglue.createConnection(new Friend(user1)); //login user 1 and retrieve pending connections tapglue.loginWithUsername("retrieveRejected1", PASSWORD); ConnectionList pending = tapglue.retrievePendingConnections(); User pendingUser = pending.getIncomingConnections().get(0).getUserFrom(); //user 1 rejects user 2 friend request tapglue.createConnection(new Connection(pendingUser, Connection.Type.FRIEND, Connection.State.REJECTED)); //login with user 2 tapglue.loginWithUsername("retrieveRejected2", PASSWORD); ConnectionList rejected = tapglue.retrieveRejectedConnections(); assertThat(rejected.getOutgoingConnections().get(0).getUserTo(), equalTo(user1)); tapglue.deleteCurrentUser(); tapglue.loginWithUsername("retrieveRejected1", PASSWORD); tapglue.deleteCurrentUser(); } public void testSocialConnections () throws Exception { Map<String, String> socialIds = 
new HashMap<>(); String platform = "platformName"; socialIds.put(platform, "id1"); user1.setSocialIds(socialIds); tapglue.loginWithUsername(USER_1, PASSWORD); user1 = tapglue.updateCurrentUser(user1); socialIds.put(platform, "id2"); user2.setSocialIds(socialIds); tapglue.loginWithUsername(USER_2, PASSWORD); user2 = tapglue.updateCurrentUser(user2); List<String> socialIdsArray = Arrays.asList("id1"); SocialConnections connections = new SocialConnections(platform, Type.FOLLOW, user2.getSocialIds().get(platform), socialIdsArray); assertThat(tapglue.createSocialConnections(connections), hasItems(user1)); } public void testRefresh() throws Exception { tapglue.loginWithUsername(USER_1, PASSWORD); user1 = tapglue.refreshCurrentUser(); assertThat(user1.getSessionToken(), not(nullValue())); } public void testUserSearch() throws Exception { tapglue.loginWithUsername(USER_1, PASSWORD); List<User> users = tapglue.searchUsers(USER_2); assertThat(users, hasItems(user2)); } public void testUserEmailSearch() throws Exception { user2 = tapglue.loginWithUsername(USER_2, PASSWORD); user2.setEmail("[email protected]"); tapglue.updateCurrentUser(user2); tapglue.loginWithUsername(USER_1, PASSWORD); List<String> emails = Arrays.asList("[email protected]"); List<User> users = tapglue.searchUsersByEmail(emails); assertThat(users, hasItems(user2)); } public void testUserSocialSearch() throws Exception { user2 = tapglue.loginWithUsername(USER_2, PASSWORD); Map<String, String> socialIds = new HashMap<>(); String platform = "facebook"; socialIds.put(platform, "id24"); user2.setSocialIds(socialIds); user2 = tapglue.updateCurrentUser(user2); tapglue.loginWithUsername(USER_1, PASSWORD); List<String> socialIdsArray = Arrays.asList("id24"); List<User> users = tapglue.searchUsersBySocialIds(platform, socialIdsArray); assertThat(users, hasItems(user2)); } }
Fix retrieveUser acceptance test
sdk/src/androidTest/java/com/tapglue/sdk/ConnectionIntegrationTest.java
Fix retrieveUser acceptance test
<ide><path>dk/src/androidTest/java/com/tapglue/sdk/ConnectionIntegrationTest.java <ide> } <ide> <ide> public void testRetrieveUser() throws IOException { <del> User user = new User("retrieveUserTest", PASSWORD); <del> User createdUser = tapglue.createUser(user); <del> tapglue.loginWithUsername("retrieveUserTest", PASSWORD); <del> <del> assertThat(tapglue.retrieveUser(createdUser.getId()), equalTo(createdUser)); <del> <del> tapglue.deleteCurrentUser(); <add> user1 = tapglue.loginWithUsername(USER_1, PASSWORD); <add> <add> assertThat(tapglue.retrieveUser(user2.getId()), equalTo(user2)); <ide> } <ide> <ide> public void testRetrieveFollowings() throws IOException {
JavaScript
mit
ec641a78986886e8d2f99e3f640017db1dd43797
0
jimbolla/react-redux,gnoff/react-redux,rackt/react-redux,gaearon/react-redux,reactjs/react-redux
// encapsulates the subscription logic for connecting a component to the redux store, as // well as nesting subscriptions of descendant components, so that we can ensure the // ancestor components re-render before descendants function initListeners() { let current = [] let next = [] return { clear() { next = null current = null }, notify() { current = next for (let i = 0; i < current.length; i++) { current[i]() } }, subscribe(listener) { let isSubscribed = true if (next === current) next = current.slice() next.push(listener) return function unsubscribe() { if (!isSubscribed || !current) return isSubscribed = false if (next === current) next = current.slice() next.splice(next.indexOf(listener), 1) } } } } export default class Subscription { constructor(store, parentSub) { this.subscribe = parentSub ? parentSub.addNestedSub.bind(parentSub) : store.subscribe.bind(store) this.unsubscribe = null this.listeners = initListeners() } addNestedSub(listener) { this.trySubscribe() return this.listeners.subscribe(listener) } notifyNestedSubs() { this.listeners.notify() } isSubscribed() { return Boolean(this.unsubscribe) } trySubscribe() { if (!this.unsubscribe) { this.unsubscribe = this.subscribe(this.onStateChange) } } tryUnsubscribe() { if (this.unsubscribe) { this.unsubscribe() this.listeners.clear() } this.unsubscribe = null this.subscribe = null this.listeners = { notify() {} } } }
src/utils/Subscription.js
// encapsulates the subscription logic for connecting a component to the redux store, as // well as nesting subscriptions of descendant components, so that we can ensure the // ancestor components re-render before descendants function initListeners() { let count = 0 let current = [] let next = [] return { clear() { count = 0 next = null current = null }, notify() { current = next for (let i = 0; i < count; i++) { current[i]() } }, subscribe(listener) { let isSubscribed = true if (next === current) next = current.slice() next.push(listener) count++ return function unsubscribe() { if (!isSubscribed || count === 0) return isSubscribed = false if (next === current) next = current.slice() next.splice(next.indexOf(listener), 1) count-- } } } } export default class Subscription { constructor(store, parentSub) { this.subscribe = parentSub ? parentSub.addNestedSub.bind(parentSub) : store.subscribe.bind(store) this.unsubscribe = null this.listeners = initListeners() } addNestedSub(listener) { this.trySubscribe() return this.listeners.subscribe(listener) } notifyNestedSubs() { this.listeners.notify() } isSubscribed() { return Boolean(this.unsubscribe) } trySubscribe() { if (!this.unsubscribe) { this.unsubscribe = this.subscribe(this.onStateChange) } } tryUnsubscribe() { if (this.unsubscribe) { this.unsubscribe() this.listeners.clear() } this.unsubscribe = null this.subscribe = null this.listeners = { notify() {} } } }
refactors out count variable in Subscription.js
src/utils/Subscription.js
refactors out count variable in Subscription.js
<ide><path>rc/utils/Subscription.js <ide> // ancestor components re-render before descendants <ide> <ide> function initListeners() { <del> let count = 0 <ide> let current = [] <ide> let next = [] <ide> <ide> return { <ide> clear() { <del> count = 0 <ide> next = null <ide> current = null <ide> }, <ide> <ide> notify() { <ide> current = next <del> for (let i = 0; i < count; i++) { <add> for (let i = 0; i < current.length; i++) { <ide> current[i]() <ide> } <ide> }, <ide> let isSubscribed = true <ide> if (next === current) next = current.slice() <ide> next.push(listener) <del> count++ <ide> <ide> return function unsubscribe() { <del> if (!isSubscribed || count === 0) return <add> if (!isSubscribed || !current) return <ide> isSubscribed = false <ide> <ide> if (next === current) next = current.slice() <ide> next.splice(next.indexOf(listener), 1) <del> count-- <ide> } <ide> } <ide> }
Java
mit
0bc580b66699bc53104b93f9a30c30d2463ce70a
0
seece/yotris
package yotris.util; import java.util.ArrayList; public class ScoreDAO { private String filepath; private ArrayList<ScoreEntry> scorelist; public ScoreDAO() { this("data/scores.dat"); } public ScoreDAO(String filepath) { this.filepath = filepath; this.scorelist = new ArrayList<>(); } public ArrayList<ScoreEntry> getScorelist() { return scorelist; } public boolean saveScorelist() { return true; } }
yotris/src/yotris/util/ScoreDAO.java
package yotris.util; import java.util.ArrayList; public class ScoreDAO { private String filepath; private ArrayList<ScoreEntry> scorelist; public ScoreDAO() { this("data/scores.dat"); } public ScoreDAO(String filepath) { this.filepath = filepath; this.scorelist = new ArrayList<>(); } public ArrayList<ScoreEntry> getScorelist() { return scorelist; } }
add mockup saving
yotris/src/yotris/util/ScoreDAO.java
add mockup saving
<ide><path>otris/src/yotris/util/ScoreDAO.java <ide> return scorelist; <ide> } <ide> <add> public boolean saveScorelist() { <add> return true; <add> } <add> <add> <ide> <ide> }
Java
mit
8301a11f6bb77fc4f961877270249b30a65faf86
0
mickleness/pumpernickel,mickleness/pumpernickel,mickleness/pumpernickel
/** * This software is released as part of the Pumpernickel project. * * All com.pump resources in the Pumpernickel project are distributed under the * MIT License: * https://raw.githubusercontent.com/mickleness/pumpernickel/master/License.txt * * More information about the Pumpernickel project is available here: * https://mickleness.github.io/pumpernickel/ */ package com.pump.io.location; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.MalformedURLException; import java.net.URL; import javax.swing.Icon; import javax.swing.SwingUtilities; import com.pump.icon.FileIcon; import com.pump.io.IOUtils; import com.pump.swing.BasicCancellable; import com.pump.swing.Cancellable; import com.pump.util.Receiver; /** * An <code>IOLocation</code> that is associated with a * <code>java.io.File</code>. * * <p> * These can only be instantiated by a <code>LocationFactory</code>. 
* * @see com.pump.io.location.LocationFactory */ public class FileLocation extends CachedLocation { protected final File file; protected FileLocation(File f) { if (f == null) throw new NullPointerException(); File choice; try { choice = f.getCanonicalFile(); if (choice == null) { throw new NullPointerException(); } } catch (IOException e) { choice = f; } file = choice; if (file == null) { throw new NullPointerException(); } } @Override protected String doGetPath() { return file.toURI().toString(); } @Override public String getParentPath() { String path = file.toURI().toString(); if (path.endsWith("/")) path = path.substring(0, path.length() - 1); int i = path.lastIndexOf('/'); if (i == -1) return null; String returnValue = path.substring(0, i); if (returnValue.equals("file:")) return null; return path.substring(0, i); } public File getFile() { return file; } @Override protected boolean doCanWrite() { boolean returnValue = file.canWrite(); if (file.isDirectory()) { // the following is not safe in a sandbox File[] children = file.listFiles(); if (children == null) returnValue = false; } return returnValue; } @Override protected boolean doCanRead() { boolean returnValue = file.canRead(); if (file.isDirectory()) { // the following is not safe in a sandbox File[] children = file.listFiles(); if (children == null) returnValue = false; } return returnValue; } @Override protected long doGetModificationDate() { return file.lastModified(); } @Override public boolean equals(Object obj) { if (obj != null && obj.getClass().equals(this.getClass())) { FileLocation fl = (FileLocation) obj; return fl.file.equals(file); } return false; } @Override public IOLocation getChild(String name) throws IOException { IOLocation returnValue = super.getChild(name); if (returnValue != null) return returnValue; File newFile = new File(file, name); return LocationFactory.get().create(newFile); } @Override public void mkdir() throws MakeDirectoryException { try { if (file.mkdir() == false) { if 
(file.exists() && file.isDirectory()) { // what the heck? mkdir returns false but the operation // worked...? return; } // hey, I wish I knew more details too. Don't blame me. throw new MakeDirectoryException( "could not create directory \"" + getURL() + "\""); } } finally { flush(); } } @Override public InputStream createInputStream() throws IOException { return new FileInputStream(file); } /** * Returns the parent of a File. * <P> * The method <code>java.io.File.getParentFile()</code> can mysteriously * return <code>null</code> on Vista for files that have a very real parent. * (This was first discovered for * "C:\Users\jeremy\workspace\Tech4Learning\Tech4Learning.pref".) * * @param f * @return */ private static File getParentFile(File f) { File parent = f.getParentFile(); if (parent != null) { return parent; } String path = f.getAbsolutePath(); int i = path.lastIndexOf(File.separator); if (i == 0) { return null; } else if (i != -1) { path = path.substring(0, i); File returnValue = new File(path); /** * On Vista if you ask for the parent of "C:\" then the code above * will create a file for "C:". But if you call getAbsolutePath() on * this file, it will return your classpath. Sooo... here's an * attempted workaround for this case: */ if (returnValue.getPath().equals(returnValue.getAbsolutePath()) == false) { // A lack of a separator says we're pretty near the root level // anyway, right? 
if (path.indexOf(File.separator) == -1) return null; } return returnValue; } return null; } @Override public OutputStream createOutputStream() throws MissingParentException, FileCreationException, IOException { File parent = getParentFile(file); if (parent != null) { parent.mkdirs(); } if (parent.exists() == false) throw new MissingParentException(); if (file.exists() == false && file.createNewFile() == false) throw new FileCreationException("the file \"" + getPath() + "\" could not be created"); if (file.exists() == false) throw new FileCreationException("the file \"" + getPath() + "\" did not exist"); flush(); return new FileOutputStream(file); } @Override protected String doGetName() { String name = file.getName(); if (name.equals("") && file.getAbsolutePath().equals("/")) { // special case for UNIX/Macs: File v = new File("/Volumes/"); File[] volumes = v.listFiles(); try { for (int a = 0; volumes != null && a < volumes.length; a++) { if (volumes[a].getCanonicalFile().equals(file)) { return volumes[a].getName(); } } } catch (IOException e) { e.printStackTrace(); } } return name; } @Override public IOLocation getParent() { File parent = getParentFile(file); if (parent != null) { return LocationFactory.get().create(parent); } return null; } @Override protected boolean doIsDirectory() { return file.isDirectory(); } @Override protected boolean doIsNavigable() { return isDefaultNavigable(this); } @Override public Icon getIcon(BasicCancellable cancellable) { final Icon[] iconWrapper = new Icon[] { null }; final Throwable[] errorWrapper = new Throwable[] { null }; Runnable runnable = new Runnable() { public void run() { try { iconWrapper[0] = FileIcon.getIcon(file); } catch (Throwable t) { errorWrapper[0] = t; } finally { synchronized (iconWrapper) { iconWrapper.notify(); } } } }; if (SwingUtilities.isEventDispatchThread()) { runnable.run(); } else { SwingUtilities.invokeLater(runnable); while (iconWrapper[0] == null && errorWrapper[0] == null) { synchronized 
(iconWrapper) { try { iconWrapper.wait(); } catch (InterruptedException e) { } } } } if (errorWrapper[0] instanceof RuntimeException) { throw (RuntimeException) errorWrapper[0]; } else if (errorWrapper[0] instanceof Error) { throw (Error) errorWrapper[0]; } return iconWrapper[0]; } @Override protected void doListChildren(Receiver<IOLocation> receiver, Cancellable cancellable) { File[] files = file.listFiles(); if (files == null) files = new File[] {}; for (int a = 0; a < files.length; a++) { if (cancellable != null && cancellable.isCancelled()) return; IOLocation loc = LocationFactory.get().create(files[a]); receiver.add(loc); } } @Override protected boolean doIsHidden() { return file.isHidden(); } @Override public IOLocation setName(String s) throws SetNameException { try { File dest = new File(getParentFile(file), s); if (file.renameTo(dest)) return LocationFactory.get().create(dest); throw new SetNameException("renaming \"" + file.getAbsolutePath() + "\" failed"); } finally { flush(); } } @Override public void delete() throws IOException { try { file.delete(); if (file.exists()) throw new DeleteException("the file \"" + getPath() + "\" still exists"); } finally { flush(); } } @Override protected boolean doExists() { return file.exists(); } @Override protected long doLength() { return file.length(); } @Override public int hashCode() { return file.hashCode(); } @Override public String toString() { return "FileLocation[ path = \"" + file.getAbsolutePath() + "\" ]"; } @Override protected boolean doIsAlias() { return IOUtils.isAlias(file); } public URL getURL() { try { // Mike assures me that by calling toURI().toURL() we're encoding // spaces correctly automatically return file.toURI().toURL(); } catch (MalformedURLException e) { e.printStackTrace(); return null; } } }
pump-location/src/main/java/com/pump/io/location/FileLocation.java
/** * This software is released as part of the Pumpernickel project. * * All com.pump resources in the Pumpernickel project are distributed under the * MIT License: * https://raw.githubusercontent.com/mickleness/pumpernickel/master/License.txt * * More information about the Pumpernickel project is available here: * https://mickleness.github.io/pumpernickel/ */ package com.pump.io.location; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.MalformedURLException; import java.net.URL; import javax.swing.Icon; import javax.swing.SwingUtilities; import com.pump.icon.FileIcon; import com.pump.io.IOUtils; import com.pump.swing.Cancellable; import com.pump.util.Receiver; /** An <code>IOLocation</code> that is associated with a <code>java.io.File</code>. * * <p>These can only be instantiated by a <code>LocationFactory</code>. * * @see com.pump.io.location.LocationFactory */ public class FileLocation extends CachedLocation { protected final File file; protected FileLocation(File f) { if(f==null) throw new NullPointerException(); File choice; try { choice = f.getCanonicalFile(); if(choice==null) { throw new NullPointerException(); } } catch (IOException e) { choice = f; } file = choice; if(file==null) { throw new NullPointerException(); } } @Override protected String doGetPath() { return file.toURI().toString(); } @Override public String getParentPath() { String path = file.toURI().toString(); if(path.endsWith("/")) path = path.substring(0,path.length()-1); int i = path.lastIndexOf('/'); if(i==-1) return null; String returnValue = path.substring(0,i); if(returnValue.equals("file:")) return null; return path.substring(0,i); } public File getFile() { return file; } @Override protected boolean doCanWrite() { boolean returnValue = file.canWrite(); if(file.isDirectory()) { //the following is not safe in a sandbox File[] children = 
file.listFiles(); if(children==null) returnValue = false; } return returnValue; } @Override protected boolean doCanRead() { boolean returnValue = file.canRead(); if(file.isDirectory()) { //the following is not safe in a sandbox File[] children = file.listFiles(); if(children==null) returnValue = false; } return returnValue; } @Override protected long doGetModificationDate() { return file.lastModified(); } @Override public boolean equals(Object obj) { if(obj!=null && obj.getClass().equals(this.getClass())) { FileLocation fl = (FileLocation)obj; return fl.file.equals(file); } return false; } @Override public IOLocation getChild(String name) throws IOException { IOLocation returnValue = super.getChild(name); if(returnValue!=null) return returnValue; File newFile = new File(file,name); return LocationFactory.get().create(newFile); } @Override public void mkdir() throws MakeDirectoryException { try { if(file.mkdir()==false) { if(file.exists() && file.isDirectory()) { //what the heck? mkdir returns false but the operation worked...? return; } //hey, I wish I knew more details too. Don't blame me. throw new MakeDirectoryException("could not create directory \""+getURL()+"\""); } } finally { flush(); } } @Override public InputStream createInputStream() throws IOException { return new FileInputStream(file); } /** Returns the parent of a File. * <P>The method <code>java.io.File.getParentFile()</code> can * mysteriously return <code>null</code> on Vista for files * that have a very real parent. (This was first discovered for * "C:\Users\jeremy\workspace\Tech4Learning\Tech4Learning.pref".) 
* @param f * @return */ private static File getParentFile(File f) { File parent = f.getParentFile(); if(parent!=null) { return parent; } String path = f.getAbsolutePath(); int i = path.lastIndexOf(File.separator); if(i==0) { return null; } else if(i!=-1) { path = path.substring(0,i); File returnValue = new File(path); /** On Vista if you ask for the parent of "C:\" then * the code above will create a file for "C:". * But if you call getAbsolutePath() on this file, it will * return your classpath. Sooo... here's an attempted * workaround for this case: */ if(returnValue.getPath().equals(returnValue.getAbsolutePath())==false) { //A lack of a separator says we're pretty near the root level anyway, right? if(path.indexOf(File.separator)==-1) return null; } return returnValue; } return null; } @Override public OutputStream createOutputStream() throws MissingParentException, FileCreationException, IOException { File parent = getParentFile(file); if(parent!=null) { parent.mkdirs(); } if(parent.exists()==false) throw new MissingParentException(); if(file.exists()==false && file.createNewFile()==false) throw new FileCreationException("the file \""+getPath()+"\" could not be created"); if(file.exists()==false) throw new FileCreationException("the file \""+getPath()+"\" did not exist"); flush(); return new FileOutputStream(file); } @Override protected String doGetName() { String name = file.getName(); if(name.equals("") && file.getAbsolutePath().equals("/")) { //special case for UNIX/Macs: File v = new File("/Volumes/"); File[] volumes = v.listFiles(); try { for(int a = 0; volumes!=null && a<volumes.length; a++) { if(volumes[a].getCanonicalFile().equals(file)) { return volumes[a].getName(); } } } catch(IOException e) { e.printStackTrace(); } } return name; } @Override public IOLocation getParent() { File parent = getParentFile(file); if(parent!=null) { return LocationFactory.get().create(parent); } return null; } @Override protected boolean doIsDirectory() { return 
file.isDirectory(); } @Override protected boolean doIsNavigable() { return isDefaultNavigable(this); } public Icon getIcon() { final Icon[] iconWrapper = new Icon[] { null }; final Throwable[] errorWrapper = new Throwable[] { null }; Runnable runnable = new Runnable() { public void run() { try { iconWrapper[0] = FileIcon.getIcon(file); } catch(Throwable t) { errorWrapper[0] = t; } finally { synchronized(iconWrapper) { iconWrapper.notify(); } } } }; if(SwingUtilities.isEventDispatchThread()) { runnable.run(); } else { SwingUtilities.invokeLater(runnable); while(iconWrapper[0]==null && errorWrapper[0]==null) { synchronized(iconWrapper) { try { iconWrapper.wait(); } catch (InterruptedException e) {} } } } if(errorWrapper[0] instanceof RuntimeException) { throw (RuntimeException)errorWrapper[0]; } else if(errorWrapper[0] instanceof Error) { throw (Error)errorWrapper[0]; } return iconWrapper[0]; } @Override protected void doListChildren(Receiver<IOLocation> receiver,Cancellable cancellable) { File[] files = file.listFiles(); if(files==null) files = new File[] {}; for(int a = 0; a<files.length; a++) { if(cancellable!=null && cancellable.isCancelled()) return; IOLocation loc = LocationFactory.get().create(files[a]); receiver.add(loc); } } @Override protected boolean doIsHidden() { return file.isHidden(); } @Override public IOLocation setName(String s) throws SetNameException { try { File dest = new File(getParentFile(file), s); if(file.renameTo(dest)) return LocationFactory.get().create(dest); throw new SetNameException("renaming \""+file.getAbsolutePath()+"\" failed"); } finally { flush(); } } @Override public void delete() throws IOException { try { file.delete(); if(file.exists()) throw new DeleteException("the file \""+getPath()+"\" still exists"); } finally { flush(); } } @Override protected boolean doExists() { return file.exists(); } @Override protected long doLength() { return file.length(); } @Override public int hashCode() { return file.hashCode(); } @Override 
public String toString() { return "FileLocation[ path = \""+file.getAbsolutePath()+"\" ]"; } @Override protected boolean doIsAlias() { return IOUtils.isAlias(file); } public URL getURL() { try { //Mike assures me that by calling toURI().toURL() we're encoding spaces correctly automatically return file.toURI().toURL(); } catch (MalformedURLException e) { e.printStackTrace(); return null; } } }
Fixing getIcon(..) method so it correctly consults FileIcon
pump-location/src/main/java/com/pump/io/location/FileLocation.java
Fixing getIcon(..) method so it correctly consults FileIcon
<ide><path>ump-location/src/main/java/com/pump/io/location/FileLocation.java <ide> <ide> import com.pump.icon.FileIcon; <ide> import com.pump.io.IOUtils; <add>import com.pump.swing.BasicCancellable; <ide> import com.pump.swing.Cancellable; <ide> import com.pump.util.Receiver; <ide> <del>/** An <code>IOLocation</code> that is associated with a <code>java.io.File</code>. <add>/** <add> * An <code>IOLocation</code> that is associated with a <add> * <code>java.io.File</code>. <ide> * <del> * <p>These can only be instantiated by a <code>LocationFactory</code>. <add> * <p> <add> * These can only be instantiated by a <code>LocationFactory</code>. <ide> * <ide> * @see com.pump.io.location.LocationFactory <ide> */ <ide> public class FileLocation extends CachedLocation { <del> <del> protected final File file; <del> protected FileLocation(File f) { <del> if(f==null) throw new NullPointerException(); <del> File choice; <add> <add> protected final File file; <add> <add> protected FileLocation(File f) { <add> if (f == null) <add> throw new NullPointerException(); <add> File choice; <add> try { <add> choice = f.getCanonicalFile(); <add> if (choice == null) { <add> throw new NullPointerException(); <add> } <add> } catch (IOException e) { <add> choice = f; <add> } <add> file = choice; <add> if (file == null) { <add> throw new NullPointerException(); <add> } <add> } <add> <add> @Override <add> protected String doGetPath() { <add> return file.toURI().toString(); <add> } <add> <add> @Override <add> public String getParentPath() { <add> String path = file.toURI().toString(); <add> if (path.endsWith("/")) <add> path = path.substring(0, path.length() - 1); <add> int i = path.lastIndexOf('/'); <add> if (i == -1) <add> return null; <add> <add> String returnValue = path.substring(0, i); <add> if (returnValue.equals("file:")) <add> return null; <add> return path.substring(0, i); <add> } <add> <add> public File getFile() { <add> return file; <add> } <add> <add> @Override <add> protected 
boolean doCanWrite() { <add> boolean returnValue = file.canWrite(); <add> if (file.isDirectory()) { <add> // the following is not safe in a sandbox <add> File[] children = file.listFiles(); <add> if (children == null) <add> returnValue = false; <add> } <add> return returnValue; <add> } <add> <add> @Override <add> protected boolean doCanRead() { <add> boolean returnValue = file.canRead(); <add> if (file.isDirectory()) { <add> // the following is not safe in a sandbox <add> File[] children = file.listFiles(); <add> if (children == null) <add> returnValue = false; <add> } <add> return returnValue; <add> } <add> <add> @Override <add> protected long doGetModificationDate() { <add> return file.lastModified(); <add> } <add> <add> @Override <add> public boolean equals(Object obj) { <add> if (obj != null && obj.getClass().equals(this.getClass())) { <add> FileLocation fl = (FileLocation) obj; <add> return fl.file.equals(file); <add> } <add> return false; <add> } <add> <add> @Override <add> public IOLocation getChild(String name) throws IOException { <add> IOLocation returnValue = super.getChild(name); <add> if (returnValue != null) <add> return returnValue; <add> <add> File newFile = new File(file, name); <add> return LocationFactory.get().create(newFile); <add> } <add> <add> @Override <add> public void mkdir() throws MakeDirectoryException { <add> try { <add> if (file.mkdir() == false) { <add> if (file.exists() && file.isDirectory()) { <add> // what the heck? mkdir returns false but the operation <add> // worked...? <add> return; <add> } <add> // hey, I wish I knew more details too. Don't blame me. <add> throw new MakeDirectoryException( <add> "could not create directory \"" + getURL() + "\""); <add> } <add> } finally { <add> flush(); <add> } <add> } <add> <add> @Override <add> public InputStream createInputStream() throws IOException { <add> return new FileInputStream(file); <add> } <add> <add> /** <add> * Returns the parent of a File. 
<add> * <P> <add> * The method <code>java.io.File.getParentFile()</code> can mysteriously <add> * return <code>null</code> on Vista for files that have a very real parent. <add> * (This was first discovered for <add> * "C:\Users\jeremy\workspace\Tech4Learning\Tech4Learning.pref".) <add> * <add> * @param f <add> * @return <add> */ <add> private static File getParentFile(File f) { <add> File parent = f.getParentFile(); <add> if (parent != null) { <add> return parent; <add> } <add> <add> String path = f.getAbsolutePath(); <add> int i = path.lastIndexOf(File.separator); <add> if (i == 0) { <add> return null; <add> } else if (i != -1) { <add> path = path.substring(0, i); <add> File returnValue = new File(path); <add> <add> /** <add> * On Vista if you ask for the parent of "C:\" then the code above <add> * will create a file for "C:". But if you call getAbsolutePath() on <add> * this file, it will return your classpath. Sooo... here's an <add> * attempted workaround for this case: <add> */ <add> if (returnValue.getPath().equals(returnValue.getAbsolutePath()) == false) { <add> // A lack of a separator says we're pretty near the root level <add> // anyway, right? 
<add> if (path.indexOf(File.separator) == -1) <add> return null; <add> } <add> return returnValue; <add> } <add> return null; <add> } <add> <add> @Override <add> public OutputStream createOutputStream() throws MissingParentException, <add> FileCreationException, IOException { <add> File parent = getParentFile(file); <add> if (parent != null) { <add> parent.mkdirs(); <add> } <add> if (parent.exists() == false) <add> throw new MissingParentException(); <add> if (file.exists() == false && file.createNewFile() == false) <add> throw new FileCreationException("the file \"" + getPath() <add> + "\" could not be created"); <add> if (file.exists() == false) <add> throw new FileCreationException("the file \"" + getPath() <add> + "\" did not exist"); <add> flush(); <add> return new FileOutputStream(file); <add> } <add> <add> @Override <add> protected String doGetName() { <add> String name = file.getName(); <add> if (name.equals("") && file.getAbsolutePath().equals("/")) { <add> // special case for UNIX/Macs: <add> File v = new File("/Volumes/"); <add> File[] volumes = v.listFiles(); <add> try { <add> for (int a = 0; volumes != null && a < volumes.length; a++) { <add> if (volumes[a].getCanonicalFile().equals(file)) { <add> return volumes[a].getName(); <add> } <add> } <add> } catch (IOException e) { <add> e.printStackTrace(); <add> } <add> } <add> return name; <add> } <add> <add> @Override <add> public IOLocation getParent() { <add> File parent = getParentFile(file); <add> if (parent != null) { <add> return LocationFactory.get().create(parent); <add> } <add> return null; <add> } <add> <add> @Override <add> protected boolean doIsDirectory() { <add> return file.isDirectory(); <add> } <add> <add> @Override <add> protected boolean doIsNavigable() { <add> return isDefaultNavigable(this); <add> } <add> <add> @Override <add> public Icon getIcon(BasicCancellable cancellable) { <add> final Icon[] iconWrapper = new Icon[] { null }; <add> final Throwable[] errorWrapper = new Throwable[] { 
null }; <add> Runnable runnable = new Runnable() { <add> public void run() { <ide> try { <del> choice = f.getCanonicalFile(); <del> if(choice==null) { <del> throw new NullPointerException(); <del> } <del> } catch (IOException e) { <del> choice = f; <add> iconWrapper[0] = FileIcon.getIcon(file); <add> } catch (Throwable t) { <add> errorWrapper[0] = t; <add> } finally { <add> synchronized (iconWrapper) { <add> iconWrapper.notify(); <add> } <ide> } <del> file = choice; <del> if(file==null) { <del> throw new NullPointerException(); <add> } <add> }; <add> if (SwingUtilities.isEventDispatchThread()) { <add> runnable.run(); <add> } else { <add> SwingUtilities.invokeLater(runnable); <add> while (iconWrapper[0] == null && errorWrapper[0] == null) { <add> synchronized (iconWrapper) { <add> try { <add> iconWrapper.wait(); <add> } catch (InterruptedException e) { <add> } <ide> } <del> } <del> <del> @Override <del> protected String doGetPath() { <del> return file.toURI().toString(); <del> } <del> <del> @Override <del> public String getParentPath() { <del> String path = file.toURI().toString(); <del> if(path.endsWith("/")) <del> path = path.substring(0,path.length()-1); <del> int i = path.lastIndexOf('/'); <del> if(i==-1) return null; <del> <del> String returnValue = path.substring(0,i); <del> if(returnValue.equals("file:")) <del> return null; <del> return path.substring(0,i); <del> } <del> <del> public File getFile() { <del> return file; <del> } <del> <del> @Override <del> protected boolean doCanWrite() { <del> boolean returnValue = file.canWrite(); <del> if(file.isDirectory()) { <del> //the following is not safe in a sandbox <del> File[] children = file.listFiles(); <del> if(children==null) <del> returnValue = false; <del> } <del> return returnValue; <del> } <del> <del> @Override <del> protected boolean doCanRead() { <del> boolean returnValue = file.canRead(); <del> if(file.isDirectory()) { <del> //the following is not safe in a sandbox <del> File[] children = 
file.listFiles(); <del> if(children==null) <del> returnValue = false; <del> } <del> return returnValue; <del> } <del> <del> @Override <del> protected long doGetModificationDate() { <del> return file.lastModified(); <del> } <del> <del> @Override <del> public boolean equals(Object obj) { <del> if(obj!=null && obj.getClass().equals(this.getClass())) { <del> FileLocation fl = (FileLocation)obj; <del> return fl.file.equals(file); <del> } <del> return false; <del> } <del> <del> @Override <del> public IOLocation getChild(String name) throws IOException { <del> IOLocation returnValue = super.getChild(name); <del> if(returnValue!=null) return returnValue; <del> <del> File newFile = new File(file,name); <del> return LocationFactory.get().create(newFile); <del> } <del> <del> @Override <del> public void mkdir() throws MakeDirectoryException { <del> try { <del> if(file.mkdir()==false) { <del> if(file.exists() && file.isDirectory()) { <del> //what the heck? mkdir returns false but the operation worked...? <del> return; <del> } <del> //hey, I wish I knew more details too. Don't blame me. <del> throw new MakeDirectoryException("could not create directory \""+getURL()+"\""); <del> } <del> } finally { <del> flush(); <del> } <del> } <del> <del> @Override <del> public InputStream createInputStream() throws IOException { <del> return new FileInputStream(file); <del> } <del> <del> /** Returns the parent of a File. <del> * <P>The method <code>java.io.File.getParentFile()</code> can <del> * mysteriously return <code>null</code> on Vista for files <del> * that have a very real parent. (This was first discovered for <del> * "C:\Users\jeremy\workspace\Tech4Learning\Tech4Learning.pref".) 
<del> * @param f <del> * @return <del> */ <del> private static File getParentFile(File f) { <del> File parent = f.getParentFile(); <del> if(parent!=null) { <del> return parent; <del> } <del> <del> String path = f.getAbsolutePath(); <del> int i = path.lastIndexOf(File.separator); <del> if(i==0) { <del> return null; <del> } else if(i!=-1) { <del> path = path.substring(0,i); <del> File returnValue = new File(path); <del> <del> /** On Vista if you ask for the parent of "C:\" then <del> * the code above will create a file for "C:". <del> * But if you call getAbsolutePath() on this file, it will <del> * return your classpath. Sooo... here's an attempted <del> * workaround for this case: <del> */ <del> if(returnValue.getPath().equals(returnValue.getAbsolutePath())==false) { <del> //A lack of a separator says we're pretty near the root level anyway, right? <del> if(path.indexOf(File.separator)==-1) <del> return null; <del> } <del> return returnValue; <del> } <del> return null; <del> } <del> <del> @Override <del> public OutputStream createOutputStream() throws MissingParentException, FileCreationException, IOException { <del> File parent = getParentFile(file); <del> if(parent!=null) { <del> parent.mkdirs(); <del> } <del> if(parent.exists()==false) <del> throw new MissingParentException(); <del> if(file.exists()==false && file.createNewFile()==false) <del> throw new FileCreationException("the file \""+getPath()+"\" could not be created"); <del> if(file.exists()==false) <del> throw new FileCreationException("the file \""+getPath()+"\" did not exist"); <del> flush(); <del> return new FileOutputStream(file); <del> } <del> <del> @Override <del> protected String doGetName() { <del> String name = file.getName(); <del> if(name.equals("") && file.getAbsolutePath().equals("/")) { <del> //special case for UNIX/Macs: <del> File v = new File("/Volumes/"); <del> File[] volumes = v.listFiles(); <del> try { <del> for(int a = 0; volumes!=null && a<volumes.length; a++) { <del> 
if(volumes[a].getCanonicalFile().equals(file)) { <del> return volumes[a].getName(); <del> } <del> } <del> } catch(IOException e) { <del> e.printStackTrace(); <del> } <del> } <del> return name; <del> } <del> <del> @Override <del> public IOLocation getParent() { <del> File parent = getParentFile(file); <del> if(parent!=null) { <del> return LocationFactory.get().create(parent); <del> } <del> return null; <del> } <del> <del> @Override <del> protected boolean doIsDirectory() { <del> return file.isDirectory(); <del> } <del> <del> @Override <del> protected boolean doIsNavigable() { <del> return isDefaultNavigable(this); <del> } <del> <del> public Icon getIcon() { <del> final Icon[] iconWrapper = new Icon[] { null }; <del> final Throwable[] errorWrapper = new Throwable[] { null }; <del> Runnable runnable = new Runnable() { <del> public void run() { <del> try { <del> iconWrapper[0] = FileIcon.getIcon(file); <del> } catch(Throwable t) { <del> errorWrapper[0] = t; <del> } finally { <del> synchronized(iconWrapper) { <del> iconWrapper.notify(); <del> } <del> } <del> } <del> }; <del> if(SwingUtilities.isEventDispatchThread()) { <del> runnable.run(); <del> } else { <del> SwingUtilities.invokeLater(runnable); <del> while(iconWrapper[0]==null && errorWrapper[0]==null) { <del> synchronized(iconWrapper) { <del> try { <del> iconWrapper.wait(); <del> } catch (InterruptedException e) {} <del> } <del> } <del> } <del> if(errorWrapper[0] instanceof RuntimeException) { <del> throw (RuntimeException)errorWrapper[0]; <del> } else if(errorWrapper[0] instanceof Error) { <del> throw (Error)errorWrapper[0]; <del> } <del> return iconWrapper[0]; <del> } <del> <del> @Override <del> protected void doListChildren(Receiver<IOLocation> receiver,Cancellable cancellable) { <del> File[] files = file.listFiles(); <del> <del> if(files==null) <del> files = new File[] {}; <del> <del> for(int a = 0; a<files.length; a++) { <del> if(cancellable!=null && cancellable.isCancelled()) <del> return; <del> <del> 
IOLocation loc = LocationFactory.get().create(files[a]); <del> receiver.add(loc); <del> } <del> } <del> <del> @Override <del> protected boolean doIsHidden() { <del> return file.isHidden(); <del> } <del> <del> @Override <del> public IOLocation setName(String s) throws SetNameException { <del> try { <del> File dest = new File(getParentFile(file), s); <del> if(file.renameTo(dest)) <del> return LocationFactory.get().create(dest); <del> throw new SetNameException("renaming \""+file.getAbsolutePath()+"\" failed"); <del> } finally { <del> flush(); <del> } <del> } <del> <del> @Override <del> public void delete() throws IOException { <del> try { <del> file.delete(); <del> if(file.exists()) <del> throw new DeleteException("the file \""+getPath()+"\" still exists"); <del> } finally { <del> flush(); <del> } <del> } <del> <del> @Override <del> protected boolean doExists() { <del> return file.exists(); <del> } <del> <del> @Override <del> protected long doLength() { <del> return file.length(); <del> } <del> <del> @Override <del> public int hashCode() { <del> return file.hashCode(); <del> } <del> <del> @Override <del> public String toString() { <del> return "FileLocation[ path = \""+file.getAbsolutePath()+"\" ]"; <del> } <del> <del> @Override <del> protected boolean doIsAlias() { <del> return IOUtils.isAlias(file); <del> } <del> <del> <del> public URL getURL() { <del> try { <del> //Mike assures me that by calling toURI().toURL() we're encoding spaces correctly automatically <del> return file.toURI().toURL(); <del> } catch (MalformedURLException e) { <del> e.printStackTrace(); <del> return null; <del> } <del> } <add> } <add> } <add> if (errorWrapper[0] instanceof RuntimeException) { <add> throw (RuntimeException) errorWrapper[0]; <add> } else if (errorWrapper[0] instanceof Error) { <add> throw (Error) errorWrapper[0]; <add> } <add> return iconWrapper[0]; <add> } <add> <add> @Override <add> protected void doListChildren(Receiver<IOLocation> receiver, <add> Cancellable cancellable) { 
<add> File[] files = file.listFiles(); <add> <add> if (files == null) <add> files = new File[] {}; <add> <add> for (int a = 0; a < files.length; a++) { <add> if (cancellable != null && cancellable.isCancelled()) <add> return; <add> <add> IOLocation loc = LocationFactory.get().create(files[a]); <add> receiver.add(loc); <add> } <add> } <add> <add> @Override <add> protected boolean doIsHidden() { <add> return file.isHidden(); <add> } <add> <add> @Override <add> public IOLocation setName(String s) throws SetNameException { <add> try { <add> File dest = new File(getParentFile(file), s); <add> if (file.renameTo(dest)) <add> return LocationFactory.get().create(dest); <add> throw new SetNameException("renaming \"" + file.getAbsolutePath() <add> + "\" failed"); <add> } finally { <add> flush(); <add> } <add> } <add> <add> @Override <add> public void delete() throws IOException { <add> try { <add> file.delete(); <add> if (file.exists()) <add> throw new DeleteException("the file \"" + getPath() <add> + "\" still exists"); <add> } finally { <add> flush(); <add> } <add> } <add> <add> @Override <add> protected boolean doExists() { <add> return file.exists(); <add> } <add> <add> @Override <add> protected long doLength() { <add> return file.length(); <add> } <add> <add> @Override <add> public int hashCode() { <add> return file.hashCode(); <add> } <add> <add> @Override <add> public String toString() { <add> return "FileLocation[ path = \"" + file.getAbsolutePath() + "\" ]"; <add> } <add> <add> @Override <add> protected boolean doIsAlias() { <add> return IOUtils.isAlias(file); <add> } <add> <add> public URL getURL() { <add> try { <add> // Mike assures me that by calling toURI().toURL() we're encoding <add> // spaces correctly automatically <add> return file.toURI().toURL(); <add> } catch (MalformedURLException e) { <add> e.printStackTrace(); <add> return null; <add> } <add> } <ide> }
Java
apache-2.0
f5493dcbeae061129e0ab9145f9133ac08aa4809
0
maichler/izpack,rsharipov/izpack,Helpstone/izpack,tomas-forsman/izpack,yukron/izpack,izpack/izpack,maichler/izpack,Murdock01/izpack,tomas-forsman/izpack,yukron/izpack,optotronic/izpack,maichler/izpack,izpack/izpack,stenix71/izpack,kanayo/izpack,mtjandra/izpack,akuhtz/izpack,kanayo/izpack,Murdock01/izpack,codehaus/izpack,izpack/izpack,Murdock01/izpack,Murdock01/izpack,bradcfisher/izpack,stenix71/izpack,akuhtz/izpack,akuhtz/izpack,stenix71/izpack,awilhelm/izpack-with-ips,izpack/izpack,Helpstone/izpack,mtjandra/izpack,Helpstone/izpack,bradcfisher/izpack,yukron/izpack,akuhtz/izpack,tomas-forsman/izpack,kanayo/izpack,Murdock01/izpack,yukron/izpack,kanayo/izpack,codehaus/izpack,tomas-forsman/izpack,awilhelm/izpack-with-ips,Sage-ERP-X3/izpack,stenix71/izpack,optotronic/izpack,codehaus/izpack,akuhtz/izpack,bradcfisher/izpack,codehaus/izpack,rkrell/izpack,maichler/izpack,izpack/izpack,stenix71/izpack,awilhelm/izpack-with-ips,optotronic/izpack,dasapich/izpack,rkrell/izpack,dasapich/izpack,bradcfisher/izpack,bradcfisher/izpack,stenix71/izpack,yukron/izpack,dasapich/izpack,mtjandra/izpack,kanayo/izpack,rsharipov/izpack,rkrell/izpack,optotronic/izpack,stenix71/izpack,optotronic/izpack,Helpstone/izpack,dasapich/izpack,rsharipov/izpack,rkrell/izpack,mtjandra/izpack,bradcfisher/izpack,dasapich/izpack,codehaus/izpack,kanayo/izpack,tomas-forsman/izpack,rkrell/izpack,yukron/izpack,rsharipov/izpack,Murdock01/izpack,optotronic/izpack,yukron/izpack,izpack/izpack,maichler/izpack,mtjandra/izpack,rsharipov/izpack,mtjandra/izpack,Helpstone/izpack,bradcfisher/izpack,maichler/izpack,awilhelm/izpack-with-ips,Sage-ERP-X3/izpack,Helpstone/izpack,Sage-ERP-X3/izpack,awilhelm/izpack-with-ips,codehaus/izpack,akuhtz/izpack,dasapich/izpack,rkrell/izpack,akuhtz/izpack,dasapich/izpack,maichler/izpack,Sage-ERP-X3/izpack,Helpstone/izpack,rsharipov/izpack,tomas-forsman/izpack,Sage-ERP-X3/izpack,codehaus/izpack,izpack/izpack,rsharipov/izpack,tomas-forsman/izpack,Murdock01/izpack,Sage-ERP-X3/izpack,rkrell/izp
ack,optotronic/izpack,mtjandra/izpack,Sage-ERP-X3/izpack
/* * IzPack - Copyright 2001-2008 Julien Ponge, All Rights Reserved. * * http://izpack.org/ * http://izpack.codehaus.org/ * * Copyright 2001 Johannes Lehtinen * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.izforge.izpack.util; import java.io.*; import java.util.HashMap; import java.util.Map; import java.util.Properties; /** * Substitutes variables occurring in an input stream or a string. This implementation supports a * generic variable value mapping and escapes the possible special characters occurring in the * substituted values. The file types specifically supported are plain text files (no escaping), * Java properties files, and XML files. A valid variable name matches the regular expression * [a-zA-Z][a-zA-Z0-9_]* and names are case sensitive. Variables are referenced either by $NAME or * ${NAME} (the latter syntax being useful in situations like ${NAME}NOTPARTOFNAME). If a referenced * variable is undefined then it is not substituted but the corresponding part of the stream is * copied as is. * * @author Johannes Lehtinen <[email protected]> */ public class VariableSubstitutor implements Serializable { /** * */ private static final long serialVersionUID = 3907213762447685687L; /** * The variable value mappings */ protected transient Properties variables; /** * Whether braces are required for substitution. */ protected boolean bracesRequired = false; /** * A constant for file type. Plain file. 
*/ protected final static int TYPE_PLAIN = 0; /** * A constant for file type. Java properties file. */ protected final static int TYPE_JAVA_PROPERTIES = 1; /** * A constant for file type. XML file. */ protected final static int TYPE_XML = 2; /** * A constant for file type. Shell file. */ protected final static int TYPE_SHELL = 3; /** * A constant for file type. Plain file with '@' start char. */ protected final static int TYPE_AT = 4; /** * A constant for file type. Java file, where \ have to be escaped. */ protected final static int TYPE_JAVA = 5; /** * A constant for file type. Plain file with ANT-like variable markers, ie @param@ */ protected final static int TYPE_ANT = 6; /** * PLAIN = "plain" */ public final static String PLAIN = "plain"; /** * A mapping of file type names to corresponding integer constants. */ protected final static Map<String, Integer> typeNameToConstantMap; // Initialize the file type map static { typeNameToConstantMap = new HashMap<String, Integer>(); typeNameToConstantMap.put("plain", TYPE_PLAIN); typeNameToConstantMap.put("javaprop", TYPE_JAVA_PROPERTIES); typeNameToConstantMap.put("java", TYPE_JAVA); typeNameToConstantMap.put("xml", TYPE_XML); typeNameToConstantMap.put("shell", TYPE_SHELL); typeNameToConstantMap.put("at", TYPE_AT); typeNameToConstantMap.put("ant", TYPE_ANT); } /** * Constructs a new substitutor using the specified variable value mappings. The environment * hashtable is copied by reference. Braces are not required by default * * @param variables the map with variable value mappings */ public VariableSubstitutor(Properties variables) { this.variables = variables; } /** * Get whether this substitutor requires braces. */ public boolean areBracesRequired() { return bracesRequired; } /** * Specify whether this substitutor requires braces. */ public void setBracesRequired(boolean braces) { bracesRequired = braces; } /** * Substitutes the variables found in the specified string. 
Escapes special characters using * file type specific escaping if necessary. * * @param str the string to check for variables * @param type the escaping type or null for plain * @return the string with substituted variables * @throws IllegalArgumentException if unknown escaping type specified */ public String substitute(String str, String type) throws IllegalArgumentException { if (str == null) { return null; } // Create reader and writer for the strings StringReader reader = new StringReader(str); StringWriter writer = new StringWriter(); // Substitute any variables try { substitute(reader, writer, type); } catch (IOException e) { throw new Error("Unexpected I/O exception when reading/writing memory " + "buffer; nested exception is: " + e); } // Return the resulting string return writer.getBuffer().toString(); } /** * Substitutes the variables found in the specified input stream. Escapes special characters * using file type specific escaping if necessary. * * @param in the input stream to read * @param out the output stream to write * @param type the file type or null for plain * @param encoding the character encoding or null for default * @return the number of substitutions made * @throws IllegalArgumentException if unknown file type specified * @throws UnsupportedEncodingException if encoding not supported * @throws IOException if an I/O error occurs */ public int substitute(InputStream in, OutputStream out, String type, String encoding) throws IllegalArgumentException, UnsupportedEncodingException, IOException { // Check if file type specific default encoding known if (encoding == null) { int t = getTypeConstant(type); switch (t) { case TYPE_JAVA_PROPERTIES: encoding = "ISO-8859-1"; break; case TYPE_XML: encoding = "UTF-8"; break; } } // Create the reader and writer InputStreamReader reader = (encoding != null ? new InputStreamReader(in, encoding) : new InputStreamReader(in)); OutputStreamWriter writer = (encoding != null ? 
new OutputStreamWriter(out, encoding) : new OutputStreamWriter(out)); // Copy the data and substitute variables int subs = substitute(reader, writer, type); // Flush the writer so that everything gets written out writer.flush(); return subs; } /** * Substitute method Variant that gets An Input Stream and returns A String * * @param in The Input Stream, with Placeholders * @param type The used FormatType * @return the substituted result as string * @throws IllegalArgumentException If a wrong input was given. * @throws UnsupportedEncodingException If the file comes with a wrong Encoding * @throws IOException If an I/O Error occurs. */ public String substitute(InputStream in, String type ) throws IllegalArgumentException, UnsupportedEncodingException, IOException { // Check if file type specific default encoding known String encoding = PLAIN; { int t = getTypeConstant(type); switch (t) { case TYPE_JAVA_PROPERTIES: encoding = "ISO-8859-1"; break; case TYPE_XML: encoding = "UTF-8"; break; } } // Create the reader and writer InputStreamReader reader = ((encoding != null) ? new InputStreamReader(in, encoding) : new InputStreamReader(in)); StringWriter writer = new StringWriter(); // Copy the data and substitute variables substitute(reader, writer, type); // Flush the writer so that everything gets written out writer.flush(); return writer.getBuffer().toString(); } /** * Substitutes the variables found in the data read from the specified reader. Escapes special * characters using file type specific escaping if necessary. 
* * @param reader the reader to read * @param writer the writer used to write data out * @param type the file type or null for plain * @return the number of substitutions made * @throws IllegalArgumentException if unknown file type specified * @throws IOException if an I/O error occurs */ public int substitute(Reader reader, Writer writer, String type) throws IllegalArgumentException, IOException { // Check the file type int t = getTypeConstant(type); // determine character which starts (and ends) a variable char variable_start = '$'; char variable_end = '\0'; if (t == TYPE_SHELL) { variable_start = '%'; } else if (t == TYPE_AT) { variable_start = '@'; } else if (t == TYPE_ANT) { variable_start = '@'; variable_end = '@'; } int subs = 0; // Copy data and substitute variables int c = reader.read(); while (true) { // Find the next potential variable reference or EOF while (c != -1 && c != variable_start) { writer.write(c); c = reader.read(); } if (c == -1) { return subs; } // Check if braces used or start char escaped boolean braces = false; c = reader.read(); if (c == '{') { braces = true; c = reader.read(); } else if (bracesRequired) { writer.write(variable_start); continue; } else if (c == -1) { writer.write(variable_start); return subs; } // Read the variable name StringBuffer nameBuffer = new StringBuffer(); while (c != -1 && (braces && c != '}') || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (braces && (c == '[') || (c == ']')) || (((c >= '0' && c <= '9') || c == '_' || c == '.' 
|| c == '-') && nameBuffer.length() > 0)) { nameBuffer.append((char) c); c = reader.read(); } String name = nameBuffer.toString(); // Check if a legal and defined variable found String varvalue = null; if (((!braces || c == '}') && (!braces || variable_end == '\0' || variable_end == c) ) && name.length() > 0) { // check for environment variables if (braces && name.startsWith("ENV[") && (name.lastIndexOf(']') == name.length() - 1)) { varvalue = IoHelper.getenv(name.substring(4, name.length() - 1)); } else { varvalue = variables.getProperty(name); } subs++; } // Substitute the variable... if (varvalue != null) { writer.write(escapeSpecialChars(varvalue, t)); if (braces || variable_end != '\0') { c = reader.read(); } } // ...or ignore it else { writer.write(variable_start); if (braces) { writer.write('{'); } writer.write(name); } } } /** * Returns the internal constant for the specified file type. * * @param type the type name or null for plain * @return the file type constant */ protected int getTypeConstant(String type) { if (type == null) { return TYPE_PLAIN; } Integer integer = typeNameToConstantMap.get(type); if (integer == null) { throw new IllegalArgumentException("Unknown file type " + type); } else { return integer; } } /** * Escapes the special characters in the specified string using file type specific rules. 
* * @param str the string to check for special characters * @param type the target file type (one of TYPE_xxx) * @return the string with the special characters properly escaped */ protected String escapeSpecialChars(String str, int type) { StringBuffer buffer; int len; int i; switch (type) { case TYPE_PLAIN: case TYPE_AT: case TYPE_ANT: return str; case TYPE_SHELL: //apple mac has major problem with \r, make sure they are gone return str.replace("\r",""); case TYPE_JAVA_PROPERTIES: case TYPE_JAVA: buffer = new StringBuffer(str); len = str.length(); for (i = 0; i < len; i++) { // Check for control characters char c = buffer.charAt(i); if (type == TYPE_JAVA_PROPERTIES) { if (c == '\t' || c == '\n' || c == '\r') { char tag; if (c == '\t') { tag = 't'; } else if (c == '\n') { tag = 'n'; } else { tag = 'r'; } buffer.replace(i, i + 1, "\\" + tag); len++; i++; } // Check for special characters if (c == '\\' || c == '"' || c == '\'' || c == ' ') { buffer.insert(i, '\\'); len++; i++; } } else { if (c == '\\') { buffer.replace(i, i + 1, "\\\\"); len++; i++; } } } return buffer.toString(); case TYPE_XML: buffer = new StringBuffer(str); len = str.length(); for (i = 0; i < len; i++) { String r = null; char c = buffer.charAt(i); switch (c) { case '<': r = "&lt;"; break; case '>': r = "&gt;"; break; case '&': r = "&amp;"; break; case '\'': r = "&apos;"; break; case '"': r = "&quot;"; break; } if (r != null) { buffer.replace(i, i + 1, r); len = buffer.length(); i += r.length() - 1; } } return buffer.toString(); default: throw new Error("Unknown file type constant " + type); } } }
src/lib/com/izforge/izpack/util/VariableSubstitutor.java
/* * IzPack - Copyright 2001-2008 Julien Ponge, All Rights Reserved. * * http://izpack.org/ * http://izpack.codehaus.org/ * * Copyright 2001 Johannes Lehtinen * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.izforge.izpack.util; import java.io.*; import java.util.HashMap; import java.util.Map; import java.util.Properties; /** * Substitutes variables occurring in an input stream or a string. This implementation supports a * generic variable value mapping and escapes the possible special characters occurring in the * substituted values. The file types specifically supported are plain text files (no escaping), * Java properties files, and XML files. A valid variable name matches the regular expression * [a-zA-Z][a-zA-Z0-9_]* and names are case sensitive. Variables are referenced either by $NAME or * ${NAME} (the latter syntax being useful in situations like ${NAME}NOTPARTOFNAME). If a referenced * variable is undefined then it is not substituted but the corresponding part of the stream is * copied as is. * * @author Johannes Lehtinen <[email protected]> */ public class VariableSubstitutor implements Serializable { /** * */ private static final long serialVersionUID = 3907213762447685687L; /** * The variable value mappings */ protected transient Properties variables; /** * Whether braces are required for substitution. */ protected boolean bracesRequired = false; /** * A constant for file type. Plain file. 
*/ protected final static int TYPE_PLAIN = 0; /** * A constant for file type. Java properties file. */ protected final static int TYPE_JAVA_PROPERTIES = 1; /** * A constant for file type. XML file. */ protected final static int TYPE_XML = 2; /** * A constant for file type. Shell file. */ protected final static int TYPE_SHELL = 3; /** * A constant for file type. Plain file with '@' start char. */ protected final static int TYPE_AT = 4; /** * A constant for file type. Java file, where \ have to be escaped. */ protected final static int TYPE_JAVA = 5; /** * A constant for file type. Plain file with ANT-like variable markers, ie @param@ */ protected final static int TYPE_ANT = 6; /** * PLAIN = "plain" */ public final static String PLAIN = "plain"; /** * A mapping of file type names to corresponding integer constants. */ protected final static Map<String, Integer> typeNameToConstantMap; // Initialize the file type map static { typeNameToConstantMap = new HashMap<String, Integer>(); typeNameToConstantMap.put("plain", TYPE_PLAIN); typeNameToConstantMap.put("javaprop", TYPE_JAVA_PROPERTIES); typeNameToConstantMap.put("java", TYPE_JAVA); typeNameToConstantMap.put("xml", TYPE_XML); typeNameToConstantMap.put("shell", TYPE_SHELL); typeNameToConstantMap.put("at", TYPE_AT); typeNameToConstantMap.put("ant", TYPE_ANT); } /** * Constructs a new substitutor using the specified variable value mappings. The environment * hashtable is copied by reference. Braces are not required by default * * @param variables the map with variable value mappings */ public VariableSubstitutor(Properties variables) { this.variables = variables; } /** * Get whether this substitutor requires braces. */ public boolean areBracesRequired() { return bracesRequired; } /** * Specify whether this substitutor requires braces. */ public void setBracesRequired(boolean braces) { bracesRequired = braces; } /** * Substitutes the variables found in the specified string. 
Escapes special characters using * file type specific escaping if necessary. * * @param str the string to check for variables * @param type the escaping type or null for plain * @return the string with substituted variables * @throws IllegalArgumentException if unknown escaping type specified */ public String substitute(String str, String type) throws IllegalArgumentException { if (str == null) { return null; } // Create reader and writer for the strings StringReader reader = new StringReader(str); StringWriter writer = new StringWriter(); // Substitute any variables try { substitute(reader, writer, type); } catch (IOException e) { throw new Error("Unexpected I/O exception when reading/writing memory " + "buffer; nested exception is: " + e); } // Return the resulting string return writer.getBuffer().toString(); } /** * Substitutes the variables found in the specified input stream. Escapes special characters * using file type specific escaping if necessary. * * @param in the input stream to read * @param out the output stream to write * @param type the file type or null for plain * @param encoding the character encoding or null for default * @return the number of substitutions made * @throws IllegalArgumentException if unknown file type specified * @throws UnsupportedEncodingException if encoding not supported * @throws IOException if an I/O error occurs */ public int substitute(InputStream in, OutputStream out, String type, String encoding) throws IllegalArgumentException, UnsupportedEncodingException, IOException { // Check if file type specific default encoding known if (encoding == null) { int t = getTypeConstant(type); switch (t) { case TYPE_JAVA_PROPERTIES: encoding = "ISO-8859-1"; break; case TYPE_XML: encoding = "UTF-8"; break; } } // Create the reader and writer InputStreamReader reader = (encoding != null ? new InputStreamReader(in, encoding) : new InputStreamReader(in)); OutputStreamWriter writer = (encoding != null ? 
new OutputStreamWriter(out, encoding) : new OutputStreamWriter(out)); // Copy the data and substitute variables int subs = substitute(reader, writer, type); // Flush the writer so that everything gets written out writer.flush(); return subs; } /** * Substitute method Variant that gets An Input Stream and returns A String * * @param in The Input Stream, with Placeholders * @param type The used FormatType * @return the substituted result as string * @throws IllegalArgumentException If a wrong input was given. * @throws UnsupportedEncodingException If the file comes with a wrong Encoding * @throws IOException If an I/O Error occurs. */ public String substitute(InputStream in, String type ) throws IllegalArgumentException, UnsupportedEncodingException, IOException { // Check if file type specific default encoding known String encoding = PLAIN; { int t = getTypeConstant(type); switch (t) { case TYPE_JAVA_PROPERTIES: encoding = "ISO-8859-1"; break; case TYPE_XML: encoding = "UTF-8"; break; } } // Create the reader and writer InputStreamReader reader = ((encoding != null) ? new InputStreamReader(in, encoding) : new InputStreamReader(in)); StringWriter writer = new StringWriter(); // Copy the data and substitute variables substitute(reader, writer, type); // Flush the writer so that everything gets written out writer.flush(); return writer.getBuffer().toString(); } /** * Substitutes the variables found in the data read from the specified reader. Escapes special * characters using file type specific escaping if necessary. 
* * @param reader the reader to read * @param writer the writer used to write data out * @param type the file type or null for plain * @return the number of substitutions made * @throws IllegalArgumentException if unknown file type specified * @throws IOException if an I/O error occurs */ public int substitute(Reader reader, Writer writer, String type) throws IllegalArgumentException, IOException { // Check the file type int t = getTypeConstant(type); // determine character which starts (and ends) a variable char variable_start = '$'; char variable_end = '\0'; if (t == TYPE_SHELL) { variable_start = '%'; } else if (t == TYPE_AT) { variable_start = '@'; } else if (t == TYPE_ANT) { variable_start = '@'; variable_end = '@'; } int subs = 0; // Copy data and substitute variables int c = reader.read(); // Ignore BOM of UTF-8 if (c == 0xEF) { for (int i = 0; i < 2; i++) { c = reader.read(); } } // Ignore quaint return values at UTF-8 BOMs. if (c > 0xFF) { c = reader.read(); } while (true) { // Find the next potential variable reference or EOF while (c != -1 && c != variable_start) { writer.write(c); c = reader.read(); } if (c == -1) { return subs; } // Check if braces used or start char escaped boolean braces = false; c = reader.read(); if (c == '{') { braces = true; c = reader.read(); } else if (bracesRequired) { writer.write(variable_start); continue; } else if (c == -1) { writer.write(variable_start); return subs; } // Read the variable name StringBuffer nameBuffer = new StringBuffer(); while (c != -1 && (braces && c != '}') || (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (braces && (c == '[') || (c == ']')) || (((c >= '0' && c <= '9') || c == '_' || c == '.' 
|| c == '-') && nameBuffer.length() > 0)) { nameBuffer.append((char) c); c = reader.read(); } String name = nameBuffer.toString(); // Check if a legal and defined variable found String varvalue = null; if (((!braces || c == '}') && (!braces || variable_end == '\0' || variable_end == c) ) && name.length() > 0) { // check for environment variables if (braces && name.startsWith("ENV[") && (name.lastIndexOf(']') == name.length() - 1)) { varvalue = IoHelper.getenv(name.substring(4, name.length() - 1)); } else { varvalue = variables.getProperty(name); } subs++; } // Substitute the variable... if (varvalue != null) { writer.write(escapeSpecialChars(varvalue, t)); if (braces || variable_end != '\0') { c = reader.read(); } } // ...or ignore it else { writer.write(variable_start); if (braces) { writer.write('{'); } writer.write(name); } } } /** * Returns the internal constant for the specified file type. * * @param type the type name or null for plain * @return the file type constant */ protected int getTypeConstant(String type) { if (type == null) { return TYPE_PLAIN; } Integer integer = typeNameToConstantMap.get(type); if (integer == null) { throw new IllegalArgumentException("Unknown file type " + type); } else { return integer; } } /** * Escapes the special characters in the specified string using file type specific rules. 
* * @param str the string to check for special characters * @param type the target file type (one of TYPE_xxx) * @return the string with the special characters properly escaped */ protected String escapeSpecialChars(String str, int type) { StringBuffer buffer; int len; int i; switch (type) { case TYPE_PLAIN: case TYPE_AT: case TYPE_ANT: return str; case TYPE_SHELL: //apple mac has major problem with \r, make sure they are gone return str.replace("\r",""); case TYPE_JAVA_PROPERTIES: case TYPE_JAVA: buffer = new StringBuffer(str); len = str.length(); for (i = 0; i < len; i++) { // Check for control characters char c = buffer.charAt(i); if (type == TYPE_JAVA_PROPERTIES) { if (c == '\t' || c == '\n' || c == '\r') { char tag; if (c == '\t') { tag = 't'; } else if (c == '\n') { tag = 'n'; } else { tag = 'r'; } buffer.replace(i, i + 1, "\\" + tag); len++; i++; } // Check for special characters if (c == '\\' || c == '"' || c == '\'' || c == ' ') { buffer.insert(i, '\\'); len++; i++; } } else { if (c == '\\') { buffer.replace(i, i + 1, "\\\\"); len++; i++; } } } return buffer.toString(); case TYPE_XML: buffer = new StringBuffer(str); len = str.length(); for (i = 0; i < len; i++) { String r = null; char c = buffer.charAt(i); switch (c) { case '<': r = "&lt;"; break; case '>': r = "&gt;"; break; case '&': r = "&amp;"; break; case '\'': r = "&apos;"; break; case '"': r = "&quot;"; break; } if (r != null) { buffer.replace(i, i + 1, r); len = buffer.length(); i += r.length() - 1; } } return buffer.toString(); default: throw new Error("Unknown file type constant " + type); } } }
Removed UTF-8 BOM (Byte Order Mark) (IZPACK-212). Relates to IZPACK-166 too. git-svn-id: 408af81b9e4f0a5eaad229a6d9eed76d614c4af6@2470 7d736ef5-cfd4-0310-9c9a-b52d5c14b761
src/lib/com/izforge/izpack/util/VariableSubstitutor.java
Removed UTF-8 BOM (Byte Order Mark) (IZPACK-212). Relates to IZPACK-166 too.
<ide><path>rc/lib/com/izforge/izpack/util/VariableSubstitutor.java <ide> // Copy data and substitute variables <ide> int c = reader.read(); <ide> <del> // Ignore BOM of UTF-8 <del> if (c == 0xEF) <del> { <del> for (int i = 0; i < 2; i++) <del> { <del> c = reader.read(); <del> } <del> } <del> // Ignore quaint return values at UTF-8 BOMs. <del> if (c > 0xFF) <del> { <del> c = reader.read(); <del> } <ide> while (true) <ide> { <ide> // Find the next potential variable reference or EOF
Java
apache-2.0
44a7c4d000376a1223eaee197c2cac18fcbab2c1
0
andsel/moquette,windbender/moquette,windbender/moquette,andsel/moquette,windbender/moquette,windbender/moquette,andsel/moquette,andsel/moquette
/* * Copyright (c) 2012-2017 The original author or authors * ------------------------------------------------------ * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * * The Eclipse Public License is available at * http://www.eclipse.org/legal/epl-v10.html * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package io.moquette.server.netty.metrics; import io.moquette.server.netty.NettyUtils; import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandler.Sharable; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; import io.netty.handler.codec.mqtt.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static io.moquette.spi.impl.Utils.messageId; @Sharable public class MQTTMessageLogger extends ChannelDuplexHandler { private static final Logger LOG = LoggerFactory.getLogger("messageLogger"); @Override public void channelRead(ChannelHandlerContext ctx, Object message) { logMQTTMessage(ctx, message, "C->B"); ctx.fireChannelRead(message); } private void logMQTTMessage(ChannelHandlerContext ctx, Object message, String direction) { if (!(message instanceof MqttMessage)) { return; } MqttMessage msg = (MqttMessage) message; String clientID = NettyUtils.clientID(ctx.channel()); MqttMessageType messageType = msg.fixedHeader().messageType(); switch (messageType) { case CONNECT: LOG.info("{} CONNECT client <{}>", direction, clientID); break; case SUBSCRIBE: MqttSubscribeMessage subscribe = (MqttSubscribeMessage) msg; LOG.info("{} SUBSCRIBE <{}> to topics {}", direction, clientID, subscribe.payload().topicSubscriptions()); break; case UNSUBSCRIBE: MqttUnsubscribeMessage unsubscribe = (MqttUnsubscribeMessage) msg; LOG.info("{} UNSUBSCRIBE 
<{}> to topics <{}>", direction, clientID, unsubscribe.payload().topics()); break; case PUBLISH: MqttPublishMessage publish = (MqttPublishMessage) msg; LOG.info("{} PUBLISH <{}> to topics <{}>", direction, clientID, publish.variableHeader().topicName()); break; case PUBREC: LOG.info("{} PUBREC <{}> packetID <{}>", direction, clientID, messageId(msg)); break; case PUBCOMP: LOG.info("{} PUBCOMP <{}> packetID <{}>", direction, clientID, messageId(msg)); break; case PUBREL: LOG.info("{} PUBREL <{}> packetID <{}>", direction, clientID, messageId(msg)); break; case DISCONNECT: LOG.info("{} DISCONNECT <{}>", direction, clientID); break; case PUBACK: LOG.info("{} PUBACK <{}> packetID <{}>", direction, clientID, messageId(msg)); break; case CONNACK: case SUBACK: case PINGREQ: case PINGRESP: case UNSUBACK: LOG.debug("{} {} <{}> packetID <{}>", direction, messageType, clientID, messageId(msg)); break; default: LOG.error("Unkonwn MessageType:{}", messageType); break; } } @Override public void channelInactive(ChannelHandlerContext ctx) throws Exception { String clientID = NettyUtils.clientID(ctx.channel()); if (clientID != null && !clientID.isEmpty()) { LOG.info("Channel closed <{}>", clientID); } ctx.fireChannelInactive(); } @Override public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { logMQTTMessage(ctx, msg, "C<-B"); ctx.write(msg, promise); } }
broker/src/main/java/io/moquette/server/netty/metrics/MQTTMessageLogger.java
/* * Copyright (c) 2012-2017 The original author or authors * ------------------------------------------------------ * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * and Apache License v2.0 which accompanies this distribution. * * The Eclipse Public License is available at * http://www.eclipse.org/legal/epl-v10.html * * The Apache License v2.0 is available at * http://www.opensource.org/licenses/apache2.0.php * * You may elect to redistribute this code under either of these licenses. */ package io.moquette.server.netty.metrics; import io.moquette.server.netty.NettyUtils; import io.netty.channel.ChannelDuplexHandler; import io.netty.channel.ChannelHandler.Sharable; import io.netty.channel.ChannelHandlerContext; import io.netty.channel.ChannelPromise; import io.netty.handler.codec.mqtt.*; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static io.moquette.spi.impl.Utils.messageId; @Sharable public class MQTTMessageLogger extends ChannelDuplexHandler { private static final Logger LOG = LoggerFactory.getLogger("messageLogger"); @Override public void channelRead(ChannelHandlerContext ctx, Object message) { logMQTTMessage(ctx, message, "C->B"); ctx.fireChannelRead(message); } private void logMQTTMessage(ChannelHandlerContext ctx, Object message, String direction) { if (!(message instanceof MqttMessage)) { return; } MqttMessage msg = (MqttMessage) message; String clientID = NettyUtils.clientID(ctx.channel()); MqttMessageType messageType = msg.fixedHeader().messageType(); switch (messageType) { case CONNECT: LOG.info("{} CONNECT client <{}>", direction, clientID); break; case SUBSCRIBE: MqttSubscribeMessage subscribe = (MqttSubscribeMessage) msg; LOG.info("{} SUBSCRIBE <{}> to topics {}", direction, clientID, subscribe.payload().topicSubscriptions()); break; case UNSUBSCRIBE: MqttUnsubscribeMessage unsubscribe = (MqttUnsubscribeMessage) msg; LOG.info("{} UNSUBSCRIBE 
<{}> to topics <{}>", direction, clientID, unsubscribe.payload().topics()); break; case PUBLISH: MqttPublishMessage publish = (MqttPublishMessage) msg; LOG.info("{} PUBLISH <{}> to topics <{}>", direction, clientID, publish.variableHeader().topicName()); break; case PUBREC: LOG.info("{} PUBREC <{}> packetID <{}>", direction, clientID, messageId(msg)); break; case PUBCOMP: LOG.info("{} PUBCOMP <{}> packetID <{}>", direction, clientID, messageId(msg)); break; case PUBREL: LOG.info("{} PUBREL <{}> packetID <{}>", direction, clientID, messageId(msg)); break; case DISCONNECT: LOG.info("{} DISCONNECT <{}>", direction, clientID); break; case PUBACK: LOG.info("{} PUBACK <{}> packetID <{}>", direction, clientID, messageId(msg)); break; default: LOG.error("Unkonwn MessageType:{}", messageType); break; } } @Override public void channelInactive(ChannelHandlerContext ctx) throws Exception { String clientID = NettyUtils.clientID(ctx.channel()); if (clientID != null && !clientID.isEmpty()) { LOG.info("Channel closed <{}>", clientID); } ctx.fireChannelInactive(); } @Override public void write(ChannelHandlerContext ctx, Object msg, ChannelPromise promise) throws Exception { logMQTTMessage(ctx, msg, "C<-B"); ctx.write(msg, promise); } }
broker/MQTTMessageLogger: missing messages added
broker/src/main/java/io/moquette/server/netty/metrics/MQTTMessageLogger.java
broker/MQTTMessageLogger: missing messages added
<ide><path>roker/src/main/java/io/moquette/server/netty/metrics/MQTTMessageLogger.java <ide> case PUBACK: <ide> LOG.info("{} PUBACK <{}> packetID <{}>", direction, clientID, messageId(msg)); <ide> break; <add> <add> case CONNACK: <add> case SUBACK: <add> case PINGREQ: <add> case PINGRESP: <add> case UNSUBACK: <add> LOG.debug("{} {} <{}> packetID <{}>", direction, messageType, clientID, messageId(msg)); <add> break; <add> <ide> default: <ide> LOG.error("Unkonwn MessageType:{}", messageType); <ide> break;
Java
mit
be926df2ec5ff81693251629748ed38655075393
0
abdul-khalid/LazyList,chathudan/LazyList,xiaojiu01/LazyList,pretizy/LazyList,jiachenning/LazyList,vaibhav712/imagelist,thest1/LazyList
package com.fedorvlasov.lazylist; import android.app.Activity; import android.os.Bundle; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.ListView; public class MainActivity extends Activity { ListView list; LazyAdapter adapter; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); list=(ListView)findViewById(R.id.list); adapter=new LazyAdapter(this, mStrings); list.setAdapter(adapter); Button b=(Button)findViewById(R.id.button1); b.setOnClickListener(listener); } @Override public void onDestroy() { list.setAdapter(null); super.onDestroy(); } public OnClickListener listener=new OnClickListener(){ @Override public void onClick(View arg0) { adapter.imageLoader.clearCache(); adapter.notifyDataSetChanged(); } }; private String[] mStrings={ "https://pbs.twimg.com/profile_images/3092003750/9b72a46e957a52740c667f4c64fa5d10_normal.jpeg", "https://pbs.twimg.com/profile_images/2508170683/m8jf0po4imu8t5eemjdd_normal.png", "https://pbs.twimg.com/profile_images/1701796334/TA-New-Logo_normal.jpg", "https://pbs.twimg.com/profile_images/913338263/AndroidPolice_logo_normal.png", "https://pbs.twimg.com/profile_images/1417650153/android-hug_normal.png", "https://pbs.twimg.com/profile_images/1517737798/aam-twitter-right-final_normal.png", "https://pbs.twimg.com/profile_images/3319660679/70e7025a05b674852b9f3cea0998259c_normal.jpeg", "https://pbs.twimg.com/profile_images/2100693240/58534_150210305010136_148613708503129_315282_6481640_n_normal.jpg", "https://pbs.twimg.com/profile_images/1306095935/androidcoo_normal.png", "https://pbs.twimg.com/profile_images/2938108229/399ba333772228bfbb40134018fbe777_normal.jpeg", "https://pbs.twimg.com/profile_images/487047133392949248/sVTI9rGI_normal.png", "https://pbs.twimg.com/profile_images/3092003750/9b72a46e957a52740c667f4c64fa5d10_normal.jpeg", 
"https://pbs.twimg.com/profile_images/2508170683/m8jf0po4imu8t5eemjdd_normal.png", "https://pbs.twimg.com/profile_images/1701796334/TA-New-Logo_normal.jpg", "https://pbs.twimg.com/profile_images/913338263/AndroidPolice_logo_normal.png", "https://pbs.twimg.com/profile_images/1417650153/android-hug_normal.png", "https://pbs.twimg.com/profile_images/1517737798/aam-twitter-right-final_normal.png", "https://pbs.twimg.com/profile_images/3319660679/70e7025a05b674852b9f3cea0998259c_normal.jpeg", "https://pbs.twimg.com/profile_images/2100693240/58534_150210305010136_148613708503129_315282_6481640_n_normal.jpg", "https://pbs.twimg.com/profile_images/1306095935/androidcoo_normal.png", "https://pbs.twimg.com/profile_images/2938108229/399ba333772228bfbb40134018fbe777_normal.jpeg", "https://pbs.twimg.com/profile_images/487047133392949248/sVTI9rGI_normal.png","https://pbs.twimg.com/profile_images/3092003750/9b72a46e957a52740c667f4c64fa5d10_normal.jpeg", "https://pbs.twimg.com/profile_images/2508170683/m8jf0po4imu8t5eemjdd_normal.png", "https://pbs.twimg.com/profile_images/1701796334/TA-New-Logo_normal.jpg", "https://pbs.twimg.com/profile_images/913338263/AndroidPolice_logo_normal.png", "https://pbs.twimg.com/profile_images/1417650153/android-hug_normal.png", "https://pbs.twimg.com/profile_images/1517737798/aam-twitter-right-final_normal.png", "https://pbs.twimg.com/profile_images/3319660679/70e7025a05b674852b9f3cea0998259c_normal.jpeg", "https://pbs.twimg.com/profile_images/2100693240/58534_150210305010136_148613708503129_315282_6481640_n_normal.jpg", "https://pbs.twimg.com/profile_images/1306095935/androidcoo_normal.png", "https://pbs.twimg.com/profile_images/2938108229/399ba333772228bfbb40134018fbe777_normal.jpeg", "https://pbs.twimg.com/profile_images/487047133392949248/sVTI9rGI_normal.png","https://pbs.twimg.com/profile_images/3092003750/9b72a46e957a52740c667f4c64fa5d10_normal.jpeg", "https://pbs.twimg.com/profile_images/2508170683/m8jf0po4imu8t5eemjdd_normal.png", 
"https://pbs.twimg.com/profile_images/1701796334/TA-New-Logo_normal.jpg", "https://pbs.twimg.com/profile_images/913338263/AndroidPolice_logo_normal.png", "https://pbs.twimg.com/profile_images/1417650153/android-hug_normal.png", "https://pbs.twimg.com/profile_images/1517737798/aam-twitter-right-final_normal.png", "https://pbs.twimg.com/profile_images/3319660679/70e7025a05b674852b9f3cea0998259c_normal.jpeg", "https://pbs.twimg.com/profile_images/2100693240/58534_150210305010136_148613708503129_315282_6481640_n_normal.jpg", "https://pbs.twimg.com/profile_images/1306095935/androidcoo_normal.png", "https://pbs.twimg.com/profile_images/2938108229/399ba333772228bfbb40134018fbe777_normal.jpeg", "https://pbs.twimg.com/profile_images/487047133392949248/sVTI9rGI_normal.png","https://pbs.twimg.com/profile_images/3092003750/9b72a46e957a52740c667f4c64fa5d10_normal.jpeg", "https://pbs.twimg.com/profile_images/2508170683/m8jf0po4imu8t5eemjdd_normal.png", "https://pbs.twimg.com/profile_images/1701796334/TA-New-Logo_normal.jpg", "https://pbs.twimg.com/profile_images/913338263/AndroidPolice_logo_normal.png", "https://pbs.twimg.com/profile_images/1417650153/android-hug_normal.png", "https://pbs.twimg.com/profile_images/1517737798/aam-twitter-right-final_normal.png", "https://pbs.twimg.com/profile_images/3319660679/70e7025a05b674852b9f3cea0998259c_normal.jpeg", "https://pbs.twimg.com/profile_images/2100693240/58534_150210305010136_148613708503129_315282_6481640_n_normal.jpg", "https://pbs.twimg.com/profile_images/1306095935/androidcoo_normal.png", "https://pbs.twimg.com/profile_images/2938108229/399ba333772228bfbb40134018fbe777_normal.jpeg", "https://pbs.twimg.com/profile_images/487047133392949248/sVTI9rGI_normal.png" }; }
src/com/fedorvlasov/lazylist/MainActivity.java
package com.fedorvlasov.lazylist; import android.app.Activity; import android.os.Bundle; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.ListView; public class MainActivity extends Activity { ListView list; LazyAdapter adapter; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); list=(ListView)findViewById(R.id.list); adapter=new LazyAdapter(this, mStrings); list.setAdapter(adapter); Button b=(Button)findViewById(R.id.button1); b.setOnClickListener(listener); } @Override public void onDestroy() { list.setAdapter(null); super.onDestroy(); } public OnClickListener listener=new OnClickListener(){ @Override public void onClick(View arg0) { adapter.imageLoader.clearCache(); adapter.notifyDataSetChanged(); } }; private String[] mStrings={ "http://a3.twimg.com/profile_images/670625317/aam-logo-v3-twitter.png", "http://a3.twimg.com/profile_images/740897825/AndroidCast-350_normal.png", "http://a3.twimg.com/profile_images/121630227/Droid_normal.jpg", "http://a1.twimg.com/profile_images/957149154/twitterhalf_normal.jpg", "http://a1.twimg.com/profile_images/97470808/icon_normal.png", "http://a3.twimg.com/profile_images/511790713/AG.png", "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", "http://a1.twimg.com/profile_images/349012784/android_logo_small_normal.jpg", "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon.png", "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300_normal.jpg", "http://a1.twimg.com/profile_images/655119538/andbook.png", 
"http://a3.twimg.com/profile_images/768060227/ap4u_normal.jpg", "http://a1.twimg.com/profile_images/74724754/android_logo_normal.png", "http://a3.twimg.com/profile_images/681537837/SmallAvatarx150_normal.png", "http://a1.twimg.com/profile_images/63737974/2008-11-06_1637_normal.png", "http://a3.twimg.com/profile_images/548410609/icon_8_73.png", "http://a1.twimg.com/profile_images/612232882/nexusoneavatar_normal.jpg", "http://a1.twimg.com/profile_images/213722080/Bugdroid-phone_normal.png", "http://a1.twimg.com/profile_images/645523828/OT_icon_090918_android_normal.png", "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", "http://a3.twimg.com/profile_images/77641093/AndroidPlanet.png", "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300_normal.jpg", "http://a1.twimg.com/profile_images/655119538/andbook_normal.png", "http://a3.twimg.com/profile_images/511790713/AG_normal.png", "http://a3.twimg.com/profile_images/956404323/androinica-avatar.png", "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", "http://a1.twimg.com/profile_images/349012784/android_logo_small_normal.jpg", "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon_normal.png", "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", "http://a3.twimg.com/profile_images/77641093/AndroidPlanet.png", "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300.jpg", "http://a1.twimg.com/profile_images/655119538/andbook_normal.png", "http://a3.twimg.com/profile_images/511790713/AG_normal.png", "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", 
"http://a3.twimg.com/profile_images/121630227/Droid.jpg", "http://a1.twimg.com/profile_images/957149154/twitterhalf_normal.jpg", "http://a1.twimg.com/profile_images/97470808/icon_normal.png", "http://a3.twimg.com/profile_images/511790713/AG_normal.png", "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man.png", "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", "http://a1.twimg.com/profile_images/349012784/android_logo_small_normal.jpg", "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon_normal.png", "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", "http://a3.twimg.com/profile_images/77641093/AndroidPlanet.png", "http://a3.twimg.com/profile_images/670625317/aam-logo-v3-twitter_normal.png", "http://a3.twimg.com/profile_images/740897825/AndroidCast-350_normal.png", "http://a3.twimg.com/profile_images/121630227/Droid_normal.jpg", "http://a1.twimg.com/profile_images/957149154/twitterhalf_normal.jpg", "http://a1.twimg.com/profile_images/97470808/icon.png", "http://a3.twimg.com/profile_images/511790713/AG_normal.png", "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", "http://a1.twimg.com/profile_images/349012784/android_logo_small_normal.jpg", "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon.png", "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300_normal.jpg", "http://a1.twimg.com/profile_images/655119538/andbook_normal.png", "http://a3.twimg.com/profile_images/768060227/ap4u_normal.jpg", 
"http://a1.twimg.com/profile_images/74724754/android_logo.png", "http://a3.twimg.com/profile_images/681537837/SmallAvatarx150_normal.png", "http://a1.twimg.com/profile_images/63737974/2008-11-06_1637_normal.png", "http://a3.twimg.com/profile_images/548410609/icon_8_73_normal.png", "http://a1.twimg.com/profile_images/612232882/nexusoneavatar_normal.jpg", "http://a1.twimg.com/profile_images/213722080/Bugdroid-phone_normal.png", "http://a1.twimg.com/profile_images/645523828/OT_icon_090918_android.png", "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300_normal.jpg", "http://a1.twimg.com/profile_images/655119538/andbook.png", "http://a3.twimg.com/profile_images/511790713/AG_normal.png", "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", "http://a1.twimg.com/profile_images/349012784/android_logo_small_normal.jpg", "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon.png", "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300_normal.jpg", "http://a1.twimg.com/profile_images/655119538/andbook_normal.png", "http://a3.twimg.com/profile_images/511790713/AG_normal.png", "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", "http://a3.twimg.com/profile_images/121630227/Droid_normal.jpg", 
"http://a1.twimg.com/profile_images/957149154/twitterhalf.jpg", "http://a1.twimg.com/profile_images/97470808/icon_normal.png", "http://a3.twimg.com/profile_images/511790713/AG_normal.png", "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", "http://a1.twimg.com/profile_images/349012784/android_logo_small.jpg", "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon_normal.png", "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png" }; }
Sample images urls fixes
src/com/fedorvlasov/lazylist/MainActivity.java
Sample images urls fixes
<ide><path>rc/com/fedorvlasov/lazylist/MainActivity.java <ide> }; <ide> <ide> private String[] mStrings={ <del> "http://a3.twimg.com/profile_images/670625317/aam-logo-v3-twitter.png", <del> "http://a3.twimg.com/profile_images/740897825/AndroidCast-350_normal.png", <del> "http://a3.twimg.com/profile_images/121630227/Droid_normal.jpg", <del> "http://a1.twimg.com/profile_images/957149154/twitterhalf_normal.jpg", <del> "http://a1.twimg.com/profile_images/97470808/icon_normal.png", <del> "http://a3.twimg.com/profile_images/511790713/AG.png", <del> "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", <del> "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", <del> "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", <del> "http://a1.twimg.com/profile_images/349012784/android_logo_small_normal.jpg", <del> "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon.png", <del> "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", <del> "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", <del> "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300_normal.jpg", <del> "http://a1.twimg.com/profile_images/655119538/andbook.png", <del> "http://a3.twimg.com/profile_images/768060227/ap4u_normal.jpg", <del> "http://a1.twimg.com/profile_images/74724754/android_logo_normal.png", <del> "http://a3.twimg.com/profile_images/681537837/SmallAvatarx150_normal.png", <del> "http://a1.twimg.com/profile_images/63737974/2008-11-06_1637_normal.png", <del> "http://a3.twimg.com/profile_images/548410609/icon_8_73.png", <del> "http://a1.twimg.com/profile_images/612232882/nexusoneavatar_normal.jpg", <del> "http://a1.twimg.com/profile_images/213722080/Bugdroid-phone_normal.png", <del> "http://a1.twimg.com/profile_images/645523828/OT_icon_090918_android_normal.png", <del> "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", <del> 
"http://a3.twimg.com/profile_images/77641093/AndroidPlanet.png", <del> "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300_normal.jpg", <del> "http://a1.twimg.com/profile_images/655119538/andbook_normal.png", <del> "http://a3.twimg.com/profile_images/511790713/AG_normal.png", <del> "http://a3.twimg.com/profile_images/956404323/androinica-avatar.png", <del> "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", <del> "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", <del> "http://a1.twimg.com/profile_images/349012784/android_logo_small_normal.jpg", <del> "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon_normal.png", <del> "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", <del> "http://a3.twimg.com/profile_images/77641093/AndroidPlanet.png", <del> "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", <del> "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", <del> "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300.jpg", <del> "http://a1.twimg.com/profile_images/655119538/andbook_normal.png", <del> "http://a3.twimg.com/profile_images/511790713/AG_normal.png", <del> "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", <del> "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", <del> "http://a3.twimg.com/profile_images/121630227/Droid.jpg", <del> "http://a1.twimg.com/profile_images/957149154/twitterhalf_normal.jpg", <del> "http://a1.twimg.com/profile_images/97470808/icon_normal.png", <del> "http://a3.twimg.com/profile_images/511790713/AG_normal.png", <del> "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", <del> "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man.png", <del> "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", <del> 
"http://a1.twimg.com/profile_images/349012784/android_logo_small_normal.jpg", <del> "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon_normal.png", <del> "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", <del> "http://a3.twimg.com/profile_images/77641093/AndroidPlanet.png", <del> "http://a3.twimg.com/profile_images/670625317/aam-logo-v3-twitter_normal.png", <del> "http://a3.twimg.com/profile_images/740897825/AndroidCast-350_normal.png", <del> "http://a3.twimg.com/profile_images/121630227/Droid_normal.jpg", <del> "http://a1.twimg.com/profile_images/957149154/twitterhalf_normal.jpg", <del> "http://a1.twimg.com/profile_images/97470808/icon.png", <del> "http://a3.twimg.com/profile_images/511790713/AG_normal.png", <del> "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", <del> "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", <del> "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", <del> "http://a1.twimg.com/profile_images/349012784/android_logo_small_normal.jpg", <del> "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon.png", <del> "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", <del> "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", <del> "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300_normal.jpg", <del> "http://a1.twimg.com/profile_images/655119538/andbook_normal.png", <del> "http://a3.twimg.com/profile_images/768060227/ap4u_normal.jpg", <del> "http://a1.twimg.com/profile_images/74724754/android_logo.png", <del> "http://a3.twimg.com/profile_images/681537837/SmallAvatarx150_normal.png", <del> "http://a1.twimg.com/profile_images/63737974/2008-11-06_1637_normal.png", <del> "http://a3.twimg.com/profile_images/548410609/icon_8_73_normal.png", <del> "http://a1.twimg.com/profile_images/612232882/nexusoneavatar_normal.jpg", <del> 
"http://a1.twimg.com/profile_images/213722080/Bugdroid-phone_normal.png", <del> "http://a1.twimg.com/profile_images/645523828/OT_icon_090918_android.png", <del> "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", <del> "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", <del> "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300_normal.jpg", <del> "http://a1.twimg.com/profile_images/655119538/andbook.png", <del> "http://a3.twimg.com/profile_images/511790713/AG_normal.png", <del> "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", <del> "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", <del> "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", <del> "http://a1.twimg.com/profile_images/349012784/android_logo_small_normal.jpg", <del> "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon.png", <del> "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", <del> "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", <del> "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", <del> "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png", <del> "http://a1.twimg.com/profile_images/850960042/elandroidelibre-logo_300x300_normal.jpg", <del> "http://a1.twimg.com/profile_images/655119538/andbook_normal.png", <del> "http://a3.twimg.com/profile_images/511790713/AG_normal.png", <del> "http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", <del> "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", <del> "http://a3.twimg.com/profile_images/121630227/Droid_normal.jpg", <del> "http://a1.twimg.com/profile_images/957149154/twitterhalf.jpg", <del> "http://a1.twimg.com/profile_images/97470808/icon_normal.png", <del> "http://a3.twimg.com/profile_images/511790713/AG_normal.png", <del> 
"http://a3.twimg.com/profile_images/956404323/androinica-avatar_normal.png", <del> "http://a1.twimg.com/profile_images/909231146/Android_Biz_Man_normal.png", <del> "http://a3.twimg.com/profile_images/72774055/AndroidHomme-LOGO_normal.jpg", <del> "http://a1.twimg.com/profile_images/349012784/android_logo_small.jpg", <del> "http://a1.twimg.com/profile_images/841338368/ea-twitter-icon_normal.png", <del> "http://a3.twimg.com/profile_images/64827025/android-wallpaper6_2560x160_normal.png", <del> "http://a3.twimg.com/profile_images/77641093/AndroidPlanet_normal.png" <add> "https://pbs.twimg.com/profile_images/3092003750/9b72a46e957a52740c667f4c64fa5d10_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/2508170683/m8jf0po4imu8t5eemjdd_normal.png", <add> "https://pbs.twimg.com/profile_images/1701796334/TA-New-Logo_normal.jpg", <add> "https://pbs.twimg.com/profile_images/913338263/AndroidPolice_logo_normal.png", <add> "https://pbs.twimg.com/profile_images/1417650153/android-hug_normal.png", <add> "https://pbs.twimg.com/profile_images/1517737798/aam-twitter-right-final_normal.png", <add> "https://pbs.twimg.com/profile_images/3319660679/70e7025a05b674852b9f3cea0998259c_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/2100693240/58534_150210305010136_148613708503129_315282_6481640_n_normal.jpg", <add> "https://pbs.twimg.com/profile_images/1306095935/androidcoo_normal.png", <add> "https://pbs.twimg.com/profile_images/2938108229/399ba333772228bfbb40134018fbe777_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/487047133392949248/sVTI9rGI_normal.png", <add> "https://pbs.twimg.com/profile_images/3092003750/9b72a46e957a52740c667f4c64fa5d10_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/2508170683/m8jf0po4imu8t5eemjdd_normal.png", <add> "https://pbs.twimg.com/profile_images/1701796334/TA-New-Logo_normal.jpg", <add> "https://pbs.twimg.com/profile_images/913338263/AndroidPolice_logo_normal.png", <add> 
"https://pbs.twimg.com/profile_images/1417650153/android-hug_normal.png", <add> "https://pbs.twimg.com/profile_images/1517737798/aam-twitter-right-final_normal.png", <add> "https://pbs.twimg.com/profile_images/3319660679/70e7025a05b674852b9f3cea0998259c_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/2100693240/58534_150210305010136_148613708503129_315282_6481640_n_normal.jpg", <add> "https://pbs.twimg.com/profile_images/1306095935/androidcoo_normal.png", <add> "https://pbs.twimg.com/profile_images/2938108229/399ba333772228bfbb40134018fbe777_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/487047133392949248/sVTI9rGI_normal.png","https://pbs.twimg.com/profile_images/3092003750/9b72a46e957a52740c667f4c64fa5d10_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/2508170683/m8jf0po4imu8t5eemjdd_normal.png", <add> "https://pbs.twimg.com/profile_images/1701796334/TA-New-Logo_normal.jpg", <add> "https://pbs.twimg.com/profile_images/913338263/AndroidPolice_logo_normal.png", <add> "https://pbs.twimg.com/profile_images/1417650153/android-hug_normal.png", <add> "https://pbs.twimg.com/profile_images/1517737798/aam-twitter-right-final_normal.png", <add> "https://pbs.twimg.com/profile_images/3319660679/70e7025a05b674852b9f3cea0998259c_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/2100693240/58534_150210305010136_148613708503129_315282_6481640_n_normal.jpg", <add> "https://pbs.twimg.com/profile_images/1306095935/androidcoo_normal.png", <add> "https://pbs.twimg.com/profile_images/2938108229/399ba333772228bfbb40134018fbe777_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/487047133392949248/sVTI9rGI_normal.png","https://pbs.twimg.com/profile_images/3092003750/9b72a46e957a52740c667f4c64fa5d10_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/2508170683/m8jf0po4imu8t5eemjdd_normal.png", <add> "https://pbs.twimg.com/profile_images/1701796334/TA-New-Logo_normal.jpg", <add> 
"https://pbs.twimg.com/profile_images/913338263/AndroidPolice_logo_normal.png", <add> "https://pbs.twimg.com/profile_images/1417650153/android-hug_normal.png", <add> "https://pbs.twimg.com/profile_images/1517737798/aam-twitter-right-final_normal.png", <add> "https://pbs.twimg.com/profile_images/3319660679/70e7025a05b674852b9f3cea0998259c_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/2100693240/58534_150210305010136_148613708503129_315282_6481640_n_normal.jpg", <add> "https://pbs.twimg.com/profile_images/1306095935/androidcoo_normal.png", <add> "https://pbs.twimg.com/profile_images/2938108229/399ba333772228bfbb40134018fbe777_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/487047133392949248/sVTI9rGI_normal.png","https://pbs.twimg.com/profile_images/3092003750/9b72a46e957a52740c667f4c64fa5d10_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/2508170683/m8jf0po4imu8t5eemjdd_normal.png", <add> "https://pbs.twimg.com/profile_images/1701796334/TA-New-Logo_normal.jpg", <add> "https://pbs.twimg.com/profile_images/913338263/AndroidPolice_logo_normal.png", <add> "https://pbs.twimg.com/profile_images/1417650153/android-hug_normal.png", <add> "https://pbs.twimg.com/profile_images/1517737798/aam-twitter-right-final_normal.png", <add> "https://pbs.twimg.com/profile_images/3319660679/70e7025a05b674852b9f3cea0998259c_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/2100693240/58534_150210305010136_148613708503129_315282_6481640_n_normal.jpg", <add> "https://pbs.twimg.com/profile_images/1306095935/androidcoo_normal.png", <add> "https://pbs.twimg.com/profile_images/2938108229/399ba333772228bfbb40134018fbe777_normal.jpeg", <add> "https://pbs.twimg.com/profile_images/487047133392949248/sVTI9rGI_normal.png" <ide> }; <ide> }
Java
apache-2.0
779d7088b16ef2f4d05b5a94ebe511967a875ef5
0
strongbox/strongbox-authentication-example
src/main/java/org/carlspring/strongbox/authentication/impl/example/EmptyAuthentication.java
package org.carlspring.strongbox.authentication.impl.example; import java.util.Collection; import org.springframework.security.core.Authentication; import org.springframework.security.core.GrantedAuthority; /** * @author Przemyslaw Fusik */ public class EmptyAuthentication implements Authentication { public Collection<? extends GrantedAuthority> getAuthorities() { return null; } public Object getCredentials() { return new Object(); } public Object getDetails() { return new Object(); } public Object getPrincipal() { return new Object(); } public boolean isAuthenticated() { return true; } public void setAuthenticated(boolean isAuthenticated) throws IllegalArgumentException { } public String getName() { return EmptyAuthentication.class.getSimpleName(); } }
SB-625 unneccesary file removal
src/main/java/org/carlspring/strongbox/authentication/impl/example/EmptyAuthentication.java
SB-625 unneccesary file removal
<ide><path>rc/main/java/org/carlspring/strongbox/authentication/impl/example/EmptyAuthentication.java <del>package org.carlspring.strongbox.authentication.impl.example; <del> <del>import java.util.Collection; <del> <del>import org.springframework.security.core.Authentication; <del>import org.springframework.security.core.GrantedAuthority; <del> <del>/** <del> * @author Przemyslaw Fusik <del> */ <del>public class EmptyAuthentication <del> implements Authentication <del>{ <del> <del> public Collection<? extends GrantedAuthority> getAuthorities() <del> { <del> return null; <del> } <del> <del> public Object getCredentials() <del> { <del> return new Object(); <del> } <del> <del> public Object getDetails() <del> { <del> return new Object(); <del> } <del> <del> public Object getPrincipal() <del> { <del> return new Object(); <del> } <del> <del> public boolean isAuthenticated() <del> { <del> return true; <del> } <del> <del> public void setAuthenticated(boolean isAuthenticated) <del> throws IllegalArgumentException <del> { <del> <del> } <del> <del> public String getName() <del> { <del> return EmptyAuthentication.class.getSimpleName(); <del> } <del>}
JavaScript
agpl-3.0
6ffeef1fda804ad7d7dd91d2ef43615d5bf18f7f
0
wearespindle/flindt,wearespindle/flindt,wearespindle/flindt
import propTypes from 'prop-types'; import React, { Component } from 'react'; import Time from 'react-time'; import { reduxForm, Field } from 'redux-form'; import { Link } from 'react-router-dom'; import { connect } from 'react-redux'; import Notifications from 'react-notification-system-redux'; import history from '../utils/history'; import RoleModalButton from '../components/RoleModalButton'; import SkipFeedbackModalButton from '../components/SkipFeedbackModalButton'; import Header from '../components/header'; import { cleanFeedback, fetchFeedback, editFeedback } from '../actions/feedback'; // renderField component for reduxForms. const renderTextArea = ({ input, meta: { touched, error } }) => ( <div> <textarea {...input} required /> {touched && error && <span className="label--alert">{error}</span>} </div> ); renderTextArea.propTypes = { input: propTypes.object, meta: propTypes.object }; // Assign this class to a variable to 'connect' both reduxForm and redux without // ESLint throwing a `no-class-assign`-error. let GiveRoleFeedbackClass = class GiveRoleFeedback extends Component { constructor(props) { super(props); this._handleSubmit = this._handleSubmit.bind(this); this.state = { id: this.props.match.params.feedbackId }; } componentWillMount() { let accessToken = this.props.user.user.access_token; this.props.fetchFeedback(accessToken, this.props.match.params.feedbackId); } componentWillUnmount() { this.props.cleanFeedback(); } _handleSubmit(values, dispatch, props) { const { id } = this.state; let ratings = this.props.feedback.feedback.round.available_ratings; let accessToken = this.props.user.user.access_token; let remarks = []; // Loop through ratings and set the content for the values. 
ratings.map((rating, index) => { remarks.push({ rating: { rating_id: rating.id, name: rating.name, description: rating.description }, content: values[rating.name] }); return null; }); this.props .editFeedback( { id, status: 1, role: { remarks } }, accessToken ) .then(response => { if (response.payload.status !== 200) { this.props.dispatch( Notifications.error({ title: 'Error!', message: 'Something went wrong while saving the data!', position: 'tr', autoDismiss: 4 }) ); } else { this.props.dispatch( Notifications.success({ title: 'Sweet success!', message: 'Feedback succesfully saved! Thanks!', position: 'tr', autoDismiss: 4 }) ); // Send the user back to his feedback overview after a succesful action. history.push('/give-feedback/'); } }); } render() { const { feedback } = this.props.feedback; if (!Object.keys(feedback).length) { return ( <div className="content--wrapper"> <div className="content--header"> <Header /> <div className="content--header-breadcrumbs"> <ul> <li>Give feedback</li> <li>Feedback on role</li> </ul> </div> </div> <div className="content"> <h2>Feedback on role</h2> <div className="feedback-form--wrapper"> <div className="spinner"> <div className="bounce1" /> <div className="bounce2" /> <div className="bounce3" /> </div> </div> </div> </div> ); } let person = feedback.recipient; const sender = feedback.sender; const { handleSubmit } = this.props; const ratings = feedback.round.available_ratings; const role = feedback.role.role; const accessToken = this.props.user.user.access_token; const requested = feedback.role.requested; return ( <div className="content--wrapper"> <div className="content--header"> <Header /> <div className="content--header-breadcrumbs"> <ul> <li>Give feedback</li> <li>Feedback on role</li> </ul> </div> </div> <div className="content"> <h2>Feedback on role</h2> <div className="feedback-form--wrapper"> <table className="feedback-form--meta"> <thead> <tr> <th>Person</th> <th>Role</th> <th>Circle</th> <th>Requested on</th> 
{requested && <th>Requested</th>} </tr> </thead> <tbody> <tr> <td data-label="Person"> {person.first_name} {person.last_name} </td> <td data-label="Role"> <RoleModalButton accessToken={accessToken} role={role.id}> {role.name} </RoleModalButton> </td> <td data-label="Circle"> {role.parent && ( <RoleModalButton accessToken={accessToken} role={role.parent.id} > {role.parent.name} </RoleModalButton> )} </td> <td data-label="Received on"> <Time value={feedback.date} locale="EN" format="D MMMM YYYY" /> </td> {requested && ( <td>Feedback requested by {person.first_name}</td> )} </tr> </tbody> </table> <div className="feedback-form--row padding-bottom-0"> <div className="feedback-form--form"> <form onSubmit={handleSubmit(this._handleSubmit)}> {ratings.map(rating => { return ( <div key={rating.id} className="feedback-form--row"> {rating.image && ( <div className="l-5 feedback-form--row-smiley"> <img alt="Rating" src={rating.image} /> </div> )} <div className={rating.image ? 'l-43' : ''}> <label htmlFor={rating.name}> <strong>{rating.description}</strong> <span className="is-required">*</span> </label> <Field name={rating.name} component={renderTextArea} /> </div> </div> ); })} <Link to="/give-feedback" className="action--button neutral"> <i className="fa fa-chevron-left" /> Back to overview </Link> <button className="action--button is-right" type="submit"> Save </button> <SkipFeedbackModalButton /> </form> </div> </div> </div> </div> </div> ); } }; // reduxForm validate function. function validate(values) { const errors = {}; // // if (!values.improvementFeedback) { // errors.improvementFeedback = 'Please fill in some improvements'; // } // // if (!values.positiveFeedback) { // errors.positiveFeedback = 'Please try to fill in any positive notes for this role'; // } return errors; } // Redux functions to map state and dispatch to props. 
const mapStateToProps = state => ({ feedback: state.Feedback.feedback, user: state.User.data }); GiveRoleFeedbackClass.propTypes = { cleanFeedback: propTypes.func, editFeedback: propTypes.func, dispatch: propTypes.func, feedback: propTypes.object, fetchFeedback: propTypes.func, handleSubmit: propTypes.func, params: propTypes.object, user: propTypes.object }; GiveRoleFeedbackClass.contextTypes = { router: propTypes.object }; // Connect reduxForm to our class. GiveRoleFeedbackClass = reduxForm({ form: 'GivePersonalFeedbackForm', validate })(GiveRoleFeedbackClass); export default connect(mapStateToProps, { fetchFeedback, editFeedback, cleanFeedback })(GiveRoleFeedbackClass);
frontend/src/pages/GiveRoleFeedback.js
import propTypes from 'prop-types'; import React, { Component } from 'react'; import Time from 'react-time'; import { reduxForm, Field } from 'redux-form'; import { Link } from 'react-router-dom'; import { connect } from 'react-redux'; import Notifications from 'react-notification-system-redux'; import history from '../utils/history'; import RoleModalButton from '../components/RoleModalButton'; import SkipFeedbackModalButton from '../components/SkipFeedbackModalButton'; import Header from '../components/header'; import { cleanFeedback, fetchFeedback, editFeedback } from '../actions/feedback'; // renderField component for reduxForms. const renderTextArea = ({ input, meta: { touched, error } }) => ( <div> <textarea {...input} required /> {touched && error && <span className="label--alert">{error}</span>} </div> ); renderTextArea.propTypes = { input: propTypes.object, meta: propTypes.object }; // Assign this class to a variable to 'connect' both reduxForm and redux without // ESLint throwing a `no-class-assign`-error. let GiveRoleFeedbackClass = class GiveRoleFeedback extends Component { constructor(props) { super(props); this._handleSubmit = this._handleSubmit.bind(this); this.state = { id: this.props.match.params.feedbackId }; } componentWillMount() { let accessToken = this.props.user.user.access_token; this.props.fetchFeedback(accessToken, this.props.match.params.feedbackId); } componentWillUnmount() { this.props.cleanFeedback(); } _handleSubmit(values, dispatch, props) { const { id } = this.state; let ratings = this.props.feedback.feedback.round.available_ratings; let accessToken = this.props.user.user.access_token; let remarks = []; // Loop through ratings and set the content for the values. 
ratings.map((rating, index) => { remarks.push({ rating: { rating_id: rating.id, name: rating.name, description: rating.description }, content: values[rating.name] }); return null; }); this.props .editFeedback( { id, status: 1, role: { remarks } }, accessToken ) .then(response => { if (response.payload.status !== 200) { this.props.dispatch( Notifications.error({ title: 'Error!', message: 'Something went wrong while saving the data!', position: 'tr', autoDismiss: 4 }) ); } else { this.props.dispatch( Notifications.success({ title: 'Sweet success!', message: 'Feedback succesfully saved! Thanks!', position: 'tr', autoDismiss: 4 }) ); // Send the user back to his feedback overview after a succesful action. history.push('/give-feedback/'); } }); } render() { const { feedback } = this.props.feedback; if (!Object.keys(feedback).length) { return ( <div className="content--wrapper"> <div className="content--header"> <Header /> <div className="content--header-breadcrumbs"> <ul> <li>Give feedback</li> <li>Feedback on role</li> </ul> </div> </div> <div className="content"> <h2>Feedback on role</h2> <div className="feedback-form--wrapper"> <div className="spinner"> <div className="bounce1" /> <div className="bounce2" /> <div className="bounce3" /> </div> </div> </div> </div> ); } let person = feedback.recipient; const sender = feedback.sender; const { handleSubmit } = this.props; const ratings = feedback.round.available_ratings; const role = feedback.role.role; const accessToken = this.props.user.user.access_token; const requested = feedback.role.requested; return ( <div className="content--wrapper"> <div className="content--header"> <Header /> <div className="content--header-breadcrumbs"> <ul> <li>Give feedback</li> <li>Feedback on role</li> </ul> </div> </div> <div className="content"> <h2>Feedback on role</h2> <div className="feedback-form--wrapper"> <table className="feedback-form--meta"> <thead> <tr> <th>Person</th> <th>Role</th> <th>Circle</th> <th>Requested on</th> 
{requested && <th>Requested</th>} </tr> </thead> <tbody> <tr> <td data-label="Person"> {person.first_name} {person.last_name} </td> <td data-label="Role"> <RoleModalButton accessToken={accessToken} role={role.id}> {role.name} </RoleModalButton> </td> <td data-label="Circle"> {role.parent && ( <RoleModalButton accessToken={accessToken} role={role.parent.id} > {role.parent.name} </RoleModalButton> )} </td> <td data-label="Received on"> <Time value={feedback.date} locale="EN" format="D MMMM YYYY" /> </td> {requested && ( <td>Feedback requested by {sender.first_name}</td> )} </tr> </tbody> </table> <div className="feedback-form--row padding-bottom-0"> <div className="feedback-form--form"> <form onSubmit={handleSubmit(this._handleSubmit)}> {ratings.map(rating => { return ( <div key={rating.id} className="feedback-form--row"> {rating.image && ( <div className="l-5 feedback-form--row-smiley"> <img alt="Rating" src={rating.image} /> </div> )} <div className={rating.image ? 'l-43' : ''}> <label htmlFor={rating.name}> <strong>{rating.description}</strong> <span className="is-required">*</span> </label> <Field name={rating.name} component={renderTextArea} /> </div> </div> ); })} <Link to="/give-feedback" className="action--button neutral"> <i className="fa fa-chevron-left" /> Back to overview </Link> <button className="action--button is-right" type="submit"> Save </button> <SkipFeedbackModalButton /> </form> </div> </div> </div> </div> </div> ); } }; // reduxForm validate function. function validate(values) { const errors = {}; // // if (!values.improvementFeedback) { // errors.improvementFeedback = 'Please fill in some improvements'; // } // // if (!values.positiveFeedback) { // errors.positiveFeedback = 'Please try to fill in any positive notes for this role'; // } return errors; } // Redux functions to map state and dispatch to props. 
const mapStateToProps = state => ({ feedback: state.Feedback.feedback, user: state.User.data }); GiveRoleFeedbackClass.propTypes = { cleanFeedback: propTypes.func, editFeedback: propTypes.func, dispatch: propTypes.func, feedback: propTypes.object, fetchFeedback: propTypes.func, handleSubmit: propTypes.func, params: propTypes.object, user: propTypes.object }; GiveRoleFeedbackClass.contextTypes = { router: propTypes.object }; // Connect reduxForm to our class. GiveRoleFeedbackClass = reduxForm({ form: 'GivePersonalFeedbackForm', validate })(GiveRoleFeedbackClass); export default connect(mapStateToProps, { fetchFeedback, editFeedback, cleanFeedback })(GiveRoleFeedbackClass);
Fix name of requested feedback recipient
frontend/src/pages/GiveRoleFeedback.js
Fix name of requested feedback recipient
<ide><path>rontend/src/pages/GiveRoleFeedback.js <ide> /> <ide> </td> <ide> {requested && ( <del> <td>Feedback requested by {sender.first_name}</td> <add> <td>Feedback requested by {person.first_name}</td> <ide> )} <ide> </tr> <ide> </tbody>
Java
mit
95134261bb237824e33732620c07433e92f40bd4
0
bcvsolutions/CzechIdMng,bcvsolutions/CzechIdMng,bcvsolutions/CzechIdMng,bcvsolutions/CzechIdMng
package eu.bcvsolutions.idm.acc.entity; import java.util.UUID; import javax.persistence.Column; import javax.persistence.ConstraintMode; import javax.persistence.Embedded; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.ForeignKey; import javax.persistence.Index; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.Table; import javax.validation.constraints.NotNull; import org.joda.time.DateTime; import eu.bcvsolutions.idm.acc.domain.ProvisioningContext; import eu.bcvsolutions.idm.acc.domain.ProvisioningEventType; import eu.bcvsolutions.idm.acc.domain.SystemEntityType; import eu.bcvsolutions.idm.core.api.entity.AbstractEntity; import eu.bcvsolutions.idm.core.api.entity.OperationResult; /** * Persisted "active" provisioning operation. Any operation has batch and operation result. * * @author Radek Tomiška * */ @Entity @Table(name = "sys_provisioning_operation", indexes = { @Index(name = "idx_sys_p_o_created", columnList = "created"), @Index(name = "idx_sys_p_o_operation_type", columnList = "operation_type"), @Index(name = "idx_sys_p_o_system", columnList = "system_id"), @Index(name = "idx_sys_p_o_entity_type", columnList = "entity_type"), @Index(name = "idx_sys_p_o_sys_entity", columnList = "system_entity_id"), @Index(name = "idx_sys_p_o_entity_identifier", columnList = "entity_identifier"), @Index(name = "idx_sys_pro_oper_batch_id", columnList = "provisioning_batch_id") }) public class SysProvisioningOperation extends AbstractEntity { private static final long serialVersionUID = -6191740329296942394L; @NotNull @Enumerated(EnumType.STRING) @Column(name = "operation_type", nullable = false) private ProvisioningEventType operationType; @NotNull @Column(name = "provisioning_context", length = Integer.MAX_VALUE, nullable = false) private ProvisioningContext provisioningContext; @NotNull @ManyToOne(optional = false) @JoinColumn(name = 
"system_id", referencedColumnName = "id", foreignKey = @ForeignKey(value = ConstraintMode.NO_CONSTRAINT)) @SuppressWarnings("deprecation") // jpa FK constraint does not work in hibernate 4 @org.hibernate.annotations.ForeignKey( name = "none" ) private SysSystem system; @NotNull @Enumerated(EnumType.STRING) @Column(name = "entity_type", nullable = false) private SystemEntityType entityType; @NotNull @ManyToOne(optional = false) @JoinColumn(name = "system_entity_id", referencedColumnName = "id", foreignKey = @ForeignKey(value = ConstraintMode.NO_CONSTRAINT)) @SuppressWarnings("deprecation") // jpa FK constraint does not work in hibernate 4 @org.hibernate.annotations.ForeignKey( name = "none" ) private SysSystemEntity systemEntity; @Column(name = "entity_identifier") private UUID entityIdentifier; @Column(name = "current_attempt") private int currentAttempt = 0; @Column(name = "max_attempts") private int maxAttempts; @Embedded private OperationResult result; @ManyToOne @JoinColumn(name = "provisioning_batch_id", referencedColumnName = "id", foreignKey = @ForeignKey(value = ConstraintMode.NO_CONSTRAINT)) @SuppressWarnings("deprecation") // jpa FK constraint does not work in hibernate 4 @org.hibernate.annotations.ForeignKey( name = "none" ) private SysProvisioningBatch batch; public ProvisioningEventType getOperationType() { return operationType; } public void setOperationType(ProvisioningEventType operationType) { this.operationType = operationType; } public SysSystem getSystem() { return system; } public void setSystem(SysSystem system) { this.system = system; } public SystemEntityType getEntityType() { return entityType; } public void setEntityType(SystemEntityType entityType) { this.entityType = entityType; } public SysSystemEntity getSystemEntity() { return systemEntity; } public void setSystemEntity(SysSystemEntity systemEntity) { this.systemEntity = systemEntity; } public UUID getEntityIdentifier() { return entityIdentifier; } public void setEntityIdentifier(UUID 
entityIdentifier) { this.entityIdentifier = entityIdentifier; } public ProvisioningContext getProvisioningContext() { return provisioningContext; } public void setProvisioningContext(ProvisioningContext provisioningContext) { this.provisioningContext = provisioningContext; } public int getCurrentAttempt() { return currentAttempt; } public void setCurrentAttempt(int attempt) { this.currentAttempt = attempt; } public void increaseAttempt() { this.currentAttempt++; } public int getMaxAttempts() { return maxAttempts; } public void setMaxAttempts(int maxAttempts) { this.maxAttempts = maxAttempts; } public SysProvisioningBatch getBatch() { return batch; } public void setBatch(SysProvisioningBatch batch) { this.batch = batch; } public OperationResult getResult() { return result; } public void setResult(OperationResult result) { this.result = result; } public String getSystemEntityUid() { if(this.systemEntity != null) { return this.systemEntity.getUid(); } return null; } public DateTime getNextAttempt() { if (batch == null) { return null; } return batch.getNextAttempt(); } }
Realization/backend/acc/src/main/java/eu/bcvsolutions/idm/acc/entity/SysProvisioningOperation.java
package eu.bcvsolutions.idm.acc.entity; import java.util.UUID; import javax.persistence.Column; import javax.persistence.ConstraintMode; import javax.persistence.Embedded; import javax.persistence.Entity; import javax.persistence.EnumType; import javax.persistence.Enumerated; import javax.persistence.ForeignKey; import javax.persistence.Index; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; import javax.persistence.Table; import javax.validation.constraints.NotNull; import org.joda.time.DateTime; import eu.bcvsolutions.idm.acc.domain.ProvisioningContext; import eu.bcvsolutions.idm.acc.domain.ProvisioningEventType; import eu.bcvsolutions.idm.acc.domain.SystemEntityType; import eu.bcvsolutions.idm.core.api.entity.AbstractEntity; import eu.bcvsolutions.idm.core.api.entity.OperationResult; /** * Persisted "active" provisioning operation. Any operation has batch and operation result. * * @author Radek Tomiška * */ @Entity @Table(name = "sys_provisioning_operation", indexes = { @Index(name = "idx_sys_p_o_created", columnList = "created"), @Index(name = "idx_sys_p_o_operation_type", columnList = "operation_type"), @Index(name = "idx_sys_p_o_system", columnList = "system_id"), @Index(name = "idx_sys_p_o_entity_type", columnList = "entity_type"), @Index(name = "idx_sys_p_o_sys_entity", columnList = "system_entity_id"), @Index(name = "idx_sys_p_o_entity_identifier", columnList = "entity_identifier"), @Index(name = "idx_sys_pro_oper_batch_id", columnList = "provisioning_batch_id") }) public class SysProvisioningOperation extends AbstractEntity { private static final long serialVersionUID = -6191740329296942394L; @NotNull @Enumerated(EnumType.STRING) @Column(name = "operation_type", nullable = false) private ProvisioningEventType operationType; @NotNull @Column(name = "provisioning_context", length = Integer.MAX_VALUE, nullable = false) private ProvisioningContext provisioningContext; @NotNull @ManyToOne(optional = false) @JoinColumn(name = 
"system_id", referencedColumnName = "id", foreignKey = @ForeignKey(value = ConstraintMode.NO_CONSTRAINT)) @SuppressWarnings("deprecation") // jpa FK constraint does not work in hibernate 4 @org.hibernate.annotations.ForeignKey( name = "none" ) private SysSystem system; @NotNull @Enumerated(EnumType.STRING) @Column(name = "entity_type", nullable = false) private SystemEntityType entityType; @NotNull @ManyToOne(optional = false) @JoinColumn(name = "system_entity_id", referencedColumnName = "id", foreignKey = @ForeignKey(value = ConstraintMode.NO_CONSTRAINT)) @SuppressWarnings("deprecation") // jpa FK constraint does not work in hibernate 4 @org.hibernate.annotations.ForeignKey( name = "none" ) private SysSystemEntity systemEntity; @Column(name = "entity_identifier") private UUID entityIdentifier; @Column(name = "current_attempt") private int currentAttempt = 0; @Column(name = "max_attempts") private int maxAttempts; @Embedded private OperationResult result; @ManyToOne(optional = false) @JoinColumn(name = "provisioning_batch_id", referencedColumnName = "id", foreignKey = @ForeignKey(value = ConstraintMode.NO_CONSTRAINT)) @SuppressWarnings("deprecation") // jpa FK constraint does not work in hibernate 4 @org.hibernate.annotations.ForeignKey( name = "none" ) private SysProvisioningBatch batch; public ProvisioningEventType getOperationType() { return operationType; } public void setOperationType(ProvisioningEventType operationType) { this.operationType = operationType; } public SysSystem getSystem() { return system; } public void setSystem(SysSystem system) { this.system = system; } public SystemEntityType getEntityType() { return entityType; } public void setEntityType(SystemEntityType entityType) { this.entityType = entityType; } public SysSystemEntity getSystemEntity() { return systemEntity; } public void setSystemEntity(SysSystemEntity systemEntity) { this.systemEntity = systemEntity; } public UUID getEntityIdentifier() { return entityIdentifier; } public void 
setEntityIdentifier(UUID entityIdentifier) { this.entityIdentifier = entityIdentifier; } public ProvisioningContext getProvisioningContext() { return provisioningContext; } public void setProvisioningContext(ProvisioningContext provisioningContext) { this.provisioningContext = provisioningContext; } public int getCurrentAttempt() { return currentAttempt; } public void setCurrentAttempt(int attempt) { this.currentAttempt = attempt; } public void increaseAttempt() { this.currentAttempt++; } public int getMaxAttempts() { return maxAttempts; } public void setMaxAttempts(int maxAttempts) { this.maxAttempts = maxAttempts; } public SysProvisioningBatch getBatch() { return batch; } public void setBatch(SysProvisioningBatch batch) { this.batch = batch; } public OperationResult getResult() { return result; } public void setResult(OperationResult result) { this.result = result; } public String getSystemEntityUid() { if(this.systemEntity != null) { return this.systemEntity.getUid(); } return null; } public DateTime getNextAttempt() { if (batch == null) { return null; } return batch.getNextAttempt(); } }
#1619 fix: do not apply feches in findIds method
Realization/backend/acc/src/main/java/eu/bcvsolutions/idm/acc/entity/SysProvisioningOperation.java
#1619 fix: do not apply feches in findIds method
<ide><path>ealization/backend/acc/src/main/java/eu/bcvsolutions/idm/acc/entity/SysProvisioningOperation.java <ide> @Embedded <ide> private OperationResult result; <ide> <del> @ManyToOne(optional = false) <add> @ManyToOne <ide> @JoinColumn(name = "provisioning_batch_id", referencedColumnName = "id", foreignKey = @ForeignKey(value = ConstraintMode.NO_CONSTRAINT)) <ide> @SuppressWarnings("deprecation") // jpa FK constraint does not work in hibernate 4 <ide> @org.hibernate.annotations.ForeignKey( name = "none" )
JavaScript
mit
9f2207eb1f3c1fb7a31ae816d74ec610887eb6aa
0
thinkxl/bower,bower/bower,rlugojr/bower
var os = require('os'); var path = require('path'); var paths = require('./paths'); // Guess proxy defined in the env /*jshint camelcase: false*/ var proxy = process.env.HTTP_PROXY || process.env.http_proxy || null; var httpsProxy = process.env.HTTPS_PROXY || process.env.https_proxy || proxy; /*jshint camelcase: true*/ // Use a well known user agent (in this case, curl) when using a proxy, // to avoid potential filtering on many corporate proxies with blank or unknown agents var userAgent = !proxy && !httpsProxy ? 'node/' + process.version + ' ' + process.platform + ' ' + process.arch : 'curl/7.21.4 (universal-apple-darwin11.0) libcurl/7.21.4 OpenSSL/0.9.8r zlib/1.2.5'; var defaults = { 'cwd': process.cwd(), 'directory': 'bower_components', 'registry': 'https://bower.herokuapp.com', 'shorthand-resolver': 'git://github.com/{{owner}}/{{package}}.git', 'tmp': os.tmpdir ? os.tmpdir() : os.tmpDir(), 'proxy': proxy, 'https-proxy': httpsProxy, 'timeout': 30000, 'ca': { search: [] }, 'strict-ssl': true, 'user-agent': userAgent, 'color': true, 'interactive': false, 'storage': { packages: path.join(paths.cache, 'packages'), links: path.join(paths.data, 'links'), completion: path.join(paths.data, 'completion'), registry: path.join(paths.cache, 'registry'), empty: path.join(paths.data, 'empty') // Empty dir, used in GIT_TEMPLATE_DIR among others } }; module.exports = defaults;
packages/bower-config/lib/util/defaults.js
var os = require('os'); var path = require('path'); var paths = require('./paths'); // Guess proxy defined in the env /*jshint camelcase: false*/ var proxy = process.env.HTTP_PROXY || process.env.http_proxy || null; var httpsProxy = process.env.HTTPS_PROXY || process.env.https_proxy || proxy; /*jshint camelcase: true*/ // Use a well known user agent (in this case, curl) when using a proxy, // to avoid potential filtering on many corporate proxies with blank or unknown agents var userAgent = !proxy && !httpsProxy ? 'node/' + process.version + ' ' + process.platform + ' ' + process.arch : 'curl/7.21.4 (universal-apple-darwin11.0) libcurl/7.21.4 OpenSSL/0.9.8r zlib/1.2.5'; var defaults = { 'cwd': process.cwd(), 'directory': 'bower_components', 'registry': 'https://bower.herokuapp.com', 'shorthand-resolver': 'git://github.com/{{owner}}/{{package}}.git', 'tmp': os.tmpdir ? os.tmpdir() : os.tmpDir(), 'proxy': proxy, 'https-proxy': httpsProxy, 'timeout': 30000, 'ca': { search: [] }, 'strict-ssl': true, 'user-agent': userAgent, 'color': true, 'interactive': false, 'storage': { packages: path.join(paths.cache, 'packages'), links: path.join(paths.data, 'links'), completion: path.join(paths.data, 'completion'), registry: path.join(paths.cache, 'registry'), git: path.join(paths.data, 'git') } }; module.exports = defaults;
Change git folder to empty (was not being used anyway).
packages/bower-config/lib/util/defaults.js
Change git folder to empty (was not being used anyway).
<ide><path>ackages/bower-config/lib/util/defaults.js <ide> links: path.join(paths.data, 'links'), <ide> completion: path.join(paths.data, 'completion'), <ide> registry: path.join(paths.cache, 'registry'), <del> git: path.join(paths.data, 'git') <add> empty: path.join(paths.data, 'empty') // Empty dir, used in GIT_TEMPLATE_DIR among others <ide> } <ide> }; <ide>
JavaScript
apache-2.0
ae6b4db799f7d2513a8850d25ede5c6fbe6e029b
0
opencb/jsorolla,opencb/jsorolla,opencb/jsorolla,opencb/jsorolla,opencb/jsorolla
const OPENCGA_VARIANT_BROWSER_SETTINGS = { menu: { // merge criterium: internal sections and filters are used to hydrates the external filters list for each section (which is a list of string). Sections and filter order is respected. sections: [ { title: "Study and Cohorts", filters: ["study", "cohort"] }, { title: "Genomic", collapsed: true, filters: ["region", "feature", "biotype", "type"] }, { title: "Consequence Type", collapsed: true, filters: ["consequence-type"] }, { title: "Population Frequency", collapsed: true, filters: ["populationFrequency"] }, { title: "Clinical", collapsed: true, filters: ["diseasePanels", "clinical-annotation", "fullTextSearch"] }, { title: "Phenotype", collapsed: true, filters: ["go", "hpo"] }, { title: "Deleteriousness", collapsed: true, filters: ["proteinSubstitutionScore", "cadd"] }, { title: "Conservation", collapsed: true, filters: ["conservation"] } ], // merge criterium: full outer join like. it adds objects presents in internal array only and in external array only. In case of objects with same id, the external values overwrite the internal. examples: [ { id: "Intellectual disability2", active: false, query: { disorder: "Intellectual disability" } } ] }, table: { // merge criterium: spread operator toolbar: { showColumns: true, showExport: true, showDownload: false // columns list for the dropdown will be added in grid components based on settings.table.columns }, // merge criterium: uses this array as filter for internal 1D/2D array. It handles row/col span. // It is supported either columns[] or hiddenColumns[]. columns: ["id", "gene", "type", "consequenceType", "deleteriousness", "conservation", "samples", "cohorts", "popfreq", "clinicalInfo"] // hiddenColumns: ["id", "gene", "type"] }, // merge criterium: uses this array as filter for internal 1D array. // It is supported either details[] or hiddenDetails[]. 
details: ["annotationSummary", "annotationConsType", "annotationPropFreq", "annotationClinical", "cohortStats", "samples", "beacon", "json-view"] // hiddenDetails: ["json-view"] };
src/sites/iva/conf/opencga-variant-browser.settings.js
const OPENCGA_VARIANT_BROWSER_SETTINGS = { menu: { // merge criterium: internal sections and filters are used to hydrates the external filters list for each section (which is a list of string). Sections and filter order is respected. sections: [ { title: "Study and Cohorts", filters: ["study", "cohort"] }, { title: "Genomic", collapsed: true, filters: ["region", "feature", "biotype", "type"] }, { title: "Consequence Type", collapsed: true, filters: ["consequence-type"] }, { title: "Population Frequency", collapsed: true, filters: ["populationFrequency"] }, { title: "Clinical", collapsed: true, filters: ["diseasePanels", "clinical-annotation", "fullTextSearch"] }, { title: "Phenotype", collapsed: true, filters: ["go", "hpo"] }, { title: "Deleteriousness", collapsed: true, filters: ["proteinSubstitutionScore", "cadd"] }, { title: "Conservation", collapsed: true, filters: ["conservation"] } ], // merge criterium: full outer join like. it adds objects presents in internal array only and in external array only. In case of objects with same id, the external values overwrite the internal. examples: [ { id: "Intellectual disability2", active: false, query: { disorder: "Intellectual disability" } } ] }, table: { // merge criterium: spread operator toolbar: { showColumns: true, showExport: true, showDownload: false // columns list for the dropdown will be added in grid components based on settings.table.columns }, // merge criterium: uses this array as filter for internal 1D/2D array. It handles row/col span. // It is supported either columns[] or hiddenColumns[]. columns: ["id", "gene", "type", "consequenceType", "deleteriousness", "conservation", "samples", "cohorts", "popfreq", "clinicalInfo"] // hiddenColumns: ["id", "gene", "type"] }, // merge criterium: uses this array as filter for internal 1D array. details: ["annotationSummary", "annotationConsType", "annotationPropFreq", "annotationClinical", "cohortStats", "samples", "beacon", "json-view"] };
HiddenDetails reminder added in opencga-variant-browser.settings.js
src/sites/iva/conf/opencga-variant-browser.settings.js
HiddenDetails reminder added in opencga-variant-browser.settings.js
<ide><path>rc/sites/iva/conf/opencga-variant-browser.settings.js <ide> // hiddenColumns: ["id", "gene", "type"] <ide> }, <ide> // merge criterium: uses this array as filter for internal 1D array. <add> // It is supported either details[] or hiddenDetails[]. <ide> details: ["annotationSummary", "annotationConsType", "annotationPropFreq", "annotationClinical", "cohortStats", "samples", "beacon", "json-view"] <add> // hiddenDetails: ["json-view"] <ide> };
Java
apache-2.0
3476a722f94bebb4252ccf42d5ec1cc6d0db8c6e
0
smadha/tika,icirellik/tika,smadha/tika,zamattiac/tika,smadha/tika,zamattiac/tika,smadha/tika,smadha/tika,icirellik/tika,zamattiac/tika,icirellik/tika,icirellik/tika,smadha/tika,icirellik/tika,icirellik/tika,zamattiac/tika,zamattiac/tika,zamattiac/tika,smadha/tika,zamattiac/tika,smadha/tika,zamattiac/tika,icirellik/tika,icirellik/tika
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tika.language; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.util.List; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import junit.textui.TestRunner; /** * JUnit based test of class {@link LanguageIdentifier}. 
* * @author Sami Siren * @author Jerome Charron - http://frutch.free.fr/ */ public class TestLanguageIdentifier extends TestCase { public TestLanguageIdentifier(String testName) { super(testName); } public static Test suite() { return new TestSuite(TestLanguageIdentifier.class); } public static void main(String[] args) { TestRunner.run(suite()); } String tokencontent1 = "testaddtoken"; String tokencontent2 = "anotherteststring"; int[] counts1 = { 3, 2, 2, 1, 1, 1, 1, 1 }; String[] chars1 = { "t", "d", "e", "a", "k", "n", "o", "s" }; /** * Test analyze method */ public void testAnalyze() { String tokencontent = "testmeagain"; NGramProfile p = new NGramProfile("test", 1, 1); p.analyze(new StringBuilder(tokencontent)); //test that profile size is ok, eg 9 different NGramEntries "tesmagin" assertEquals(8, p.getSorted().size()); } /** * Test addNGrams method with StringBuffer argument * */ public void testAddNGramsStringBuffer() { String tokencontent = "testmeagain"; NGramProfile p = new NGramProfile("test", 1, 1); p.add(new StringBuffer(tokencontent)); //test that profile size is ok, eg 8 different NGramEntries "tesmagin" assertEquals(8, p.getSorted().size()); } /** * test getSorted method */ public void testGetSorted() { int[] count = { 4, 3, 1 }; String[] ngram = { "a", "b", "c" }; String teststring = "AAaaBbbC"; NGramProfile p = new NGramProfile("test", 1, 1); p.analyze(new StringBuilder(teststring)); //test size of profile assertEquals(3, p.getSorted().size()); testCounts(p.getSorted(), count); testContents(p.getSorted(), ngram); } public void testGetSimilarity() { NGramProfile a = new NGramProfile("a", 1, 1); NGramProfile b = new NGramProfile("b", 1, 1); a.analyze(new StringBuilder(tokencontent1)); b.analyze(new StringBuilder(tokencontent2)); //because of rounding errors might slightly return different results assertEquals(a.getSimilarity(b), b.getSimilarity(a), 0.0000002); } public void testExactMatch() { NGramProfile a = new NGramProfile("a", 1, 1); 
a.analyze(new StringBuilder(tokencontent1)); assertEquals(a.getSimilarity(a), 0, 0); } public void testIO() { //Create profile and set some contents NGramProfile a = new NGramProfile("a", 1, 1); a.analyze(new StringBuilder(this.tokencontent1)); NGramProfile b = new NGramProfile("a_from_inputstream", 1, 1); //save profile ByteArrayOutputStream os = new ByteArrayOutputStream(); try { a.save(os); os.close(); } catch (Exception e) { fail(); } //load profile InputStream is = new ByteArrayInputStream(os.toByteArray()); try { b.load(is); is.close(); } catch (Exception e) { fail(); } //check it testCounts(b.getSorted(), counts1); testContents(b.getSorted(), chars1); } private void testContents(List<NGramEntry> entries, String contents[]) { int c = 0; for (NGramEntry nge : entries) { assertEquals(contents[c], nge.getSeq().toString()); c++; } } private void testCounts(List<NGramEntry> entries, int counts[]) { int c = 0; for (NGramEntry nge : entries) { // System.out.println(nge); assertEquals(counts[c], nge.getCount()); c++; } } public void testIdentify() { try { long total = 0; LanguageIdentifier idfr = new LanguageIdentifier(); BufferedReader in = new BufferedReader(new InputStreamReader( this.getClass().getResourceAsStream("test-referencial.txt"))); String line = null; while((line = in.readLine()) != null) { String[] tokens = line.split(";"); if (!tokens[0].equals("")) { long start = System.currentTimeMillis(); // Identify the whole file String lang = idfr.identify(this.getClass().getResourceAsStream(tokens[0]), "UTF-8"); total += System.currentTimeMillis() - start; assertEquals(tokens[1], lang); // Then, each line of the file... 
BufferedReader testFile = new BufferedReader( new InputStreamReader( this.getClass().getResourceAsStream(tokens[0]), "UTF-8")); String testLine = null; while((testLine = testFile.readLine()) != null) { testLine = testLine.trim(); if (testLine.length() > 256) { lang = idfr.identify(testLine); assertEquals(tokens[1], lang); } } testFile.close(); } } in.close(); System.out.println("Total Time=" + total); } catch(Exception e) { e.printStackTrace(); fail(e.toString()); } } }
tika-core/src/test/java/org/apache/tika/language/TestLanguageIdentifier.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tika.language; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.util.List; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import junit.textui.TestRunner; /** * JUnit based test of class {@link LanguageIdentifier}. 
* * @author Sami Siren * @author Jerome Charron - http://frutch.free.fr/ */ public class TestLanguageIdentifier extends TestCase { public TestLanguageIdentifier(String testName) { super(testName); } public static Test suite() { return new TestSuite(TestLanguageIdentifier.class); } public static void main(String[] args) { TestRunner.run(suite()); } String tokencontent1 = "testaddtoken"; String tokencontent2 = "anotherteststring"; int[] counts1 = { 3, 2, 2, 1, 1, 1, 1, 1 }; String[] chars1 = { "t", "d", "e", "a", "k", "n", "o", "s" }; /** * Test analyze method */ public void testAnalyze() { String tokencontent = "testmeagain"; NGramProfile p = new NGramProfile("test", 1, 1); p.analyze(new StringBuilder(tokencontent)); //test that profile size is ok, eg 9 different NGramEntries "tesmagin" assertEquals(8, p.getSorted().size()); } /** * Test addNGrams method with StringBuffer argument * */ public void testAddNGramsStringBuffer() { String tokencontent = "testmeagain"; NGramProfile p = new NGramProfile("test", 1, 1); p.add(new StringBuffer(tokencontent)); //test that profile size is ok, eg 8 different NGramEntries "tesmagin" assertEquals(8, p.getSorted().size()); } /** * test getSorted method */ public void testGetSorted() { int[] count = { 4, 3, 1 }; String[] ngram = { "a", "b", "c" }; String teststring = "AAaaBbbC"; NGramProfile p = new NGramProfile("test", 1, 1); p.analyze(new StringBuilder(teststring)); //test size of profile assertEquals(3, p.getSorted().size()); testCounts(p.getSorted(), count); testContents(p.getSorted(), ngram); } public void testGetSimilarity() { NGramProfile a = new NGramProfile("a", 1, 1); NGramProfile b = new NGramProfile("b", 1, 1); a.analyze(new StringBuilder(tokencontent1)); b.analyze(new StringBuilder(tokencontent2)); //because of rounding errors might slightly return different results assertEquals(a.getSimilarity(b), b.getSimilarity(a), 0.0000002); } public void testExactMatch() { NGramProfile a = new NGramProfile("a", 1, 1); 
a.analyze(new StringBuilder(tokencontent1)); assertEquals(a.getSimilarity(a), 0, 0); } public void testIO() { //Create profile and set some contents NGramProfile a = new NGramProfile("a", 1, 1); a.analyze(new StringBuilder(this.tokencontent1)); NGramProfile b = new NGramProfile("a_from_inputstream", 1, 1); //save profile ByteArrayOutputStream os = new ByteArrayOutputStream(); try { a.save(os); os.close(); } catch (Exception e) { fail(); } //load profile InputStream is = new ByteArrayInputStream(os.toByteArray()); try { b.load(is); is.close(); } catch (Exception e) { fail(); } //check it testCounts(b.getSorted(), counts1); testContents(b.getSorted(), chars1); } private void testContents(List<NGramEntry> entries, String contents[]) { int c = 0; for (NGramEntry nge : entries) { assertEquals(contents[c], nge.getSeq().toString()); c++; } } private void testCounts(List<NGramEntry> entries, int counts[]) { int c = 0; for (NGramEntry nge : entries) { System.out.println(nge); assertEquals(counts[c], nge.getCount()); c++; } } public void testIdentify() { try { long total = 0; LanguageIdentifier idfr = new LanguageIdentifier(); BufferedReader in = new BufferedReader(new InputStreamReader( this.getClass().getResourceAsStream("test-referencial.txt"))); String line = null; while((line = in.readLine()) != null) { String[] tokens = line.split(";"); if (!tokens[0].equals("")) { long start = System.currentTimeMillis(); // Identify the whole file String lang = idfr.identify(this.getClass().getResourceAsStream(tokens[0]), "UTF-8"); total += System.currentTimeMillis() - start; assertEquals(tokens[1], lang); // Then, each line of the file... 
BufferedReader testFile = new BufferedReader( new InputStreamReader( this.getClass().getResourceAsStream(tokens[0]), "UTF-8")); String testLine = null; while((testLine = testFile.readLine()) != null) { testLine = testLine.trim(); if (testLine.length() > 256) { lang = idfr.identify(testLine); assertEquals(tokens[1], lang); } } testFile.close(); } } in.close(); System.out.println("Total Time=" + total); } catch(Exception e) { e.printStackTrace(); fail(e.toString()); } } }
TIKA-209: Language detection is weak. Comment out a debug print. git-svn-id: fa8893aa956664a85c8d981696777ef1db2d462a@829668 13f79535-47bb-0310-9956-ffa450edef68
tika-core/src/test/java/org/apache/tika/language/TestLanguageIdentifier.java
TIKA-209: Language detection is weak.
<ide><path>ika-core/src/test/java/org/apache/tika/language/TestLanguageIdentifier.java <ide> int c = 0; <ide> <ide> for (NGramEntry nge : entries) { <del> System.out.println(nge); <add> // System.out.println(nge); <ide> assertEquals(counts[c], nge.getCount()); <ide> c++; <ide> }
Java
mit
c3a6424bfa76e3fc0622ff37bec761b4cff39705
0
Strikingwolf/OpenModLoader
package xyz.openmodloader; import org.apache.commons.lang3.SystemUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import xyz.openmodloader.event.EventBus; import xyz.openmodloader.launcher.OMLStrippableTransformer; import xyz.openmodloader.launcher.strippable.Environment; import xyz.openmodloader.modloader.ModLoader; import xyz.openmodloader.modloader.Version; public enum OpenModLoader { INSTANCE; private Version mcversion = new Version("1.10.2"); private Version version = new Version("0.0.1-develop"); private Logger logger = LogManager.getFormatterLogger("OpenModLoader"); private EventBus eventBus = new EventBus(); private ISidedHandler sidedHandler; public void minecraftConstruction(ISidedHandler sidedHandler) { this.sidedHandler = sidedHandler; getLogger().info("Loading OpenModLoader " + getVersion()); getLogger().info("Running Minecraft %s on %s using Java %s", mcversion, SystemUtils.OS_NAME, SystemUtils.JAVA_VERSION); ModLoader.registerMods(); getSidedHandler().onInitialize(); } public Version getMinecraftVersion() { return mcversion; } public Version getVersion() { return version; } public Logger getLogger() { return logger; } public EventBus getEventBus() { return eventBus; } public ISidedHandler getSidedHandler() { return sidedHandler; } public Environment getEnvironment() { return OMLStrippableTransformer.getEnvironment(); } }
src/main/java/xyz/openmodloader/OpenModLoader.java
package xyz.openmodloader; import org.apache.commons.lang3.SystemUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import xyz.openmodloader.event.EventBus; import xyz.openmodloader.launcher.OMLStrippableTransformer; import xyz.openmodloader.launcher.strippable.Environment; import xyz.openmodloader.modloader.ModLoader; import xyz.openmodloader.modloader.Version; public enum OpenModLoader { INSTANCE; private Version mcversion = new Version("1.10.0"); private Version version = new Version("0.0.1-develop"); private Logger logger = LogManager.getFormatterLogger("OpenModLoader"); private EventBus eventBus = new EventBus(); private ISidedHandler sidedHandler; public void minecraftConstruction(ISidedHandler sidedHandler) { this.sidedHandler = sidedHandler; getLogger().info("Loading OpenModLoader " + getVersion()); getLogger().info("Running Minecraft %s on %s using Java %s", mcversion, SystemUtils.OS_NAME, SystemUtils.JAVA_VERSION); ModLoader.registerMods(); getSidedHandler().onInitialize(); } public Version getMinecraftVersion() { return mcversion; } public Version getVersion() { return version; } public Logger getLogger() { return logger; } public EventBus getEventBus() { return eventBus; } public ISidedHandler getSidedHandler() { return sidedHandler; } public Environment getEnvironment() { return OMLStrippableTransformer.getEnvironment(); } }
Fix MC version
src/main/java/xyz/openmodloader/OpenModLoader.java
Fix MC version
<ide><path>rc/main/java/xyz/openmodloader/OpenModLoader.java <ide> public enum OpenModLoader { <ide> INSTANCE; <ide> <del> private Version mcversion = new Version("1.10.0"); <add> private Version mcversion = new Version("1.10.2"); <ide> private Version version = new Version("0.0.1-develop"); <ide> private Logger logger = LogManager.getFormatterLogger("OpenModLoader"); <ide> private EventBus eventBus = new EventBus();
Java
mit
590b3a32db4c2e3197ee2203e2a511eecfb94717
0
FAU-Inf2/kwikshop-server,FAU-Inf2/kwikshop-server,FAU-Inf2/kwikshop-server,FAU-Inf2/kwikshop-server
package de.fau.cs.mad.kwikshop.server.sorting; import org.junit.*; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Random; import java.util.Set; import de.fau.cs.mad.kwikshop.common.Item; import de.fau.cs.mad.kwikshop.common.ShoppingListServer; import de.fau.cs.mad.kwikshop.common.sorting.BoughtItem; import de.fau.cs.mad.kwikshop.common.sorting.SortingRequest; import static org.junit.Assert.*; public class ItemGraphTest { private final String ONE = "ONE"; private final String TWO = "TWO"; private final String THREE = "THREE"; private final String FOUR = "FOUR"; private final String CHAIN_ONE = "CHAIN_ONE"; private final String CHAIN_TWO = "CHAIN_TWO"; private ItemGraph createNewItemGraph() { return new ItemGraph(new DAODummyHelper()); } private ItemGraph createNewItemGraphWithSupermarket(String supermarketPlaceId) { ItemGraph itemGraph = new ItemGraph(new DAODummyHelper()); itemGraph.setSupermarket(supermarketPlaceId, supermarketPlaceId); return itemGraph; } @Test public void newItemGraphShouldNotHaveAnyEdges() { ItemGraph itemGraph = createNewItemGraph(); Set<Edge> edges = itemGraph.getEdges(); assertNotNull("getEdges returned null instead of an empty set", edges); assertTrue("Newly created ItemGraph already had edges", edges.isEmpty()); } @Test public void newItemGraphShouldHaveNoVerticesOrStartAndEndVertices() { ItemGraph itemGraph = createNewItemGraph(); Set<BoughtItem> vertices = itemGraph.getVertices(); assertNotNull("getVertices returned null instead of an empty set or a set containing only start and end vertices", vertices); boolean isEmpty = vertices.isEmpty(); if (!isEmpty) { assertEquals("getVertices should return either an empty set or a set containing only start and end vertices. 
But it is not empty and not containing 2 elements.", 2, vertices.size()); assertTrue("\"getVertices should return either an empty set or a set containing only start and end vertices. But it is not containing the start vertex", vertices.contains(itemGraph.getDaoHelper().getStartBoughtItem())); assertTrue("\"getVertices should return either an empty set or a set containing only start and end vertices. But it is not containing the end vertex", vertices.contains(itemGraph.getDaoHelper().getEndBoughtItem())); } } @Test public void newItemGraphShouldNotHaveASupermarket() { ItemGraph itemGraph = createNewItemGraph(); assertNull("Newly created ItemGraph already has supermarket set", itemGraph.getSupermarket()); } @Test public void setAndGetSupermarketTest() { ItemGraph itemGraph = createNewItemGraph(); Supermarket supermarket = itemGraph.getDaoHelper().getSupermarketByPlaceID(ONE); itemGraph.setSupermarket(supermarket.getPlaceId(), supermarket.getPlaceId()); assertEquals("The returned supermarket by getSupermarket should be the same as the supermarket that was set", supermarket.getPlaceId(), itemGraph.getSupermarket().getPlaceId()); } @Test public void setSupermarketReturnsCorrectValue() { ItemGraph itemGraph = createNewItemGraph(); Supermarket supermarket = itemGraph.getDaoHelper().getSupermarketByPlaceID(ONE); assertFalse("setSupermarket returned true although it is not a new supermarket", itemGraph.setSupermarket(supermarket.getPlaceId(), supermarket.getPlaceId())); assertFalse("setSupermarket returned true although it is not a new supermarket", itemGraph.setSupermarket(supermarket.getPlaceId(), supermarket.getPlaceId())); assertTrue("setSupermarket returned false although it is a new supermarket", itemGraph.setSupermarket("blah", "blah")); assertFalse("setSupermarket returned true although it is not a new supermarket", itemGraph.setSupermarket("blah", "blah")); } @Test public void createOrUpdateEdgeForEmptyGraphShouldReturnAEdge() { ItemGraph itemGraph = 
createNewItemGraph(); BoughtItem i1 = new BoughtItem("i1", ONE, ONE); BoughtItem i2 = new BoughtItem("i2", ONE, ONE); Supermarket supermarket = itemGraph.getDaoHelper().getSupermarketByPlaceID(ONE); Edge edge = itemGraph.createOrUpdateEdge(i1, i2, supermarket); assertNotNull("createOrUpdateEdge returns null", edge); } @Test public void createdEdgeShouldBeContainedInResultOfGetEdges() { ItemGraph itemGraph = createNewItemGraph(); BoughtItem i1 = new BoughtItem("i1", ONE, ONE); BoughtItem i2 = new BoughtItem("i2", ONE, ONE); Supermarket supermarket = itemGraph.getDaoHelper().getSupermarketByPlaceID(ONE); Edge edge = itemGraph.createOrUpdateEdge(i1, i2, supermarket); itemGraph.setSupermarket(ONE, ONE); itemGraph.update(); Set<Edge> edges = itemGraph.getEdges(); assertNotNull("getEdges returns null although an edge was just added", edges); assertTrue("newly added edge is not contained in the item graph", edges.contains(edge)); assertEquals("getEdges returns more than just one edge, although no other edges were added", 1, edges.size()); } @Test public void getVerticesReturnsTheItemsThatWereAddedBeforeForOneItem() { getVerticesReturnsTheItemsThatWereAddedBeforeForNItems(1); } @Test public void getVerticesReturnsTheItemsThatWereAddedBeforeForTwoItems() { getVerticesReturnsTheItemsThatWereAddedBeforeForNItems(2); } @Test public void getVerticesReturnsTheItemsThatWereAddedBeforeForFiveItems() { getVerticesReturnsTheItemsThatWereAddedBeforeForNItems(5); } private void getVerticesReturnsTheItemsThatWereAddedBeforeForNItems(int n) { ItemGraph itemGraph = createNewItemGraph(); itemGraph.setSupermarket(ONE, ONE); List<BoughtItem> items = createBoughtItems(n, ONE); itemGraph.addBoughtItems(items); Set<BoughtItem> vertices = itemGraph.getVertices(); assertNotNull("getVertices returns null although items were added", vertices); assertEquals("getVertices does not have size " + n + 2 + "although " + n + "item(s) were added (+start/end)", n + 2, vertices.size()); for (int i = 0; i < 
n; i++) { assertTrue("The " + i + "th item is not contained in getVertices", vertices.contains(items.get(i))); } } private List<BoughtItem> createBoughtItems(int numberOfItemsToCreate, String supermarketPlaceId) { List<BoughtItem> items = new ArrayList<>(numberOfItemsToCreate); for (int i = 0; i < numberOfItemsToCreate; i++) { BoughtItem item = new BoughtItem("i" + i, supermarketPlaceId, supermarketPlaceId); item.setId(i); items.add(item); } return items; } @Test public void childIsSetCorrectlyForAListOfTwoItems() { List<BoughtItem> items = createBoughtItems(2, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); BoughtItem i0 = items.get(0); BoughtItem i1 = items.get(1); List<BoughtItem> i0sChildren = itemGraph.getChildren(i0); assertTrue("item i1 is not recognized as i0's child", i0sChildren.contains(i1)); List<BoughtItem> i1sChildren = itemGraph.getChildren(i1); assertFalse("item i0 is recognized as child of i1 incorrectly", i1sChildren.contains(i0)); } @Test public void parentIsSetCorrectlyForAListOfTwoItems() { List<BoughtItem> items = createBoughtItems(2, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); BoughtItem i0 = items.get(0); BoughtItem i1 = items.get(1); List<BoughtItem> i1sParents = itemGraph.getParents(i1); assertTrue("item i0 is not recognized as i1's parent", i1sParents.contains(i0)); List<BoughtItem> i0sParents = itemGraph.getParents(i0); assertFalse("item i1 is recognized as parent of i0 incorrectly", i0sParents.contains(i1)); } @Test public void parentIsSetCorrectlyForAListOfThreeItems() { List<BoughtItem> items = createBoughtItems(3, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); BoughtItem i0 = items.get(0); BoughtItem i1 = items.get(1); BoughtItem i2 = items.get(2); List<BoughtItem> i1sParents = itemGraph.getParents(i1); assertTrue("item i0 is not recognized as i1's parent", 
i1sParents.contains(i0)); assertFalse("item i2 is recognized as parent of i1 incorretclty", i1sParents.contains(i2)); List<BoughtItem> i0sParents = itemGraph.getParents(i0); assertFalse("item i1 is recognized as parent of i0 incorrectly", i0sParents.contains(i1)); assertFalse("item i2 is recognized as parent of i0 incorrectly", i0sParents.contains(i2)); List<BoughtItem> i2sParents = itemGraph.getParents(i2); assertTrue("item i1 is not recognized as i2's parent", i2sParents.contains(i1)); assertFalse("item i0 is recoginzed as parent of i2 incorrectly", i2sParents.contains(i0)); } @Test public void getSiblingsWorksForTwoSimpleLists() { List<BoughtItem> items = createBoughtItems(3, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); BoughtItem i0, i1, i2; i0 = items.get(0); i1 = items.get(1); i2 = items.get(2); List<BoughtItem> firstPurchase, secondPurchase; firstPurchase = new ArrayList<>(2); secondPurchase = new ArrayList<>(2); firstPurchase.add(i0); firstPurchase.add(i1); secondPurchase.add(i0); secondPurchase.add(i2); itemGraph.addBoughtItems(firstPurchase); itemGraph.addBoughtItems(secondPurchase); assertTrue("i2 is not recognized as sibling for i1", itemGraph.getSiblings(i1).contains(i2)); assertTrue("i1 is not recognized as sibling for i2", itemGraph.getSiblings(i2).contains(i1)); } @Test public void getSiblingsDoesntReturnFalseSiblings() { List<BoughtItem> items = createBoughtItems(2, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); BoughtItem i0, i1; i0 = items.get(0); i1 = items.get(1); List<BoughtItem> i0sSiblings, i1sSiblings; i0sSiblings = itemGraph.getSiblings(i0); i1sSiblings = itemGraph.getSiblings(i1); assertFalse("i0 is contained in i0's siblings incorrectly", i0sSiblings.contains(i0)); assertFalse("i0 is contained in i1's siblings incorrectly", i1sSiblings.contains(i0)); assertFalse("i1 is contained in i0's siblings incorrectly", i0sSiblings.contains(i1)); assertFalse("i1 is 
contained in i1's siblings incorrectly", i1sSiblings.contains(i1)); } @Test public void edgeFromToExistsDoesDetectEdges() { ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); List<BoughtItem> items = createBoughtItems(2, ONE); itemGraph.addBoughtItems(items); assertTrue("edge not detected", itemGraph.edgeFromToExists(items.get(0), items.get(1))); } @Test public void edgeFromToExistsDoesNotDetectNonExistingEdges() { ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); List<BoughtItem> items = createBoughtItems(2, ONE); itemGraph.addBoughtItems(items); assertFalse("non existing edge detected", itemGraph.edgeFromToExists(items.get(1), items.get(0))); } @Test public void executeAlgorithmDoesNotCrash() { ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); List<BoughtItem> items = createBoughtItems(2, ONE); itemGraph.executeAlgorithm(new IndirectEdgeInsertion(), items); } @Test public void twoItemsAreSortedIdenticallyASecondTime() { nItemsAreSortedIdenticallyASecondTime(2, false); } @Test public void threeItemsAreSortedIdenticallyASecondTime() { nItemsAreSortedIdenticallyASecondTime(3, false); } @Test public void fiveItemsAreSortedIdenticallyASecondTime() { nItemsAreSortedIdenticallyASecondTime(5, false); } @Test public void tenItemsAreSortedIdenticallyASecondTime() { nItemsAreSortedIdenticallyASecondTime(10, false); } @Test public void twoItemsAreSortedIdenticallyASecondTimeWhenTheOrderIsDifferent() { nItemsAreSortedIdenticallyASecondTime(2, true); } @Test public void threeItemsAreSortedIdenticallyASecondTimeWhenTheOrderIsDifferent() { nItemsAreSortedIdenticallyASecondTime(3, true); } @Test public void fiveItemsAreSortedIdenticallyASecondTimeWhenTheOrderIsDifferent() { nItemsAreSortedIdenticallyASecondTime(5, true); } @Test public void tenItemsAreSortedIdenticallyASecondTimeWhenTheOrderIsDifferent() { nItemsAreSortedIdenticallyASecondTime(10, true); } private void nItemsAreSortedIdenticallyASecondTime(int n, boolean mixItemsBeforeSorting) { 
List<BoughtItem> items = createBoughtItems(n, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); Algorithm magicSort = new MagicSort(); ShoppingListServer shoppingListServer; if (mixItemsBeforeSorting) { shoppingListServer = createShoppingListServerWithNItemsMixedUp(n); /*shoppingListServer now has items with exactly the same name as the items in itemGraph but in a different order*/ } else { shoppingListServer = createShoppingListServerWithNItems(n); /*shoppingListServer now has items with exactly the same name as the items in itemGraph and in the same order*/ } SortingRequest sortingRequest = new SortingRequest(ONE, ONE); ShoppingListServer sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); List<Item> sortedItems = new ArrayList<>(); for(Item item: sortedList.getItems()) { sortedItems.add(item); } /* Sort according to the order of each Item */ Collections.sort(sortedItems); for (int i = 0; i < n; i++) { assertEquals("A identical list was sorted different as before, although no different data is available. 
The lists first differ at element " + i, items.get(i).getName(), sortedItems.get(i).getName()); } } private ShoppingListServer createShoppingListServerWithNItems(int n) { ArrayList<Item> items = new ArrayList<Item>(); for (int i = 0; i < n; i++) { Item item = new Item(); item.setName("i" + i); item.setID(i); item.setServerId(i); items.add(item); } ShoppingListServer shoppingListServer = new ShoppingListServer(0, items); return shoppingListServer; } private ShoppingListServer createShoppingListServerWithNItemsMixedUp(int n) { List<Item> orderedItems = new ArrayList<>(n); for (int i = 0; i < n; i++) { Item item = new Item(); item.setName("i" + i); item.setID(i); item.setServerId(i); orderedItems.add(item); } Random random = new Random(n*n); // random generator with some random seed ArrayList<Item> randomItems = new ArrayList<Item>(); while (!orderedItems.isEmpty()) { int index = random.nextInt(orderedItems.size()); Item item = orderedItems.remove(index); randomItems.add(item); } ShoppingListServer shoppingListServer = new ShoppingListServer(0, randomItems); return shoppingListServer; } @Test public void cycleOfThreeItemsShouldNotOccur() { BoughtItem i1, i2, i3; i1 = new BoughtItem("i1", ONE, ONE); i2 = new BoughtItem("i2", ONE, ONE); i3 = new BoughtItem("i3", ONE, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); addItemsToItemGraphThatWouldProduceACycleOfThree(itemGraph, i1, i2, i3); /*Now items were "bought in a cycle", but it is crucial that no cycles are contained in the item graph -> if there are edges i1->i2 and i2->i3, i3->i1 must not exist; only two of these three edges may exist at one time*/ boolean i1ToI2Exists, i2ToI3Exists, i3ToI1Exists; i1ToI2Exists = itemGraph.edgeFromToExists(i1, i2); i2ToI3Exists = itemGraph.edgeFromToExists(i2, i3); i3ToI1Exists = itemGraph.edgeFromToExists(i3, i1); if (i1ToI2Exists) { if (i2ToI3Exists) { assertFalse("Cycle in item graph detected", i3ToI1Exists); } else { assertTrue("Missing edge in item Graph", 
i3ToI1Exists); } } else { assertTrue("Missing edge in item Graph", i2ToI3Exists); assertTrue("Missing edge in item Graph", i3ToI1Exists); } } private void addItemsToItemGraphThatWouldProduceACycleOfThree(ItemGraph itemGraph, BoughtItem i1, BoughtItem i2, BoughtItem i3) { List<BoughtItem> first, second, third; first = new ArrayList<>(2); first.add(i1); first.add(i2); second = new ArrayList<>(2); second.add(i2); second.add(i3); third = new ArrayList<>(2); third.add(i3); third.add(i1); itemGraph.addBoughtItems(first); itemGraph.addBoughtItems(second); itemGraph.addBoughtItems(third); } @Test(timeout = 5000) public void sortWillReturnSomethingEvenIfTheDataIsInsufficient() { BoughtItem i1, i2, i0; i1 = new BoughtItem("i1", ONE, ONE); i2 = new BoughtItem("i2", ONE, ONE); i0 = new BoughtItem("i0", ONE, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); addItemsToItemGraphThatWouldProduceACycleOfThree(itemGraph, i1, i2, i0); ShoppingListServer shoppingList = createShoppingListServerWithNItems(2); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); Algorithm magicSort = new MagicSort(); // create a new shopping list, because it might be overwritten in sort() ShoppingListServer sortedList = itemGraph.sort(magicSort, new ShoppingListServer(42, shoppingList.getItems()), sortingRequest); for (Item item : shoppingList.getItems()) { assertTrue("Item that was to be sorted is not contained in the sorted list", sortedList.getItems().contains(item)); } } @Test public void shoppingListServerAddItemAndGetItemsWorkTogether() { Item item = new Item(); item.setName("i1"); item.setID(1); item.setServerId(1); ShoppingListServer shoppingListServer = new ShoppingListServer(0); shoppingListServer.addItem(item); assertTrue("An item added via addItem is not contained in the list of item obtained via getItems", shoppingListServer.getItems().contains(item)); } @Test public void 
simpleListIsSortedCorrectlyIfIsNoConflictingDataWasAdded__ItemsWereBoughtImmedeatelyOneAfterTheOtherBefore() { ItemGraph itemGraph = createCyclicFreeDataWithSixVertices(); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); Algorithm magicSort = new MagicSort(); Item item1 = new Item(); item1.setName("i1"); item1.setID(1); item1.setServerId(1); Item item3 = new Item(); item3.setName("i3"); item3.setID(3); item3.setServerId(3); List<Item> shoppingListItems = new ArrayList<>(2); shoppingListItems.add(item3); shoppingListItems.add(item1); ShoppingListServer shoppingListServer = new ShoppingListServer(0, shoppingListItems); ShoppingListServer sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); assertEquals("The sorted list has a different size than before", 2, sortedList.size()); List<Item> items = new ArrayList<Item>(sortedList.getItems()); Collections.sort(items); int iteration = 0; for (Item item : items) { if (iteration == 0) { assertEquals("Item was not sorted correctly", item1.getName(), item.getName()); } else { assertEquals("An extra item was added while sorting", 1, iteration); assertEquals("Item was not sorted correctly", item3.getName(), item.getName()); } iteration++; } } @Test public void simpleListIsSortedCorrectlyIfIsNoConflictingDataWasAdded__ItemsWereOnlyBoughtWithAnOtherItemInBetween() { ItemGraph itemGraph = createCyclicFreeDataWithSixVertices(); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); Algorithm magicSort = new MagicSort(); Item item4 = new Item(); item4.setName("i4"); item4.setID(4); item4.setServerId(4); Item item5 = new Item(); item5.setName("i5"); item5.setID(5); item5.setServerId(5); List<Item> shoppingListItems = new ArrayList<>(2); shoppingListItems.add(item5); shoppingListItems.add(item4); ShoppingListServer shoppingListServer = new ShoppingListServer(0, shoppingListItems); ShoppingListServer sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); assertEquals("The 
sorted list has a different size than before", 2, sortedList.size()); Collection<Item> items = sortedList.getItems(); int iteration = 0; for (Item item : items) { if (iteration == 0) { assertEquals("Item was not sorted correctly", item5.getName(), item.getName()); } else { assertEquals("An extra item was added while sorting", 1, iteration); assertEquals("Item was not sorted correctly", item4.getName(), item.getName()); } iteration++; } } private ItemGraph createCyclicFreeDataWithSixVertices() { ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); addCycleFreeDataWithSixVerticesToItemGraph(itemGraph); return itemGraph; } private void addCycleFreeDataWithSixVerticesToItemGraph(ItemGraph itemGraph) { BoughtItem i0, i1, i2, i3, i4, i5; i0 = new BoughtItem("i0", ONE, ONE); i1 = new BoughtItem("i1", ONE, ONE); i2 = new BoughtItem("i2", ONE, ONE); i3 = new BoughtItem("i3", ONE, ONE); i4 = new BoughtItem("i4", ONE, ONE); i5 = new BoughtItem("i5", ONE, ONE); List<BoughtItem> first, second, third, fourth, fifth, sixth; first = new ArrayList<>(2); first.add(i0); first.add(i2); second = new ArrayList<>(3); second.add(i0); second.add(i1); second.add(i3); third = new ArrayList<>(3); third.add(i0); third.add(i1); third.add(i2); fourth = new ArrayList<>(2); fourth.add(i3); fourth.add(i4); fifth = new ArrayList<>(2); fifth.add(i5); fifth.add(i3); sixth = new ArrayList<>(4); sixth.add(i1); sixth.add(i5); sixth.add(i3); sixth.add(i4); itemGraph.addBoughtItems(first); itemGraph.addBoughtItems(second); itemGraph.addBoughtItems(third); itemGraph.addBoughtItems(fourth); itemGraph.addBoughtItems(fifth); itemGraph.addBoughtItems(sixth); } @Test public void edgeShouldFlipIfItemsAreAddedTheOtherWayRoundMoreOften() { List<BoughtItem> items = createBoughtItems(2, ONE); BoughtItem i0 = items.get(0); BoughtItem i1 = items.get(1); List<BoughtItem> itemsOrderedTheOtherWayRound = new ArrayList<>(2); itemsOrderedTheOtherWayRound.add(i1); itemsOrderedTheOtherWayRound.add(i0); ItemGraph 
itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); assertTrue("The edge was not added for the first two items", itemGraph.edgeFromToExists(i0, i1)); itemGraph.addBoughtItems(itemsOrderedTheOtherWayRound); itemGraph.addBoughtItems(itemsOrderedTheOtherWayRound); assertTrue("The inverted edge has not been added after the data changed", itemGraph.edgeFromToExists(i1, i0)); assertFalse("The edge, that was added for the first two items, didn't get removed after the data changed", itemGraph.edgeFromToExists(i0, i1)); } @Test public void ifNoDataIsAvailableTheOriginalListShouldNotBeAltered() { ShoppingListServer shoppingListServer = createShoppingListServerWithNItems(5); // copy the names before sorting, because the list might be altered String[] unSortedNames = new String[5]; int j = 0; for (Item item : shoppingListServer.getItems()) { unSortedNames[j++] = item.getName(); } ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); Algorithm magicSort = new MagicSort(); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); ShoppingListServer sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); String[] sortedNames = new String[5]; int i = 0; for (Item item : sortedList.getItems()) { sortedNames[i++] = item.getName(); } assertArrayEquals("The list has been re-ordered although no data was available", unSortedNames, sortedNames); } @Test public void ifInsufficientDataIsAvailableTheOriginalShoppingListShouldNotBeAltered() { ItemGraph itemGraph = createCyclicFreeDataWithSixVertices(); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); Algorithm magicSort = new MagicSort(); Item item2 = new Item(); item2.setName("i2"); item2.setID(2); item2.setServerId(2); Item item3 = new Item(); item3.setName("i3"); item3.setID(3); item3.setServerId(3); List<Item> shoppingListItems = new ArrayList<>(2); shoppingListItems.add(item2); shoppingListItems.add(item3); ShoppingListServer shoppingListServer = new 
ShoppingListServer(0, shoppingListItems); ShoppingListServer sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); assertEquals("The sorted list has a different size than before", 2, sortedList.size()); Collection<Item> items = sortedList.getItems(); int iteration = 0; for (Item item : items) { if (iteration == 0) { assertEquals("Item was not sorted correctly", item2.getName(), item.getName()); } else { assertEquals("An extra item was added while sorting", 1, iteration); assertEquals("Item was not sorted correctly", item3.getName(), item.getName()); } iteration++; } /*And the same test the other way round*/ shoppingListItems = new ArrayList<>(2); shoppingListItems.add(item3); shoppingListItems.add(item2); shoppingListServer = new ShoppingListServer(0, shoppingListItems); sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); assertEquals("The sorted list has a different size than before", 2, sortedList.size()); items = sortedList.getItems(); iteration = 0; for (Item item : items) { if (iteration == 0) { assertEquals("Item was not sorted correctly", item3.getName(), item.getName()); } else { assertEquals("An extra item was added while sorting", 1, iteration); assertEquals("Item was not sorted correctly", item2.getName(), item.getName()); } iteration++; } } @Test @Ignore public void sortingDoesNotAlterTheOriginalListButWorksOnACopy() { ItemGraph itemGraph = createCyclicFreeDataWithSixVertices(); int n = 6; ShoppingListServer shoppingList = createShoppingListServerWithNItems(n); ShoppingListServer sorted = itemGraph.sort(new MagicSort(), shoppingList, new SortingRequest(ONE, ONE)); assertNotSame(shoppingList, sorted); } @Test public void sortingDoesNotSortAnItemBeforeAnotherAlthoughItWasAlwaysBoughtTheOtherWayRound() { ItemGraph itemGraph = createCyclicFreeDataWithSixVertices(); int n = 6; ShoppingListServer shoppingList = createShoppingListServerWithNItems(n); ShoppingListServer sorted = itemGraph.sort(new MagicSort(), 
shoppingList, new SortingRequest(ONE, ONE)); List<Item> sortedList = new ArrayList<Item>(sorted.getItems()); Collections.sort(sortedList); ArrayList<String> orderedItemNames = new ArrayList<>(6); for (Item item : sortedList) { orderedItemNames.add(item.getName()); } /* * There are several possibilities how the items can be ordered * 0-1-2-5-3-4 OR * 0-1-5-2-3-4 OR * 0-1-5-3-2-4 OR * 0-1-5-3-4-2 */ assertEquals("i0 is not the first item, although it should be", "i0", orderedItemNames.get(0)); assertEquals("i1 is not the second item, although it should be", "i1", orderedItemNames.get(1)); if (orderedItemNames.get(2).equals("i2")) { assertEquals("i5 is not the fourth item, although it should be, as i2 was the third item", "i5", orderedItemNames.get(3)); assertEquals("i3 is not the fifth item, although it should be, as i2 was the third item", "i3", orderedItemNames.get(4)); assertEquals("i4 is not the sixth item, although it should be, as i2 was the third item", "i4", orderedItemNames.get(5)); } else { assertEquals("i5 is not the third item, although it should be, as i2 was not the third item", "i5", orderedItemNames.get(2)); if (orderedItemNames.get(3).equals("i2")) { assertEquals("i3 is not the fifth item, although it should be, as i2 was the fourth item", "i3", orderedItemNames.get(4)); assertEquals("i4 is not the sixth item, although it should be, as i2 was the fourth item", "i4", orderedItemNames.get(5)); } else { assertEquals("i3 is not the fourth item, although it should be, as i2 was not the third or fourth item", "i3", orderedItemNames.get(3)); if (orderedItemNames.get(4).equals("i2")) { assertEquals("i4 is not the sixth item, although it should be, as i2 was the fifth item", "i4", orderedItemNames.get(5)); } else { assertEquals("i4 is not the fifth item, although it should be, as i2 was not the third, fourth or fifth item", "i4", orderedItemNames.get(4)); assertEquals("i2 is not the sixth item, although it should be, as it was not the third, fourth or fifth 
item either", "i2", orderedItemNames.get(5)); } } } } @Test(timeout = 5000) public void addTheSameItemTwiceAndThenSortTwoItems() { ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); List<BoughtItem> items = createBoughtItems(3, ONE); BoughtItem i0 = items.get(0); items.add(i0); // i0 is now contained twice - this is something that can definitely happen itemGraph.addBoughtItems(items); Item i1 = new Item(); i1.setName("i1"); i1.setID(1); i1.setServerId(1); Item i2 = new Item(); i2.setName("i2"); i2.setID(2); i2.setServerId(2); List<Item> listToSort = new ArrayList<>(2); listToSort.add(i1); listToSort.add(i2); Algorithm magicSort = new MagicSort(); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); ShoppingListServer shoppingListServer = new ShoppingListServer(0, listToSort); shoppingListServer = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); assertEquals("The size of the sorted shopping list has changed while sorting", 2, shoppingListServer.getItems().size()); Item sortedItem1 = (Item) shoppingListServer.getItems().toArray()[0]; assertEquals("The name of the first item that is to be sorted has changed while sorting", "i1", sortedItem1.getName()); Item sortedItem2 = (Item) shoppingListServer.getItems().toArray()[1]; assertEquals("The name of the second item that is to be sorted has changed while sorting", "i2", sortedItem2.getName()); } private class DAODummyHelper implements DAOHelper { private final Supermarket defaultSupermarketOne; private final Supermarket defaultSupermarketTwo; private final Supermarket defaultSupermarketThree; private final Supermarket defaultSupermarketFour; private final SupermarketChain defaultSupermarketChainOne; private final Supermarket defaultSupermarketChainOneGlobalSupermarket; private final SupermarketChain defaultSupermarketChainTwo; private final Supermarket defaultSupermarketChainTwoGlobalSupermarket; private final HashMap<String, List<Edge>> edges; private final HashMap<String, Supermarket> 
supermarkets; private final BoughtItem startBoughtItem; private final BoughtItem endBoughtItem; private final HashMap<String, BoughtItem> boughtItems; public DAODummyHelper() { defaultSupermarketChainOne = new SupermarketChain(); defaultSupermarketChainOne.setId(1); defaultSupermarketChainOne.setName(CHAIN_ONE); defaultSupermarketChainOneGlobalSupermarket = new Supermarket(); defaultSupermarketChainOneGlobalSupermarket.setId(-1); defaultSupermarketChainOneGlobalSupermarket.setPlaceId(CHAIN_ONE); defaultSupermarketChainTwo = new SupermarketChain(); defaultSupermarketChainTwo.setId(2); defaultSupermarketChainTwo.setName(CHAIN_TWO); defaultSupermarketChainTwoGlobalSupermarket = new Supermarket(); defaultSupermarketChainTwoGlobalSupermarket.setId(-2); defaultSupermarketChainTwoGlobalSupermarket.setPlaceId(CHAIN_TWO); defaultSupermarketOne = new Supermarket(); defaultSupermarketOne.setId(1); defaultSupermarketOne.setPlaceId(ONE); defaultSupermarketOne.setSupermarketChain(defaultSupermarketChainOne); defaultSupermarketTwo = new Supermarket(); defaultSupermarketTwo.setId(2); defaultSupermarketTwo.setPlaceId(TWO); defaultSupermarketTwo.setSupermarketChain(defaultSupermarketChainTwo); defaultSupermarketThree = new Supermarket(); defaultSupermarketThree.setId(3); defaultSupermarketThree.setPlaceId(THREE); defaultSupermarketThree.setSupermarketChain(defaultSupermarketChainTwo); defaultSupermarketFour = new Supermarket(); defaultSupermarketFour.setId(4); defaultSupermarketFour.setPlaceId(FOUR); supermarkets = new HashMap<>(); supermarkets.put(ONE, defaultSupermarketOne); supermarkets.put(TWO, defaultSupermarketTwo); supermarkets.put(THREE, defaultSupermarketThree); supermarkets.put(FOUR, defaultSupermarketFour); edges = new HashMap<>(); startBoughtItem = new BoughtItem(START_ITEM); endBoughtItem = new BoughtItem(END_ITEM); boughtItems = new HashMap<>(); boughtItems.put(START_ITEM, startBoughtItem); boughtItems.put(END_ITEM, endBoughtItem); } @Override public Supermarket 
getSupermarketByPlaceID(String placeId) {
            // Plain map lookup; yields null for unknown place ids.
            return supermarkets.get(placeId);
        }

        @Override
        public List<SupermarketChain> getAllSupermarketChains() {
            // Always the two fixed chains created in the constructor, in order.
            List<SupermarketChain> supermarketChains = new ArrayList<>(2);
            supermarketChains.add(0, defaultSupermarketChainOne);
            supermarketChains.add(1, defaultSupermarketChainTwo);
            return supermarketChains;
        }

        @Override
        public void createSupermarket(Supermarket supermarket) {
            // Reject duplicates by place id, mirroring a unique-key constraint.
            if (supermarkets.containsKey(supermarket.getPlaceId())) {
                throw new IllegalArgumentException("Supermarket already created");
            }
            supermarkets.put(supermarket.getPlaceId(), supermarket);
        }

        @Override
        public List<Edge> getEdgesBySupermarket(Supermarket supermarket) {
            // Defensive: null supermarket or no stored edges -> empty list.
            if(supermarket == null) {
                return new ArrayList<Edge>();
            }
            List<Edge> edges = this.edges.get(supermarket.getPlaceId());
            if (edges == null) {
                return new ArrayList<Edge>();
            }
            // Copy so callers cannot mutate the stored edge list.
            return new ArrayList<>(edges);
        }

        @Override
        public Edge getEdgeByFromTo(BoughtItem from, BoughtItem to, Supermarket supermarket) {
            // Linear scan over the supermarket's edges; null if no match.
            List<Edge> edges = getEdgesBySupermarket(supermarket);
            for (Edge edge : edges) {
                if (edge.getFrom().equals(from) && edge.getTo().equals(to)) {
                    return edge;
                }
            }
            return null;
        }

        @Override
        public List<Edge> getEdgesByTo(BoughtItem boughtItem, Supermarket supermarket) {
            // All edges of the supermarket that end in the given item.
            List<Edge> allEdges = getEdgesBySupermarket(supermarket);
            List<Edge> foundEdges = new ArrayList<>();
            for (Edge edge : allEdges) {
                if (edge.getTo().equals(boughtItem)) {
                    foundEdges.add(edge);
                }
            }
            return foundEdges;
        }

        @Override
        public Edge createEdge(Edge edge) {
            // Edges are stored per supermarket place id.
            String supermarketPlaceId = edge.getSupermarket().getPlaceId();
            List<Edge> edges = this.edges.get(supermarketPlaceId);
            if (edges == null) { // the specified supermarket doesn't have edges yet
                edges = new ArrayList<>();
                this.edges.put(supermarketPlaceId, edges);
            }
            edges.add(edge);
            return edge;
        }

        @Override
        public void deleteEdge(Edge edge) {
            // No-op when the supermarket has no edge list yet.
            List<Edge> edges = this.edges.get(edge.getSupermarket().getPlaceId());
            if (edges != null) {
                edges.remove(edge);
            }
        }

        @Override
        public BoughtItem
        getStartBoughtItem() {
            // Synthetic start vertex shared by all graphs using this helper.
            return startBoughtItem;
        }

        @Override
        public BoughtItem getEndBoughtItem() {
            // Synthetic end vertex shared by all graphs using this helper.
            return endBoughtItem;
        }

        @Override
        public BoughtItem getBoughtItemByName(String name) {
            // Null for names never passed to createBoughtItem.
            return boughtItems.get(name);
        }

        @Override
        public void createBoughtItem(BoughtItem boughtItem) {
            // Idempotent: an item already stored (by value) is not re-added.
            if (!boughtItems.containsValue(boughtItem)) {
                boughtItems.put(boughtItem.getName(), boughtItem);
            }
        }

        @Override
        public Supermarket getGlobalSupermarketBySupermarketChain(SupermarketChain supermarketChain) {
            // Maps each known chain to its fixed global supermarket; null otherwise.
            if (supermarketChain.getName().equals(CHAIN_ONE)) {
                return defaultSupermarketChainOneGlobalSupermarket;
            } else if (supermarketChain.getName().equals(CHAIN_TWO)) {
                return defaultSupermarketChainTwoGlobalSupermarket;
            } else {
                return null;
            }
        }

        @Override
        public Supermarket getGlobalSupermarket(SupermarketChain supermarketChain) {
            // Alias for getGlobalSupermarketBySupermarketChain.
            return getGlobalSupermarketBySupermarketChain(supermarketChain);
        }
    }
}
server/src/test/java/de/fau/cs/mad/kwikshop/server/sorting/ItemGraphTest.java
package de.fau.cs.mad.kwikshop.server.sorting;

import org.junit.*;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Random;
import java.util.Set;

import de.fau.cs.mad.kwikshop.common.Item;
import de.fau.cs.mad.kwikshop.common.ShoppingListServer;
import de.fau.cs.mad.kwikshop.common.sorting.BoughtItem;
import de.fau.cs.mad.kwikshop.common.sorting.SortingRequest;

import static org.junit.Assert.*;

/**
 * Unit tests for {@code ItemGraph}: vertex/edge bookkeeping, supermarket
 * assignment, and sorting of shopping lists via {@code MagicSort}.
 * All tests run against an in-memory DAO double ({@code DAODummyHelper},
 * defined at the bottom of this file), so no database is needed.
 */
public class ItemGraphTest {

    // Place ids used as both the supermarket place id and its name in tests.
    private final String ONE = "ONE";
    private final String TWO = "TWO";
    private final String THREE = "THREE";
    private final String FOUR = "FOUR";
    // Names of the two supermarket chains provided by DAODummyHelper.
    private final String CHAIN_ONE = "CHAIN_ONE";
    private final String CHAIN_TWO = "CHAIN_TWO";

    // Fresh graph backed by a fresh DAO double; no supermarket selected yet.
    private ItemGraph createNewItemGraph() {
        return new ItemGraph(new DAODummyHelper());
    }

    // Fresh graph with the given place id set as its current supermarket.
    private ItemGraph createNewItemGraphWithSupermarket(String supermarketPlaceId) {
        ItemGraph itemGraph = new ItemGraph(new DAODummyHelper());
        itemGraph.setSupermarket(supermarketPlaceId, supermarketPlaceId);
        return itemGraph;
    }

    @Test
    public void newItemGraphShouldNotHaveAnyEdges() {
        ItemGraph itemGraph = createNewItemGraph();
        Set<Edge> edges = itemGraph.getEdges();
        assertNotNull("getEdges returned null instead of an empty set", edges);
        assertTrue("Newly created ItemGraph already had edges", edges.isEmpty());
    }

    // A new graph may report either no vertices at all, or exactly the
    // synthetic start and end vertices — both are accepted here.
    @Test
    public void newItemGraphShouldHaveNoVerticesOrStartAndEndVertices() {
        ItemGraph itemGraph = createNewItemGraph();
        Set<BoughtItem> vertices = itemGraph.getVertices();
        assertNotNull("getVertices returned null instead of an empty set or a set containing only start and end vertices", vertices);
        boolean isEmpty = vertices.isEmpty();
        if (!isEmpty) {
            assertEquals("getVertices should return either an empty set or a set containing only start and end vertices. But it is not empty and not containing 2 elements.", 2, vertices.size());
            // NOTE(review): the stray \" at the start of the next two messages looks
            // like a typo in the assertion text; left unchanged (runtime string).
            assertTrue("\"getVertices should return either an empty set or a set containing only start and end vertices. But it is not containing the start vertex", vertices.contains(itemGraph.getDaoHelper().getStartBoughtItem()));
            assertTrue("\"getVertices should return either an empty set or a set containing only start and end vertices. But it is not containing the end vertex", vertices.contains(itemGraph.getDaoHelper().getEndBoughtItem()));
        }
    }

    @Test
    public void newItemGraphShouldNotHaveASupermarket() {
        ItemGraph itemGraph = createNewItemGraph();
        assertNull("Newly created ItemGraph already has supermarket set", itemGraph.getSupermarket());
    }

    @Test
    public void setAndGetSupermarketTest() {
        ItemGraph itemGraph = createNewItemGraph();
        Supermarket supermarket = itemGraph.getDaoHelper().getSupermarketByPlaceID(ONE);
        itemGraph.setSupermarket(supermarket.getPlaceId(), supermarket.getPlaceId());
        assertEquals("The returned supermarket by getSupermarket should be the same as the supermarket that was set", supermarket.getPlaceId(), itemGraph.getSupermarket().getPlaceId());
    }

    // setSupermarket is expected to return true only the first time a
    // previously-unknown supermarket is set ("blah"), false otherwise.
    @Test
    public void setSupermarketReturnsCorrectValue() {
        ItemGraph itemGraph = createNewItemGraph();
        Supermarket supermarket = itemGraph.getDaoHelper().getSupermarketByPlaceID(ONE);
        assertFalse("setSupermarket returned true although it is not a new supermarket", itemGraph.setSupermarket(supermarket.getPlaceId(), supermarket.getPlaceId()));
        assertFalse("setSupermarket returned true although it is not a new supermarket", itemGraph.setSupermarket(supermarket.getPlaceId(), supermarket.getPlaceId()));
        assertTrue("setSupermarket returned false although it is a new supermarket", itemGraph.setSupermarket("blah", "blah"));
        assertFalse("setSupermarket returned true although it is not a new supermarket", itemGraph.setSupermarket("blah", "blah"));
    }

    @Test
    public void createOrUpdateEdgeForEmptyGraphShouldReturnAEdge() {
        ItemGraph itemGraph =
createNewItemGraph(); BoughtItem i1 = new BoughtItem("i1", ONE, ONE); BoughtItem i2 = new BoughtItem("i2", ONE, ONE); Supermarket supermarket = itemGraph.getDaoHelper().getSupermarketByPlaceID(ONE); Edge edge = itemGraph.createOrUpdateEdge(i1, i2, supermarket); assertNotNull("createOrUpdateEdge returns null", edge); } @Test public void createdEdgeShouldBeContainedInResultOfGetEdges() { ItemGraph itemGraph = createNewItemGraph(); BoughtItem i1 = new BoughtItem("i1", ONE, ONE); BoughtItem i2 = new BoughtItem("i2", ONE, ONE); Supermarket supermarket = itemGraph.getDaoHelper().getSupermarketByPlaceID(ONE); Edge edge = itemGraph.createOrUpdateEdge(i1, i2, supermarket); itemGraph.setSupermarket(ONE, ONE); itemGraph.update(); Set<Edge> edges = itemGraph.getEdges(); assertNotNull("getEdges returns null although an edge was just added", edges); assertTrue("newly added edge is not contained in the item graph", edges.contains(edge)); assertEquals("getEdges returns more than just one edge, although no other edges were added", 1, edges.size()); } @Test public void getVerticesReturnsTheItemsThatWereAddedBeforeForOneItem() { getVerticesReturnsTheItemsThatWereAddedBeforeForNItems(1); } @Test public void getVerticesReturnsTheItemsThatWereAddedBeforeForTwoItems() { getVerticesReturnsTheItemsThatWereAddedBeforeForNItems(2); } @Test public void getVerticesReturnsTheItemsThatWereAddedBeforeForFiveItems() { getVerticesReturnsTheItemsThatWereAddedBeforeForNItems(5); } private void getVerticesReturnsTheItemsThatWereAddedBeforeForNItems(int n) { ItemGraph itemGraph = createNewItemGraph(); itemGraph.setSupermarket(ONE, ONE); List<BoughtItem> items = createBoughtItems(n, ONE); itemGraph.addBoughtItems(items); Set<BoughtItem> vertices = itemGraph.getVertices(); assertNotNull("getVertices returns null although items were added", vertices); assertEquals("getVertices does not have size " + n + 2 + "although " + n + "item(s) were added (+start/end)", n + 2, vertices.size()); for (int i = 0; i < 
n; i++) { assertTrue("The " + i + "th item is not contained in getVertices", vertices.contains(items.get(i))); } } private List<BoughtItem> createBoughtItems(int numberOfItemsToCreate, String supermarketPlaceId) { List<BoughtItem> items = new ArrayList<>(numberOfItemsToCreate); for (int i = 0; i < numberOfItemsToCreate; i++) { BoughtItem item = new BoughtItem("i" + i, supermarketPlaceId, supermarketPlaceId); item.setId(i); items.add(item); } return items; } @Test public void childIsSetCorrectlyForAListOfTwoItems() { List<BoughtItem> items = createBoughtItems(2, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); BoughtItem i0 = items.get(0); BoughtItem i1 = items.get(1); List<BoughtItem> i0sChildren = itemGraph.getChildren(i0); assertTrue("item i1 is not recognized as i0's child", i0sChildren.contains(i1)); List<BoughtItem> i1sChildren = itemGraph.getChildren(i1); assertFalse("item i0 is recognized as child of i1 incorrectly", i1sChildren.contains(i0)); } @Test public void parentIsSetCorrectlyForAListOfTwoItems() { List<BoughtItem> items = createBoughtItems(2, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); BoughtItem i0 = items.get(0); BoughtItem i1 = items.get(1); List<BoughtItem> i1sParents = itemGraph.getParents(i1); assertTrue("item i0 is not recognized as i1's parent", i1sParents.contains(i0)); List<BoughtItem> i0sParents = itemGraph.getParents(i0); assertFalse("item i1 is recognized as parent of i0 incorrectly", i0sParents.contains(i1)); } @Test public void parentIsSetCorrectlyForAListOfThreeItems() { List<BoughtItem> items = createBoughtItems(3, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); BoughtItem i0 = items.get(0); BoughtItem i1 = items.get(1); BoughtItem i2 = items.get(2); List<BoughtItem> i1sParents = itemGraph.getParents(i1); assertTrue("item i0 is not recognized as i1's parent", 
i1sParents.contains(i0)); assertFalse("item i2 is recognized as parent of i1 incorretclty", i1sParents.contains(i2)); List<BoughtItem> i0sParents = itemGraph.getParents(i0); assertFalse("item i1 is recognized as parent of i0 incorrectly", i0sParents.contains(i1)); assertFalse("item i2 is recognized as parent of i0 incorrectly", i0sParents.contains(i2)); List<BoughtItem> i2sParents = itemGraph.getParents(i2); assertTrue("item i1 is not recognized as i2's parent", i2sParents.contains(i1)); assertFalse("item i0 is recoginzed as parent of i2 incorrectly", i2sParents.contains(i0)); } @Test public void getSiblingsWorksForTwoSimpleLists() { List<BoughtItem> items = createBoughtItems(3, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); BoughtItem i0, i1, i2; i0 = items.get(0); i1 = items.get(1); i2 = items.get(2); List<BoughtItem> firstPurchase, secondPurchase; firstPurchase = new ArrayList<>(2); secondPurchase = new ArrayList<>(2); firstPurchase.add(i0); firstPurchase.add(i1); secondPurchase.add(i0); secondPurchase.add(i2); itemGraph.addBoughtItems(firstPurchase); itemGraph.addBoughtItems(secondPurchase); assertTrue("i2 is not recognized as sibling for i1", itemGraph.getSiblings(i1).contains(i2)); assertTrue("i1 is not recognized as sibling for i2", itemGraph.getSiblings(i2).contains(i1)); } @Test public void getSiblingsDoesntReturnFalseSiblings() { List<BoughtItem> items = createBoughtItems(2, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); BoughtItem i0, i1; i0 = items.get(0); i1 = items.get(1); List<BoughtItem> i0sSiblings, i1sSiblings; i0sSiblings = itemGraph.getSiblings(i0); i1sSiblings = itemGraph.getSiblings(i1); assertFalse("i0 is contained in i0's siblings incorrectly", i0sSiblings.contains(i0)); assertFalse("i0 is contained in i1's siblings incorrectly", i1sSiblings.contains(i0)); assertFalse("i1 is contained in i0's siblings incorrectly", i0sSiblings.contains(i1)); assertFalse("i1 is 
contained in i1's siblings incorrectly", i1sSiblings.contains(i1)); } @Test public void edgeFromToExistsDoesDetectEdges() { ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); List<BoughtItem> items = createBoughtItems(2, ONE); itemGraph.addBoughtItems(items); assertTrue("edge not detected", itemGraph.edgeFromToExists(items.get(0), items.get(1))); } @Test public void edgeFromToExistsDoesNotDetectNonExistingEdges() { ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); List<BoughtItem> items = createBoughtItems(2, ONE); itemGraph.addBoughtItems(items); assertFalse("non existing edge detected", itemGraph.edgeFromToExists(items.get(1), items.get(0))); } @Test public void executeAlgorithmDoesNotCrash() { ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); List<BoughtItem> items = createBoughtItems(2, ONE); itemGraph.executeAlgorithm(new IndirectEdgeInsertion(), items); } @Test public void twoItemsAreSortedIdenticallyASecondTime() { nItemsAreSortedIdenticallyASecondTime(2, false); } @Test public void threeItemsAreSortedIdenticallyASecondTime() { nItemsAreSortedIdenticallyASecondTime(3, false); } @Test public void fiveItemsAreSortedIdenticallyASecondTime() { nItemsAreSortedIdenticallyASecondTime(5, false); } @Test public void tenItemsAreSortedIdenticallyASecondTime() { nItemsAreSortedIdenticallyASecondTime(10, false); } @Test public void twoItemsAreSortedIdenticallyASecondTimeWhenTheOrderIsDifferent() { nItemsAreSortedIdenticallyASecondTime(2, true); } @Test public void threeItemsAreSortedIdenticallyASecondTimeWhenTheOrderIsDifferent() { nItemsAreSortedIdenticallyASecondTime(3, true); } @Test public void fiveItemsAreSortedIdenticallyASecondTimeWhenTheOrderIsDifferent() { nItemsAreSortedIdenticallyASecondTime(5, true); } @Test public void tenItemsAreSortedIdenticallyASecondTimeWhenTheOrderIsDifferent() { nItemsAreSortedIdenticallyASecondTime(10, true); } private void nItemsAreSortedIdenticallyASecondTime(int n, boolean mixItemsBeforeSorting) { 
List<BoughtItem> items = createBoughtItems(n, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); Algorithm magicSort = new MagicSort(); ShoppingListServer shoppingListServer; if (mixItemsBeforeSorting) { shoppingListServer = createShoppingListServerWithNItemsMixedUp(n); /*shoppingListServer now has items with exactly the same name as the items in itemGraph but in a different order*/ } else { shoppingListServer = createShoppingListServerWithNItems(n); /*shoppingListServer now has items with exactly the same name as the items in itemGraph and in the same order*/ } SortingRequest sortingRequest = new SortingRequest(ONE, ONE); ShoppingListServer sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); List<Item> sortedItems = new ArrayList<>(); for(Item item: sortedList.getItems()) { sortedItems.add(item); } /* Sort according to the order of each Item */ Collections.sort(sortedItems); for (int i = 0; i < n; i++) { assertEquals("A identical list was sorted different as before, although no different data is available. 
The lists first differ at element " + i, items.get(i).getName(), sortedItems.get(i).getName()); } } private ShoppingListServer createShoppingListServerWithNItems(int n) { ArrayList<Item> items = new ArrayList<Item>(); for (int i = 0; i < n; i++) { Item item = new Item(); item.setName("i" + i); item.setID(i); item.setServerId(i); items.add(item); } ShoppingListServer shoppingListServer = new ShoppingListServer(0, items); return shoppingListServer; } private ShoppingListServer createShoppingListServerWithNItemsMixedUp(int n) { List<Item> orderedItems = new ArrayList<>(n); for (int i = 0; i < n; i++) { Item item = new Item(); item.setName("i" + i); item.setID(i); item.setServerId(i); orderedItems.add(item); } Random random = new Random(n*n); // random generator with some random seed ArrayList<Item> randomItems = new ArrayList<Item>(); while (!orderedItems.isEmpty()) { int index = random.nextInt(orderedItems.size()); Item item = orderedItems.remove(index); randomItems.add(item); } ShoppingListServer shoppingListServer = new ShoppingListServer(0, randomItems); return shoppingListServer; } @Test public void cycleOfThreeItemsShouldNotOccur() { BoughtItem i1, i2, i3; i1 = new BoughtItem("i1", ONE, ONE); i2 = new BoughtItem("i2", ONE, ONE); i3 = new BoughtItem("i3", ONE, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); addItemsToItemGraphThatWouldProduceACycleOfThree(itemGraph, i1, i2, i3); /*Now items were "bought in a cycle", but it is crucial that no cycles are contained in the item graph -> if there are edges i1->i2 and i2->i3, i3->i1 must not exist; only two of these three edges may exist at one time*/ boolean i1ToI2Exists, i2ToI3Exists, i3ToI1Exists; i1ToI2Exists = itemGraph.edgeFromToExists(i1, i2); i2ToI3Exists = itemGraph.edgeFromToExists(i2, i3); i3ToI1Exists = itemGraph.edgeFromToExists(i3, i1); if (i1ToI2Exists) { if (i2ToI3Exists) { assertFalse("Cycle in item graph detected", i3ToI1Exists); } else { assertTrue("Missing edge in item Graph", 
i3ToI1Exists); } } else { assertTrue("Missing edge in item Graph", i2ToI3Exists); assertTrue("Missing edge in item Graph", i3ToI1Exists); } } private void addItemsToItemGraphThatWouldProduceACycleOfThree(ItemGraph itemGraph, BoughtItem i1, BoughtItem i2, BoughtItem i3) { List<BoughtItem> first, second, third; first = new ArrayList<>(2); first.add(i1); first.add(i2); second = new ArrayList<>(2); second.add(i2); second.add(i3); third = new ArrayList<>(2); third.add(i3); third.add(i1); itemGraph.addBoughtItems(first); itemGraph.addBoughtItems(second); itemGraph.addBoughtItems(third); } @Test(timeout = 5000) public void sortWillReturnSomethingEvenIfTheDataIsInsufficient() { BoughtItem i1, i2, i0; i1 = new BoughtItem("i1", ONE, ONE); i2 = new BoughtItem("i2", ONE, ONE); i0 = new BoughtItem("i0", ONE, ONE); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); addItemsToItemGraphThatWouldProduceACycleOfThree(itemGraph, i1, i2, i0); ShoppingListServer shoppingList = createShoppingListServerWithNItems(2); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); Algorithm magicSort = new MagicSort(); // create a new shopping list, because it might be overwritten in sort() ShoppingListServer sortedList = itemGraph.sort(magicSort, new ShoppingListServer(42, shoppingList.getItems()), sortingRequest); for (Item item : shoppingList.getItems()) { assertTrue("Item that was to be sorted is not contained in the sorted list", sortedList.getItems().contains(item)); } } @Test public void shoppingListServerAddItemAndGetItemsWorkTogether() { Item item = new Item(); item.setName("i1"); item.setID(1); item.setServerId(1); ShoppingListServer shoppingListServer = new ShoppingListServer(0); shoppingListServer.addItem(item); assertTrue("An item added via addItem is not contained in the list of item obtained via getItems", shoppingListServer.getItems().contains(item)); } @Test public void 
simpleListIsSortedCorrectlyIfIsNoConflictingDataWasAdded__ItemsWereBoughtImmedeatelyOneAfterTheOtherBefore() { ItemGraph itemGraph = createCyclicFreeDataWithSixVertices(); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); Algorithm magicSort = new MagicSort(); Item item1 = new Item(); item1.setName("i1"); item1.setID(1); item1.setServerId(1); Item item3 = new Item(); item3.setName("i3"); item3.setID(3); item3.setServerId(3); List<Item> shoppingListItems = new ArrayList<>(2); shoppingListItems.add(item3); shoppingListItems.add(item1); ShoppingListServer shoppingListServer = new ShoppingListServer(0, shoppingListItems); ShoppingListServer sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); assertEquals("The sorted list has a different size than before", 2, sortedList.size()); List<Item> items = new ArrayList<Item>(sortedList.getItems()); Collections.sort(items); int iteration = 0; for (Item item : items) { if (iteration == 0) { assertEquals("Item was not sorted correctly", item1.getName(), item.getName()); } else { assertEquals("An extra item was added while sorting", 1, iteration); assertEquals("Item was not sorted correctly", item3.getName(), item.getName()); } iteration++; } } @Test public void simpleListIsSortedCorrectlyIfIsNoConflictingDataWasAdded__ItemsWereOnlyBoughtWithAnOtherItemInBetween() { ItemGraph itemGraph = createCyclicFreeDataWithSixVertices(); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); Algorithm magicSort = new MagicSort(); Item item4 = new Item(); item4.setName("i4"); item4.setID(4); item4.setServerId(4); Item item5 = new Item(); item5.setName("i5"); item5.setID(5); item5.setServerId(5); List<Item> shoppingListItems = new ArrayList<>(2); shoppingListItems.add(item5); shoppingListItems.add(item4); ShoppingListServer shoppingListServer = new ShoppingListServer(0, shoppingListItems); ShoppingListServer sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); assertEquals("The 
sorted list has a different size than before", 2, sortedList.size()); Collection<Item> items = sortedList.getItems(); int iteration = 0; for (Item item : items) { if (iteration == 0) { assertEquals("Item was not sorted correctly", item5.getName(), item.getName()); } else { assertEquals("An extra item was added while sorting", 1, iteration); assertEquals("Item was not sorted correctly", item4.getName(), item.getName()); } iteration++; } } private ItemGraph createCyclicFreeDataWithSixVertices() { BoughtItem i0, i1, i2, i3, i4, i5; i0 = new BoughtItem("i0", ONE, ONE); i1 = new BoughtItem("i1", ONE, ONE); i2 = new BoughtItem("i2", ONE, ONE); i3 = new BoughtItem("i3", ONE, ONE); i4 = new BoughtItem("i4", ONE, ONE); i5 = new BoughtItem("i5", ONE, ONE); List<BoughtItem> first, second, third, fourth, fifth, sixth; first = new ArrayList<>(2); first.add(i0); first.add(i2); second = new ArrayList<>(3); second.add(i0); second.add(i1); second.add(i3); third = new ArrayList<>(3); third.add(i0); third.add(i1); third.add(i2); fourth = new ArrayList<>(2); fourth.add(i3); fourth.add(i4); fifth = new ArrayList<>(2); fifth.add(i5); fifth.add(i3); sixth = new ArrayList<>(4); sixth.add(i1); sixth.add(i5); sixth.add(i3); sixth.add(i4); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(first); itemGraph.addBoughtItems(second); itemGraph.addBoughtItems(third); itemGraph.addBoughtItems(fourth); itemGraph.addBoughtItems(fifth); itemGraph.addBoughtItems(sixth); return itemGraph; } @Test public void edgeShouldFlipIfItemsAreAddedTheOtherWayRoundMoreOften() { List<BoughtItem> items = createBoughtItems(2, ONE); BoughtItem i0 = items.get(0); BoughtItem i1 = items.get(1); List<BoughtItem> itemsOrderedTheOtherWayRound = new ArrayList<>(2); itemsOrderedTheOtherWayRound.add(i1); itemsOrderedTheOtherWayRound.add(i0); ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); itemGraph.addBoughtItems(items); assertTrue("The edge was not added for the first two 
items", itemGraph.edgeFromToExists(i0, i1)); itemGraph.addBoughtItems(itemsOrderedTheOtherWayRound); itemGraph.addBoughtItems(itemsOrderedTheOtherWayRound); assertTrue("The inverted edge has not been added after the data changed", itemGraph.edgeFromToExists(i1, i0)); assertFalse("The edge, that was added for the first two items, didn't get removed after the data changed", itemGraph.edgeFromToExists(i0, i1)); } @Test public void ifNoDataIsAvailableTheOriginalListShouldNotBeAltered() { ShoppingListServer shoppingListServer = createShoppingListServerWithNItems(5); // copy the names before sorting, because the list might be altered String[] unSortedNames = new String[5]; int j = 0; for (Item item : shoppingListServer.getItems()) { unSortedNames[j++] = item.getName(); } ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); Algorithm magicSort = new MagicSort(); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); ShoppingListServer sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); String[] sortedNames = new String[5]; int i = 0; for (Item item : sortedList.getItems()) { sortedNames[i++] = item.getName(); } assertArrayEquals("The list has been re-ordered although no data was available", unSortedNames, sortedNames); } @Test public void ifInsufficientDataIsAvailableTheOriginalShoppingListShouldNotBeAltered() { ItemGraph itemGraph = createCyclicFreeDataWithSixVertices(); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); Algorithm magicSort = new MagicSort(); Item item2 = new Item(); item2.setName("i2"); item2.setID(2); item2.setServerId(2); Item item3 = new Item(); item3.setName("i3"); item3.setID(3); item3.setServerId(3); List<Item> shoppingListItems = new ArrayList<>(2); shoppingListItems.add(item2); shoppingListItems.add(item3); ShoppingListServer shoppingListServer = new ShoppingListServer(0, shoppingListItems); ShoppingListServer sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); 
assertEquals("The sorted list has a different size than before", 2, sortedList.size()); Collection<Item> items = sortedList.getItems(); int iteration = 0; for (Item item : items) { if (iteration == 0) { assertEquals("Item was not sorted correctly", item2.getName(), item.getName()); } else { assertEquals("An extra item was added while sorting", 1, iteration); assertEquals("Item was not sorted correctly", item3.getName(), item.getName()); } iteration++; } /*And the same test the other way round*/ shoppingListItems = new ArrayList<>(2); shoppingListItems.add(item3); shoppingListItems.add(item2); shoppingListServer = new ShoppingListServer(0, shoppingListItems); sortedList = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); assertEquals("The sorted list has a different size than before", 2, sortedList.size()); items = sortedList.getItems(); iteration = 0; for (Item item : items) { if (iteration == 0) { assertEquals("Item was not sorted correctly", item3.getName(), item.getName()); } else { assertEquals("An extra item was added while sorting", 1, iteration); assertEquals("Item was not sorted correctly", item2.getName(), item.getName()); } iteration++; } } @Test @Ignore public void sortingDoesNotAlterTheOriginalListButWorksOnACopy() { ItemGraph itemGraph = createCyclicFreeDataWithSixVertices(); int n = 6; ShoppingListServer shoppingList = createShoppingListServerWithNItems(n); ShoppingListServer sorted = itemGraph.sort(new MagicSort(), shoppingList, new SortingRequest(ONE, ONE)); assertNotSame(shoppingList, sorted); } @Test public void sortingDoesNotSortAnItemBeforeAnotherAlthoughItWasAlwaysBoughtTheOtherWayRound() { ItemGraph itemGraph = createCyclicFreeDataWithSixVertices(); int n = 6; ShoppingListServer shoppingList = createShoppingListServerWithNItems(n); ShoppingListServer sorted = itemGraph.sort(new MagicSort(), shoppingList, new SortingRequest(ONE, ONE)); List<Item> sortedList = new ArrayList<Item>(sorted.getItems()); Collections.sort(sortedList); 
ArrayList<String> orderedItemNames = new ArrayList<>(6); for (Item item : sortedList) { orderedItemNames.add(item.getName()); } /* * There are several possibilities how the items can be ordered * 0-1-2-5-3-4 OR * 0-1-5-2-3-4 OR * 0-1-5-3-2-4 OR * 0-1-5-3-4-2 */ assertEquals("i0 is not the first item, although it should be", "i0", orderedItemNames.get(0)); assertEquals("i1 is not the second item, although it should be", "i1", orderedItemNames.get(1)); if (orderedItemNames.get(2).equals("i2")) { assertEquals("i5 is not the fourth item, although it should be, as i2 was the third item", "i5", orderedItemNames.get(3)); assertEquals("i3 is not the fifth item, although it should be, as i2 was the third item", "i3", orderedItemNames.get(4)); assertEquals("i4 is not the sixth item, although it should be, as i2 was the third item", "i4", orderedItemNames.get(5)); } else { assertEquals("i5 is not the third item, although it should be, as i2 was not the third item", "i5", orderedItemNames.get(2)); if (orderedItemNames.get(3).equals("i2")) { assertEquals("i3 is not the fifth item, although it should be, as i2 was the fourth item", "i3", orderedItemNames.get(4)); assertEquals("i4 is not the sixth item, although it should be, as i2 was the fourth item", "i4", orderedItemNames.get(5)); } else { assertEquals("i3 is not the fourth item, although it should be, as i2 was not the third or fourth item", "i3", orderedItemNames.get(3)); if (orderedItemNames.get(4).equals("i2")) { assertEquals("i4 is not the sixth item, although it should be, as i2 was the fifth item", "i4", orderedItemNames.get(5)); } else { assertEquals("i4 is not the fifth item, although it should be, as i2 was not the third, fourth or fifth item", "i4", orderedItemNames.get(4)); assertEquals("i2 is not the sixth item, although it should be, as it was not the third, fourth or fifth item either", "i2", orderedItemNames.get(5)); } } } } @Test(timeout = 5000) public void addTheSameItemTwiceAndThenSortTwoItems() { ItemGraph 
itemGraph = createNewItemGraphWithSupermarket(ONE); List<BoughtItem> items = createBoughtItems(3, ONE); BoughtItem i0 = items.get(0); items.add(i0); // i0 is now contained twice - this is something that can definitely happen itemGraph.addBoughtItems(items); Item i1 = new Item(); i1.setName("i1"); i1.setID(1); i1.setServerId(1); Item i2 = new Item(); i2.setName("i2"); i2.setID(2); i2.setServerId(2); List<Item> listToSort = new ArrayList<>(2); listToSort.add(i1); listToSort.add(i2); Algorithm magicSort = new MagicSort(); SortingRequest sortingRequest = new SortingRequest(ONE, ONE); ShoppingListServer shoppingListServer = new ShoppingListServer(0, listToSort); shoppingListServer = itemGraph.sort(magicSort, shoppingListServer, sortingRequest); assertEquals("The size of the sorted shopping list has changed while sorting", 2, shoppingListServer.getItems().size()); Item sortedItem1 = (Item) shoppingListServer.getItems().toArray()[0]; assertEquals("The name of the first item that is to be sorted has changed while sorting", "i1", sortedItem1.getName()); Item sortedItem2 = (Item) shoppingListServer.getItems().toArray()[1]; assertEquals("The name of the second item that is to be sorted has changed while sorting", "i2", sortedItem2.getName()); } private class DAODummyHelper implements DAOHelper { private final Supermarket defaultSupermarketOne; private final Supermarket defaultSupermarketTwo; private final Supermarket defaultSupermarketThree; private final Supermarket defaultSupermarketFour; private final SupermarketChain defaultSupermarketChainOne; private final Supermarket defaultSupermarketChainOneGlobalSupermarket; private final SupermarketChain defaultSupermarketChainTwo; private final Supermarket defaultSupermarketChainTwoGlobalSupermarket; private final HashMap<String, List<Edge>> edges; private final HashMap<String, Supermarket> supermarkets; private final BoughtItem startBoughtItem; private final BoughtItem endBoughtItem; private final HashMap<String, BoughtItem> 
boughtItems; public DAODummyHelper() { defaultSupermarketChainOne = new SupermarketChain(); defaultSupermarketChainOne.setId(1); defaultSupermarketChainOne.setName(CHAIN_ONE); defaultSupermarketChainOneGlobalSupermarket = new Supermarket(); defaultSupermarketChainOneGlobalSupermarket.setId(-1); defaultSupermarketChainOneGlobalSupermarket.setPlaceId(CHAIN_ONE); defaultSupermarketChainTwo = new SupermarketChain(); defaultSupermarketChainTwo.setId(2); defaultSupermarketChainTwo.setName(CHAIN_TWO); defaultSupermarketChainTwoGlobalSupermarket = new Supermarket(); defaultSupermarketChainTwoGlobalSupermarket.setId(-2); defaultSupermarketChainTwoGlobalSupermarket.setPlaceId(CHAIN_TWO); defaultSupermarketOne = new Supermarket(); defaultSupermarketOne.setId(1); defaultSupermarketOne.setPlaceId(ONE); defaultSupermarketOne.setSupermarketChain(defaultSupermarketChainOne); defaultSupermarketTwo = new Supermarket(); defaultSupermarketTwo.setId(2); defaultSupermarketTwo.setPlaceId(TWO); defaultSupermarketTwo.setSupermarketChain(defaultSupermarketChainTwo); defaultSupermarketThree = new Supermarket(); defaultSupermarketThree.setId(3); defaultSupermarketThree.setPlaceId(THREE); defaultSupermarketThree.setSupermarketChain(defaultSupermarketChainTwo); defaultSupermarketFour = new Supermarket(); defaultSupermarketFour.setId(4); defaultSupermarketFour.setPlaceId(FOUR); supermarkets = new HashMap<>(); supermarkets.put(ONE, defaultSupermarketOne); supermarkets.put(TWO, defaultSupermarketTwo); supermarkets.put(THREE, defaultSupermarketThree); supermarkets.put(FOUR, defaultSupermarketFour); edges = new HashMap<>(); startBoughtItem = new BoughtItem(START_ITEM); endBoughtItem = new BoughtItem(END_ITEM); boughtItems = new HashMap<>(); boughtItems.put(START_ITEM, startBoughtItem); boughtItems.put(END_ITEM, endBoughtItem); } @Override public Supermarket getSupermarketByPlaceID(String placeId) { return supermarkets.get(placeId); } @Override public List<SupermarketChain> getAllSupermarketChains() 
{ List<SupermarketChain> supermarketChains = new ArrayList<>(2); supermarketChains.add(0, defaultSupermarketChainOne); supermarketChains.add(1, defaultSupermarketChainTwo); return supermarketChains; } @Override public void createSupermarket(Supermarket supermarket) { if (supermarkets.containsKey(supermarket.getPlaceId())) { throw new IllegalArgumentException("Supermarket already created"); } supermarkets.put(supermarket.getPlaceId(), supermarket); } @Override public List<Edge> getEdgesBySupermarket(Supermarket supermarket) { if(supermarket == null) { return new ArrayList<Edge>(); } List<Edge> edges = this.edges.get(supermarket.getPlaceId()); if (edges == null) { return new ArrayList<Edge>(); } return new ArrayList<>(edges); } @Override public Edge getEdgeByFromTo(BoughtItem from, BoughtItem to, Supermarket supermarket) { List<Edge> edges = getEdgesBySupermarket(supermarket); for (Edge edge : edges) { if (edge.getFrom().equals(from) && edge.getTo().equals(to)) { return edge; } } return null; } @Override public List<Edge> getEdgesByTo(BoughtItem boughtItem, Supermarket supermarket) { List<Edge> allEdges = getEdgesBySupermarket(supermarket); List<Edge> foundEdges = new ArrayList<>(); for (Edge edge : allEdges) { if (edge.getTo().equals(boughtItem)) { foundEdges.add(edge); } } return foundEdges; } @Override public Edge createEdge(Edge edge) { String supermarketPlaceId = edge.getSupermarket().getPlaceId(); List<Edge> edges = this.edges.get(supermarketPlaceId); if (edges == null) { // the specified supermarket doesn't have edges yet edges = new ArrayList<>(); this.edges.put(supermarketPlaceId, edges); } edges.add(edge); return edge; } @Override public void deleteEdge(Edge edge) { List<Edge> edges = this.edges.get(edge.getSupermarket().getPlaceId()); if (edges != null) { edges.remove(edge); } } @Override public BoughtItem getStartBoughtItem() { return startBoughtItem; } @Override public BoughtItem getEndBoughtItem() { return endBoughtItem; } @Override public BoughtItem 
getBoughtItemByName(String name) { return boughtItems.get(name); } @Override public void createBoughtItem(BoughtItem boughtItem) { if (!boughtItems.containsValue(boughtItem)) { boughtItems.put(boughtItem.getName(), boughtItem); } } @Override public Supermarket getGlobalSupermarketBySupermarketChain(SupermarketChain supermarketChain) { if (supermarketChain.getName().equals(CHAIN_ONE)) { return defaultSupermarketChainOneGlobalSupermarket; } else if (supermarketChain.getName().equals(CHAIN_TWO)) { return defaultSupermarketChainTwoGlobalSupermarket; } else { return null; } } @Override public Supermarket getGlobalSupermarket(SupermarketChain supermarketChain) { return getGlobalSupermarketBySupermarketChain(supermarketChain); } } }
split the creation of the data for a cycle-free item graph with six vertices and the creation of the item graph into two separate methods
server/src/test/java/de/fau/cs/mad/kwikshop/server/sorting/ItemGraphTest.java
split the creation of the data for a cycle-free item graph with six vertices and the creation of the item graph into two separate methods
<ide><path>erver/src/test/java/de/fau/cs/mad/kwikshop/server/sorting/ItemGraphTest.java <ide> } <ide> <ide> private ItemGraph createCyclicFreeDataWithSixVertices() { <add> ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); <add> addCycleFreeDataWithSixVerticesToItemGraph(itemGraph); <add> return itemGraph; <add> } <add> <add> private void addCycleFreeDataWithSixVerticesToItemGraph(ItemGraph itemGraph) { <ide> BoughtItem i0, i1, i2, i3, i4, i5; <ide> i0 = new BoughtItem("i0", ONE, ONE); <ide> i1 = new BoughtItem("i1", ONE, ONE); <ide> sixth.add(i3); <ide> sixth.add(i4); <ide> <del> ItemGraph itemGraph = createNewItemGraphWithSupermarket(ONE); <add> <ide> itemGraph.addBoughtItems(first); <ide> itemGraph.addBoughtItems(second); <ide> itemGraph.addBoughtItems(third); <ide> itemGraph.addBoughtItems(fifth); <ide> itemGraph.addBoughtItems(sixth); <ide> <del> return itemGraph; <ide> } <ide> <ide> @Test
Java
apache-2.0
959a885c1a78644b3cf887534482c974f54c48dc
0
genericsystem/genericsystem2015,genericsystem/genericsystem2015,genericsystem/genericsystem2015,genericsystem/genericsystem2015,genericsystem/genericsystem2015
package org.genericsystem.cv.application; import org.genericsystem.cv.AbstractApp; import org.genericsystem.cv.Img; import org.genericsystem.cv.utils.NativeLibraryLoader; import org.opencv.core.Core; import org.opencv.core.Core.MinMaxLocResult; import org.opencv.core.CvType; import org.opencv.core.Mat; import org.opencv.core.MatOfDouble; import org.opencv.core.MatOfInt; import org.opencv.core.MatOfPoint; import org.opencv.core.Point; import org.opencv.core.Rect; import org.opencv.core.Scalar; import org.opencv.core.Size; import org.opencv.imgproc.Imgproc; import org.opencv.imgproc.Moments; import org.opencv.utils.Converters; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import javafx.application.Platform; import javafx.scene.image.Image; import javafx.scene.image.ImageView; import javafx.scene.layout.GridPane; public class RobustTextDetectorDemo extends AbstractApp { public static void main(String[] args) { launch(args); } static { NativeLibraryLoader.load(); } private final GSCapture gsCapture = new GSVideoCapture(0, GSVideoCapture.HD, GSVideoCapture.VGA); private Img frame = gsCapture.read(); private ScheduledExecutorService timer = new BoundedScheduledThreadPoolExecutor(); private Config config = new Config(); private final ImageView[][] imageViews = new ImageView[][] { new ImageView[3], new ImageView[3], new ImageView[3] }; private void startTimer() { timer.scheduleAtFixedRate(() -> { try { Image[] images = doWork(); if (images != null) Platform.runLater(() -> { Iterator<Image> it = Arrays.asList(images).iterator(); for (int row = 0; row < imageViews.length; row++) for (int col = 0; col < imageViews[row].length; col++) if (it.hasNext()) imageViews[row][col].setImage(it.next()); }); } catch (Throwable e) { e.printStackTrace(); } }, 30, 30, TimeUnit.MILLISECONDS); } @Override protected void fillGrid(GridPane 
mainGrid) { double displaySizeReduction = 0.5; for (int col = 0; col < imageViews.length; col++) for (int row = 0; row < imageViews[col].length; row++) { ImageView imageView = new ImageView(); imageViews[col][row] = imageView; mainGrid.add(imageViews[col][row], col, row); imageView.setFitWidth(frame.width() / displaySizeReduction); imageView.setFitHeight(frame.height() / displaySizeReduction); } startTimer(); } Mat convertContourToMat(MatOfPoint contour) { Point[] pts = contour.toArray(); Mat result = new Mat(pts.length, 2, CvType.CV_64FC1); for (int i = 0; i < result.rows(); ++i) { result.put(i, 0, pts[i].x); result.put(i, 1, pts[i].y); } return result; } private Image[] doWork() { System.out.println("do work"); if (!config.stabilizedMode) frame = gsCapture.read(); Image[] images = new Image[12]; Img gray = frame.bgr2Gray(); RobustTextDetectorManager manager = new RobustTextDetectorManager(gray.getSrc()); Mat mserMask = manager.getMserMask(); images[0] = new Img(mserMask, false).toJfxImage(); Mat edges = new Mat(); Imgproc.Canny(gray.getSrc(), edges, 30, 110); Mat edge_mser_intersection = new Mat(); Core.bitwise_and(edges, mserMask, edge_mser_intersection); images[1] = new Img(edge_mser_intersection, false).toJfxImage(); Mat gradientGrown = growEdges(gray.getSrc(), edge_mser_intersection); images[2] = new Img(gradientGrown, false).toJfxImage(); Mat edgeEnhancedMser = new Mat(); Mat notGradientGrown = new Mat(); Core.bitwise_not(gradientGrown, notGradientGrown); Core.bitwise_and(notGradientGrown, mserMask, edgeEnhancedMser); images[3] = new Img(edgeEnhancedMser, false).toJfxImage(); Mat labels = new Mat(); Mat stats = new Mat(); Mat centroid = new Mat(); int labelsIds = Imgproc.connectedComponentsWithStats(edgeEnhancedMser, labels, stats, centroid, 4, CvType.CV_32S); Mat result2 = new Mat(labels.size(), CvType.CV_8UC1, new Scalar(0)); for (int labelId = 0; labelId < labelsIds; labelId++) { double area = stats.get(labelId, Imgproc.CC_STAT_AREA)[0]; if (area < 3 || 
area > 600) continue; Mat labelMask = new Mat(); Core.inRange(labels, new Scalar(labelId), new Scalar(labelId), labelMask); Moments moment = Imgproc.moments(labelMask); double left_comp = (moment.nu20 + moment.nu02) / 2.0; double right_comp = Math.sqrt((4 * moment.nu11 * moment.nu11) + (moment.nu20 - moment.nu02) * (moment.nu20 - moment.nu02)) / 2.0; double eig_val_1 = left_comp + right_comp; double eig_val_2 = left_comp - right_comp; double eccentricity = Math.sqrt(1.0 - (eig_val_2 / eig_val_1)); double minEccentricity = 0.1; double maxEccentricity = 0.995; if (eccentricity < minEccentricity || eccentricity > maxEccentricity) { continue; } List<MatOfPoint> contours = new ArrayList<>(); Imgproc.findContours(labelMask, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); MatOfInt hull = new MatOfInt(); Imgproc.convexHull(contours.get(0), hull); MatOfPoint mopHull = new MatOfPoint(); mopHull.create((int) hull.size().height, 1, CvType.CV_32SC2); for (int j = 0; j < hull.size().height; j++) { int index = (int) hull.get(j, 0)[0]; double[] point = new double[] { contours.get(0).get(index, 0)[0], contours.get(0).get(index, 0)[1] }; mopHull.put(j, 0, point); } double solidity = area / Imgproc.contourArea(mopHull); double minSolidity = 0.4;// 0.5 if (solidity < minSolidity) continue; Core.bitwise_or(result2, labelMask, result2); } images[4] = new Img(result2, false).toJfxImage(); Imgproc.distanceTransform(result2, result2, Imgproc.DIST_L2, 3); Mat tmp = new Mat(); Core.multiply(result2, new Scalar(200), tmp); images[5] = new Img(tmp, false).toJfxImage(); result2.convertTo(result2, CvType.CV_32SC1); Mat strokeWidth = computeStrokeWidth(result2); Mat filtered_stroke_width = new Mat(strokeWidth.size(), CvType.CV_8UC1, new Scalar(0)); Mat strokeWithCV8U = new Mat(); strokeWidth.convertTo(strokeWithCV8U, CvType.CV_8UC1); labelsIds = Imgproc.connectedComponentsWithStats(strokeWithCV8U, labels, stats, centroid, 4, CvType.CV_32S); for (int labelId = 0; labelId 
< labelsIds; labelId++) { Mat labelMask = new Mat(); Core.inRange(labels, new Scalar(labelId), new Scalar(labelId), labelMask); Mat temp = new Mat(strokeWithCV8U.size(), strokeWithCV8U.type(), new Scalar(0)); strokeWithCV8U.copyTo(temp, labelMask); int area = Core.countNonZero(temp); MatOfDouble meanD = new MatOfDouble(); MatOfDouble stdDev = new MatOfDouble(); Core.meanStdDev(strokeWithCV8U, meanD, stdDev, labelMask); if (area != 0) { /* Filter out those which are out of the prespecified ratio */ if ((stdDev.get(0, 0)[0] / meanD.get(0, 0)[0]) > 0.5) continue; /* Collect the filtered stroke width */ Core.bitwise_or(filtered_stroke_width, labelMask, filtered_stroke_width); } } images[6] = new Img(filtered_stroke_width, false).toJfxImage(); Mat bounding_region = new Mat(); Imgproc.morphologyEx(filtered_stroke_width, bounding_region, Imgproc.MORPH_CLOSE, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(25, 25))); // Imgproc.morphologyEx(bounding_region, bounding_region, Imgproc.MORPH_OPEN, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(25, 25))); // Mat result3 = new Mat(); // superFrame.getFrame().getSrc().copyTo(result3, filtered_stroke_width); images[7] = new Img(bounding_region, false).toJfxImage(); return images; } private static int booleansToInt(boolean[] arr) { int n = 0; for (boolean b : arr) n = (n << 1) | (b ? 1 : 0); return n; } private static int getNeighborsLessThan(Mat mat, int y, int x) { boolean[] neighbors = new boolean[8]; neighbors[0] = mat.get(y, x - 1)[0] == 0 ? false : mat.get(y, x - 1)[0] < mat.get(y, x)[0]; neighbors[1] = mat.get(y - 1, x - 1)[0] == 0 ? false : mat.get(y - 1, x - 1)[0] < mat.get(y, x)[0]; neighbors[2] = mat.get(y - 1, x)[0] == 0 ? false : mat.get(y - 1, x)[0] < mat.get(y, x)[0]; neighbors[3] = mat.get(y - 1, x + 1)[0] == 0 ? false : mat.get(y - 1, x + 1)[0] < mat.get(y, x)[0]; neighbors[4] = mat.get(y, x + 1)[0] == 0 ? 
false : mat.get(y, x + 1)[0] < mat.get(y, x)[0]; neighbors[5] = mat.get(y + 1, x + 1)[0] == 0 ? false : mat.get(y + 1, x + 1)[0] < mat.get(y, x)[0]; neighbors[6] = mat.get(y + 1, x)[0] == 0 ? false : mat.get(y + 1, x)[0] < mat.get(y, x)[0]; neighbors[7] = mat.get(y + 1, x - 1)[0] == 0 ? false : mat.get(y + 1, x - 1)[0] < mat.get(y, x)[0]; return booleansToInt(neighbors); } private static Mat computeStrokeWidth(Mat dist) { /* Pad the distance transformed matrix to avoid boundary checking */ Mat padded = new Mat(dist.rows() + 1, dist.cols() + 1, dist.type(), new Scalar(0)); dist.copyTo(new Mat(padded, new Rect(1, 1, dist.cols(), dist.rows()))); Mat lookup = new Mat(padded.size(), CvType.CV_8UC1, new Scalar(0)); for (int y = 1; y < padded.rows() - 1; y++) { for (int x = 1; x < padded.cols() - 1; x++) { /* Extract all the neighbors whose value < curr_ptr[x], encoded in 8-bit uchar */ if (padded.get(y, x)[0] != 0) lookup.put(y, x, (double) getNeighborsLessThan(padded, y, x)); } } /* Get max stroke from the distance transformed */ MinMaxLocResult minMaxLocResult = Core.minMaxLoc(padded); int maxStroke = (int) Math.round(minMaxLocResult.maxVal); for (double stroke = maxStroke; stroke > 0; stroke--) { Mat stroke_indices_mat = new Mat(); Mat mask = new Mat(); Core.inRange(padded, new Scalar(stroke - 0.1), new Scalar(stroke + 0.1), mask); Mat masked = new Mat(); padded.copyTo(masked, mask); masked.convertTo(masked, CvType.CV_8UC1); Core.findNonZero(masked, stroke_indices_mat); List<Point> stroke_indices = new ArrayList<>(); if (stroke_indices_mat.cols() > 0) Converters.Mat_to_vector_Point(stroke_indices_mat, stroke_indices); List<Point> neighbors = new ArrayList<>(); for (Point stroke_index : stroke_indices) { List<Point> temp = convertToCoords((int) stroke_index.x, (int) stroke_index.y, (int) lookup.get((int) stroke_index.y, (int) stroke_index.x)[0]); neighbors.addAll(temp); } while (!neighbors.isEmpty()) { for (Point neighbor : neighbors) padded.put((int) neighbor.y, (int) 
neighbor.x, stroke); neighbors.clear(); List<Point> temp = new ArrayList<>(neighbors); neighbors.clear(); /* Recursively gets neighbors of the current neighbors */ for (Point neighbor : temp) { List<Point> temp2 = convertToCoords((int) neighbor.x, (int) neighbor.y, (int) lookup.get((int) neighbor.y, (int) neighbor.x)[0]); neighbors.addAll(temp2); } } } return new Mat(padded, new Rect(1, 1, dist.cols(), dist.rows())); } private static List<Point> convertToCoords(int x, int y, int neighbors) { List<Point> coords = new ArrayList<>(); if (((neighbors & ((int) Math.pow(2, 7))) != 0)) coords.add(new Point(x - 1, y)); if (((neighbors & ((int) Math.pow(2, 6))) != 0)) coords.add(new Point(x - 1, y - 1)); if (((neighbors & ((int) Math.pow(2, 5))) != 0)) coords.add(new Point(x, y - 1)); if (((neighbors & ((int) Math.pow(2, 4))) != 0)) coords.add(new Point(x + 1, y - 1)); if (((neighbors & ((int) Math.pow(2, 3))) != 0)) coords.add(new Point(x + 1, y)); if (((neighbors & ((int) Math.pow(2, 2))) != 0)) coords.add(new Point(x + 1, y + 1)); if (((neighbors & ((int) Math.pow(2, 1))) != 0)) coords.add(new Point(x, y + 1)); if (((neighbors & ((int) Math.pow(2, 0))) != 0)) coords.add(new Point(x - 1, y + 1)); return coords; } public static int toBin(double angle, int neighbors) { float divisor = 180.0f / neighbors; return (int) ((((Math.floor(angle / divisor) - 1) / 2) + 1) % neighbors + 1); } public static Mat growEdges(Mat gray, Mat edges) { Mat grad_x = new Mat(), grad_y = new Mat(); Imgproc.Sobel(gray, grad_x, CvType.CV_64FC1, 1, 0, -1, 1, 0); Imgproc.Sobel(gray, grad_y, CvType.CV_64FC1, 0, 1, -1, 1, 0); Mat grad_mag = new Mat(), grad_dir = new Mat(); Core.cartToPolar(grad_x, grad_y, grad_mag, grad_dir, true); /* * Convert the angle into predefined 3x3 neighbor locations | 2 | 3 | 4 | | 1 | 0 | 5 | | 8 | 7 | 6 | */ for (int y = 0; y < grad_dir.rows(); y++) for (int x = 0; x < grad_dir.cols(); x++) grad_dir.put(y, x, toBin((grad_dir.get(y, x))[0], 8)); grad_dir.convertTo(grad_dir, 
CvType.CV_8UC1); /* Perform region growing based on the gradient direction */ Mat result = new Mat(); edges.copyTo(result); for (int y = 1; y < edges.rows() - 1; y++) { for (int x = 1; x < edges.cols() - 1; x++) { /* Only consider the contours */ if (edges.get(y, x)[0] != 0) { /* .. there should be a better way .... */ switch ((int) grad_dir.get(y, x)[0]) { case 1: result.put(y, x + 1, 255); break; case 2: result.put(y + 1, x + 1, 255); break; case 3: result.put(y + 1, x, 255); break; case 4: result.put(y + 1, x - 1, 255); break; case 5: result.put(y, x - 1, 255); break; case 6: result.put(y - 1, x - 1, 255); break; case 7: result.put(y - 1, x, 255); break; case 8: result.put(y - 1, x + 1, 255); break; default: System.out.println("Error : " + (int) grad_dir.get(y, x)[0]); break; } } } } return result; } @Override protected void onS() { config.stabilizedMode = !config.stabilizedMode; } @Override protected void onSpace() { if (config.isOn) timer.shutdown(); else { timer = new BoundedScheduledThreadPoolExecutor(); startTimer(); } config.isOn = !config.isOn; } @Override public void stop() throws Exception { super.stop(); timer.shutdown(); timer.awaitTermination(5000, TimeUnit.MILLISECONDS); gsCapture.release(); } }
gs-cv/src/main/java/org/genericsystem/cv/application/RobustTextDetectorDemo.java
package org.genericsystem.cv.application; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import org.genericsystem.cv.AbstractApp; import org.genericsystem.cv.Img; import org.genericsystem.cv.utils.NativeLibraryLoader; import org.opencv.core.Core; import org.opencv.core.Core.MinMaxLocResult; import org.opencv.core.CvType; import org.opencv.core.Mat; import org.opencv.core.MatOfDouble; import org.opencv.core.MatOfInt; import org.opencv.core.MatOfPoint; import org.opencv.core.Point; import org.opencv.core.Rect; import org.opencv.core.Scalar; import org.opencv.core.Size; import org.opencv.imgproc.Imgproc; import org.opencv.imgproc.Moments; import org.opencv.utils.Converters; import javafx.application.Platform; import javafx.scene.image.Image; import javafx.scene.image.ImageView; import javafx.scene.layout.GridPane; public class RobustTextDetectorDemo extends AbstractApp { public static void main(String[] args) { launch(args); } static { NativeLibraryLoader.load(); } private final GSCapture gsCapture = new GSVideoCapture(0, GSVideoCapture.HD, GSVideoCapture.VGA); private Img frame = gsCapture.read(); private ScheduledExecutorService timer = new BoundedScheduledThreadPoolExecutor(); private Config config = new Config(); private final ImageView[][] imageViews = new ImageView[][] { new ImageView[3], new ImageView[3], new ImageView[3] }; private void startTimer() { timer.scheduleAtFixedRate(() -> { try { Image[] images = doWork(); if (images != null) Platform.runLater(() -> { Iterator<Image> it = Arrays.asList(images).iterator(); for (int row = 0; row < imageViews.length; row++) for (int col = 0; col < imageViews[row].length; col++) if (it.hasNext()) imageViews[row][col].setImage(it.next()); }); } catch (Throwable e) { e.printStackTrace(); } }, 30, 30, TimeUnit.MILLISECONDS); } @Override protected void fillGrid(GridPane 
mainGrid) { double displaySizeReduction = 1; for (int col = 0; col < imageViews.length; col++) for (int row = 0; row < imageViews[col].length; row++) { ImageView imageView = new ImageView(); imageViews[col][row] = imageView; mainGrid.add(imageViews[col][row], col, row); imageView.setFitWidth(frame.width() / displaySizeReduction); imageView.setFitHeight(frame.height() / displaySizeReduction); } startTimer(); } Mat convertContourToMat(MatOfPoint contour) { Point[] pts = contour.toArray(); Mat result = new Mat(pts.length, 2, CvType.CV_64FC1); for (int i = 0; i < result.rows(); ++i) { result.put(i, 0, pts[i].x); result.put(i, 1, pts[i].y); } return result; } private Image[] doWork() { System.out.println("do work"); if (!config.stabilizedMode) frame = gsCapture.read(); Image[] images = new Image[12]; Img gray = frame.bgr2Gray(); RobustTextDetectorManager manager = new RobustTextDetectorManager(gray.getSrc()); Mat mserMask = manager.getMserMask(); images[0] = new Img(mserMask, false).toJfxImage(); Mat edges = new Mat(); Imgproc.Canny(gray.getSrc(), edges, 20, 60); Mat edge_mser_intersection = new Mat(); Core.bitwise_and(edges, mserMask, edge_mser_intersection); images[1] = new Img(edge_mser_intersection, false).toJfxImage(); Mat gradientGrown = growEdges(gray.getSrc(), edge_mser_intersection); images[2] = new Img(gradientGrown, false).toJfxImage(); Mat edgeEnhancedMser = new Mat(); Mat notGradientGrown = new Mat(); Core.bitwise_not(gradientGrown, notGradientGrown); Core.bitwise_and(notGradientGrown, mserMask, edgeEnhancedMser); images[3] = new Img(edgeEnhancedMser, false).toJfxImage(); Mat labels = new Mat(); Mat stats = new Mat(); Mat centroid = new Mat(); int labelsIds = Imgproc.connectedComponentsWithStats(edgeEnhancedMser, labels, stats, centroid, 4, CvType.CV_32S); Mat result2 = new Mat(labels.size(), CvType.CV_8UC1, new Scalar(0)); for (int labelId = 0; labelId < labelsIds; labelId++) { double area = stats.get(labelId, Imgproc.CC_STAT_AREA)[0]; if (area < 3 || area 
> 600) continue; Mat labelMask = new Mat(); Core.inRange(labels, new Scalar(labelId), new Scalar(labelId), labelMask); Moments moment = Imgproc.moments(labelMask); double left_comp = (moment.nu20 + moment.nu02) / 2.0; double right_comp = Math.sqrt((4 * moment.nu11 * moment.nu11) + (moment.nu20 - moment.nu02) * (moment.nu20 - moment.nu02)) / 2.0; double eig_val_1 = left_comp + right_comp; double eig_val_2 = left_comp - right_comp; double eccentricity = Math.sqrt(1.0 - (eig_val_2 / eig_val_1)); double minEccentricity = 0.1; double maxEccentricity = 0.995; if (eccentricity < minEccentricity || eccentricity > maxEccentricity) { continue; } List<MatOfPoint> contours = new ArrayList<>(); Imgproc.findContours(labelMask, contours, new Mat(), Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); MatOfInt hull = new MatOfInt(); Imgproc.convexHull(contours.get(0), hull); MatOfPoint mopHull = new MatOfPoint(); mopHull.create((int) hull.size().height, 1, CvType.CV_32SC2); for (int j = 0; j < hull.size().height; j++) { int index = (int) hull.get(j, 0)[0]; double[] point = new double[] { contours.get(0).get(index, 0)[0], contours.get(0).get(index, 0)[1] }; mopHull.put(j, 0, point); } double solidity = area / Imgproc.contourArea(mopHull); double minSolidity = 0.4;// 0.5 if (solidity < minSolidity) { continue; } Core.bitwise_or(result2, labelMask, result2); } images[4] = new Img(result2, false).toJfxImage(); Imgproc.distanceTransform(result2, result2, Imgproc.DIST_L2, 3); Mat tmp = new Mat(); Core.multiply(result2, new Scalar(200), tmp); images[5] = new Img(tmp, false).toJfxImage(); result2.convertTo(result2, CvType.CV_32SC1); Mat strokeWidth = computeStrokeWidth(result2); Mat filtered_stroke_width = new Mat(strokeWidth.size(), CvType.CV_8UC1, new Scalar(0)); Mat strokeWithCV8U = new Mat(); strokeWidth.convertTo(strokeWithCV8U, CvType.CV_8UC1); labelsIds = Imgproc.connectedComponentsWithStats(strokeWithCV8U, labels, stats, centroid, 4, CvType.CV_32S); for (int labelId = 0; labelId < 
labelsIds; labelId++) { Mat labelMask = new Mat(); Core.inRange(labels, new Scalar(labelId), new Scalar(labelId), labelMask); Mat temp = new Mat(strokeWithCV8U.size(), strokeWithCV8U.type(), new Scalar(0)); strokeWithCV8U.copyTo(temp, labelMask); int area = Core.countNonZero(temp); MatOfDouble meanD = new MatOfDouble(); MatOfDouble stdDev = new MatOfDouble(); Core.meanStdDev(strokeWithCV8U, meanD, stdDev, labelMask); if (area != 0) { /* Filter out those which are out of the prespecified ratio */ if ((stdDev.get(0, 0)[0] / meanD.get(0, 0)[0]) > 0.5) continue; /* Collect the filtered stroke width */ Core.bitwise_or(filtered_stroke_width, labelMask, filtered_stroke_width); } } images[6] = new Img(filtered_stroke_width, false).toJfxImage(); Mat bounding_region = new Mat(); Imgproc.morphologyEx(filtered_stroke_width, bounding_region, Imgproc.MORPH_CLOSE, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(25, 25))); // Imgproc.morphologyEx(bounding_region, bounding_region, Imgproc.MORPH_OPEN, Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(25, 25))); // Mat result3 = new Mat(); // superFrame.getFrame().getSrc().copyTo(result3, filtered_stroke_width); images[7] = new Img(bounding_region, false).toJfxImage(); return images; } private static int booleansToInt(boolean[] arr) { int n = 0; for (boolean b : arr) n = (n << 1) | (b ? 1 : 0); return n; } private static int getNeighborsLessThan(Mat mat, int y, int x) { boolean[] neighbors = new boolean[8]; neighbors[0] = mat.get(y, x - 1)[0] == 0 ? false : mat.get(y, x - 1)[0] < mat.get(y, x)[0]; neighbors[1] = mat.get(y - 1, x - 1)[0] == 0 ? false : mat.get(y - 1, x - 1)[0] < mat.get(y, x)[0]; neighbors[2] = mat.get(y - 1, x)[0] == 0 ? false : mat.get(y - 1, x)[0] < mat.get(y, x)[0]; neighbors[3] = mat.get(y - 1, x + 1)[0] == 0 ? false : mat.get(y - 1, x + 1)[0] < mat.get(y, x)[0]; neighbors[4] = mat.get(y, x + 1)[0] == 0 ? 
false : mat.get(y, x + 1)[0] < mat.get(y, x)[0]; neighbors[5] = mat.get(y + 1, x + 1)[0] == 0 ? false : mat.get(y + 1, x + 1)[0] < mat.get(y, x)[0]; neighbors[6] = mat.get(y + 1, x)[0] == 0 ? false : mat.get(y + 1, x)[0] < mat.get(y, x)[0]; neighbors[7] = mat.get(y + 1, x - 1)[0] == 0 ? false : mat.get(y + 1, x - 1)[0] < mat.get(y, x)[0]; return booleansToInt(neighbors); } private static Mat computeStrokeWidth(Mat dist) { /* Pad the distance transformed matrix to avoid boundary checking */ Mat padded = new Mat(dist.rows() + 1, dist.cols() + 1, dist.type(), new Scalar(0)); dist.copyTo(new Mat(padded, new Rect(1, 1, dist.cols(), dist.rows()))); Mat lookup = new Mat(padded.size(), CvType.CV_8UC1, new Scalar(0)); for (int y = 1; y < padded.rows() - 1; y++) { for (int x = 1; x < padded.cols() - 1; x++) { /* Extract all the neighbors whose value < curr_ptr[x], encoded in 8-bit uchar */ if (padded.get(y, x)[0] != 0) lookup.put(y, x, (double) getNeighborsLessThan(padded, y, x)); } } /* Get max stroke from the distance transformed */ MinMaxLocResult minMaxLocResult = Core.minMaxLoc(padded); int maxStroke = (int) Math.round(minMaxLocResult.maxVal); for (double stroke = maxStroke; stroke > 0; stroke--) { Mat stroke_indices_mat = new Mat(); Mat mask = new Mat(); Core.inRange(padded, new Scalar(stroke - 0.1), new Scalar(stroke + 0.1), mask); Mat masked = new Mat(); padded.copyTo(masked, mask); masked.convertTo(masked, CvType.CV_8UC1); Core.findNonZero(masked, stroke_indices_mat); List<Point> stroke_indices = new ArrayList<>(); if (stroke_indices_mat.cols() > 0) Converters.Mat_to_vector_Point(stroke_indices_mat, stroke_indices); List<Point> neighbors = new ArrayList<>(); for (Point stroke_index : stroke_indices) { List<Point> temp = convertToCoords((int) stroke_index.x, (int) stroke_index.y, (int) lookup.get((int) stroke_index.y, (int) stroke_index.x)[0]); neighbors.addAll(temp); } while (!neighbors.isEmpty()) { for (Point neighbor : neighbors) padded.put((int) neighbor.y, (int) 
neighbor.x, stroke); neighbors.clear(); List<Point> temp = new ArrayList<>(neighbors); neighbors.clear(); /* Recursively gets neighbors of the current neighbors */ for (Point neighbor : temp) { List<Point> temp2 = convertToCoords((int) neighbor.x, (int) neighbor.y, (int) lookup.get((int) neighbor.y, (int) neighbor.x)[0]); neighbors.addAll(temp2); } } } return new Mat(padded, new Rect(1, 1, dist.cols(), dist.rows())); } private static List<Point> convertToCoords(int x, int y, int neighbors) { List<Point> coords = new ArrayList<>(); if (((neighbors & ((int) Math.pow(2, 7))) != 0)) coords.add(new Point(x - 1, y)); if (((neighbors & ((int) Math.pow(2, 6))) != 0)) coords.add(new Point(x - 1, y - 1)); if (((neighbors & ((int) Math.pow(2, 5))) != 0)) coords.add(new Point(x, y - 1)); if (((neighbors & ((int) Math.pow(2, 4))) != 0)) coords.add(new Point(x + 1, y - 1)); if (((neighbors & ((int) Math.pow(2, 3))) != 0)) coords.add(new Point(x + 1, y)); if (((neighbors & ((int) Math.pow(2, 2))) != 0)) coords.add(new Point(x + 1, y + 1)); if (((neighbors & ((int) Math.pow(2, 1))) != 0)) coords.add(new Point(x, y + 1)); if (((neighbors & ((int) Math.pow(2, 0))) != 0)) coords.add(new Point(x - 1, y + 1)); return coords; } public static int toBin(double angle, int neighbors) { float divisor = 180.0f / neighbors; return (int) ((((Math.floor(angle / divisor) - 1) / 2) + 1) % neighbors + 1); } public static Mat growEdges(Mat image, Mat edges) { Mat grad_x = new Mat(), grad_y = new Mat(); Imgproc.Sobel(image, grad_x, CvType.CV_32FC1, 1, 0); Imgproc.Sobel(image, grad_y, CvType.CV_32FC1, 0, 1); Mat grad_mag = new Mat(), grad_dir = new Mat(); Core.cartToPolar(grad_x, grad_y, grad_mag, grad_dir, true); /* * Convert the angle into predefined 3x3 neighbor locations | 2 | 3 | 4 | | 1 | 0 | 5 | | 8 | 7 | 6 | */ for (int y = 0; y < grad_dir.rows(); y++) for (int x = 0; x < grad_dir.cols(); x++) grad_dir.put(y, x, toBin((grad_dir.get(y, x))[0], 8)); grad_dir.convertTo(grad_dir, CvType.CV_8UC1); 
/* Perform region growing based on the gradient direction */ Mat result = new Mat(); edges.copyTo(result); for (int y = 1; y < edges.rows() - 1; y++) { for (int x = 1; x < edges.cols() - 1; x++) { /* Only consider the contours */ if (edges.get(y, x)[0] != 0) { /* .. there should be a better way .... */ switch ((int) grad_dir.get(y, x)[0]) { case 1: result.put(y, x + 1, 255); break; case 2: result.put(y + 1, x + 1, 255); break; case 3: result.put(y + 1, x, 255); break; case 4: result.put(y + 1, x - 1, 255); break; case 5: result.put(y, x - 1, 255); break; case 6: result.put(y - 1, x - 1, 255); break; case 7: result.put(y - 1, x, 255); break; case 8: result.put(y - 1, x + 1, 255); break; // case 1: // result.put(y, x - 1, 255); // break; // case 2: // result.put(y - 1, x - 1, 255); // break; // case 3: // result.put(y - 1, x, 255); // break; // case 4: // result.put(y - 1, x + 1, 255); // break; // case 5: // result.put(y, x + 1, 255); // break; // case 6: // result.put(y + 1, x + 1, 255); // break; // case 7: // result.put(y + 1, x, 255); // break; // case 8: // result.put(y + 1, x - 1, 255); // break; default: System.out.println("Error : " + (int) grad_dir.get(y, x)[0]); break; } } } } return result; } @Override protected void onS() { config.stabilizedMode = !config.stabilizedMode; } @Override protected void onSpace() { if (config.isOn) timer.shutdown(); else { timer = new BoundedScheduledThreadPoolExecutor(); startTimer(); } config.isOn = !config.isOn; } @Override public void stop() throws Exception { super.stop(); timer.shutdown(); timer.awaitTermination(5000, TimeUnit.MILLISECONDS); gsCapture.release(); } }
Change meta parameters
gs-cv/src/main/java/org/genericsystem/cv/application/RobustTextDetectorDemo.java
Change meta parameters
<ide><path>s-cv/src/main/java/org/genericsystem/cv/application/RobustTextDetectorDemo.java <ide> package org.genericsystem.cv.application; <del> <del>import java.util.ArrayList; <del>import java.util.Arrays; <del>import java.util.Iterator; <del>import java.util.List; <del>import java.util.concurrent.ScheduledExecutorService; <del>import java.util.concurrent.TimeUnit; <ide> <ide> import org.genericsystem.cv.AbstractApp; <ide> import org.genericsystem.cv.Img; <ide> import org.opencv.imgproc.Moments; <ide> import org.opencv.utils.Converters; <ide> <add>import java.util.ArrayList; <add>import java.util.Arrays; <add>import java.util.Iterator; <add>import java.util.List; <add>import java.util.concurrent.ScheduledExecutorService; <add>import java.util.concurrent.TimeUnit; <add> <ide> import javafx.application.Platform; <ide> import javafx.scene.image.Image; <ide> import javafx.scene.image.ImageView; <ide> <ide> @Override <ide> protected void fillGrid(GridPane mainGrid) { <del> double displaySizeReduction = 1; <add> double displaySizeReduction = 0.5; <ide> for (int col = 0; col < imageViews.length; col++) <ide> for (int row = 0; row < imageViews[col].length; row++) { <ide> ImageView imageView = new ImageView(); <ide> images[0] = new Img(mserMask, false).toJfxImage(); <ide> <ide> Mat edges = new Mat(); <del> Imgproc.Canny(gray.getSrc(), edges, 20, 60); <add> Imgproc.Canny(gray.getSrc(), edges, 30, 110); <ide> Mat edge_mser_intersection = new Mat(); <ide> Core.bitwise_and(edges, mserMask, edge_mser_intersection); <ide> images[1] = new Img(edge_mser_intersection, false).toJfxImage(); <ide> } <ide> double solidity = area / Imgproc.contourArea(mopHull); <ide> double minSolidity = 0.4;// 0.5 <del> if (solidity < minSolidity) { <add> if (solidity < minSolidity) <ide> continue; <del> } <ide> Core.bitwise_or(result2, labelMask, result2); <ide> } <ide> images[4] = new Img(result2, false).toJfxImage(); <ide> return (int) ((((Math.floor(angle / divisor) - 1) / 2) + 1) % neighbors + 
1); <ide> } <ide> <del> public static Mat growEdges(Mat image, Mat edges) { <del> <add> public static Mat growEdges(Mat gray, Mat edges) { <ide> Mat grad_x = new Mat(), grad_y = new Mat(); <del> Imgproc.Sobel(image, grad_x, CvType.CV_32FC1, 1, 0); <del> Imgproc.Sobel(image, grad_y, CvType.CV_32FC1, 0, 1); <add> <add> Imgproc.Sobel(gray, grad_x, CvType.CV_64FC1, 1, 0, -1, 1, 0); <add> Imgproc.Sobel(gray, grad_y, CvType.CV_64FC1, 0, 1, -1, 1, 0); <ide> Mat grad_mag = new Mat(), grad_dir = new Mat(); <add> <ide> Core.cartToPolar(grad_x, grad_y, grad_mag, grad_dir, true); <ide> <ide> /* <ide> case 8: <ide> result.put(y - 1, x + 1, 255); <ide> break; <del> // case 1: <del> // result.put(y, x - 1, 255); <del> // break; <del> // case 2: <del> // result.put(y - 1, x - 1, 255); <del> // break; <del> // case 3: <del> // result.put(y - 1, x, 255); <del> // break; <del> // case 4: <del> // result.put(y - 1, x + 1, 255); <del> // break; <del> // case 5: <del> // result.put(y, x + 1, 255); <del> // break; <del> // case 6: <del> // result.put(y + 1, x + 1, 255); <del> // break; <del> // case 7: <del> // result.put(y + 1, x, 255); <del> // break; <del> // case 8: <del> // result.put(y + 1, x - 1, 255); <del> // break; <add> <ide> default: <ide> System.out.println("Error : " + (int) grad_dir.get(y, x)[0]); <ide> break;
Java
apache-2.0
5c9d0a40a6aaff43a818c63baf677d751c64823b
0
togglz/togglz,togglz/togglz,togglz/togglz
package org.togglz.console.handlers.edit; import java.io.IOException; import java.util.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.togglz.console.RequestEvent; import org.togglz.console.RequestHandlerBase; import org.togglz.servlet.spi.CSRFToken; import org.togglz.servlet.spi.CSRFTokenProvider; import org.togglz.console.model.FeatureModel; import org.togglz.core.Feature; import org.togglz.core.manager.FeatureManager; import org.togglz.core.metadata.FeatureMetaData; import org.togglz.core.repository.FeatureState; import org.togglz.core.spi.ActivationStrategy; import com.floreysoft.jmte.Engine; import org.togglz.core.util.Services; public class EditPageHandler extends RequestHandlerBase { @Override public boolean handles(String path) { return path.equals("/edit"); } @Override public boolean adminOnly() { return true; } @Override public void process(RequestEvent event) throws IOException { FeatureManager featureManager = event.getFeatureManager(); HttpServletRequest request = event.getRequest(); HttpServletResponse response = event.getResponse(); // identify the feature Feature feature = null; String featureAsString = request.getParameter("f"); for (Feature f : featureManager.getFeatures()) { if (f.name().equals(featureAsString)) { feature = f; } } if (feature == null) { response.sendError(400); return; } FeatureMetaData metadata = featureManager.getMetaData(feature); List<ActivationStrategy> impls = featureManager.getActivationStrategies(); FeatureModel featureModel = new FeatureModel(feature, metadata, impls); // GET requests for this feature if ("GET".equals(request.getMethod())) { FeatureState state = featureManager.getFeatureState(feature); featureModel.populateFromFeatureState(state); renderEditPage(event, featureModel); } // POST requests for this feature if ("POST".equals(request.getMethod())) { featureModel.restoreFromRequest(request); // no validation errors if (featureModel.isValid()) { 
FeatureState state = featureModel.toFeatureState(); featureManager.setFeatureState(state); response.sendRedirect("index"); } // got validation errors else { renderEditPage(event, featureModel); } } } private void renderEditPage(RequestEvent event, FeatureModel featureModel) throws IOException { List<CSRFToken> tokens = new ArrayList<CSRFToken>(); for (CSRFTokenProvider provider : Services.get(CSRFTokenProvider.class)) { CSRFToken token = provider.getToken(event.getRequest()); if (token != null) { tokens.add(token); } } Map<String, Object> model = new HashMap<String, Object>(); model.put("model", featureModel); model.put("tokens", tokens); String template = getResourceAsString("edit.html"); String content = new Engine().transform(template, model); writeResponse(event, content); } }
console/src/main/java/org/togglz/console/handlers/edit/EditPageHandler.java
package org.togglz.console.handlers.edit; import java.io.IOException; import java.util.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.togglz.console.RequestEvent; import org.togglz.console.RequestHandlerBase; import org.togglz.servlet.spi.CSRFToken; import org.togglz.servlet.spi.CSRFTokenProvider; import org.togglz.console.model.FeatureModel; import org.togglz.core.Feature; import org.togglz.core.manager.FeatureManager; import org.togglz.core.metadata.FeatureMetaData; import org.togglz.core.repository.FeatureState; import org.togglz.core.spi.ActivationStrategy; import com.floreysoft.jmte.Engine; import org.togglz.core.util.Services; public class EditPageHandler extends RequestHandlerBase { @Override public boolean handles(String path) { return path.equals("/edit"); } @Override public boolean adminOnly() { return true; } @Override public void process(RequestEvent event) throws IOException { FeatureManager featureManager = event.getFeatureManager(); HttpServletRequest request = event.getRequest(); HttpServletResponse response = event.getResponse(); // identify the feature Feature feature = null; String featureAsString = request.getParameter("f"); for (Feature f : featureManager.getFeatures()) { if (f.name().equals(featureAsString)) { feature = f; } } if (feature == null) { response.sendError(403); return; } FeatureMetaData metadata = featureManager.getMetaData(feature); List<ActivationStrategy> impls = featureManager.getActivationStrategies(); FeatureModel featureModel = new FeatureModel(feature, metadata, impls); // GET requests for this feature if ("GET".equals(request.getMethod())) { FeatureState state = featureManager.getFeatureState(feature); featureModel.populateFromFeatureState(state); renderEditPage(event, featureModel); } // POST requests for this feature if ("POST".equals(request.getMethod())) { featureModel.restoreFromRequest(request); // no validation errors if (featureModel.isValid()) { 
FeatureState state = featureModel.toFeatureState(); featureManager.setFeatureState(state); response.sendRedirect("index"); } // got validation errors else { renderEditPage(event, featureModel); } } } private void renderEditPage(RequestEvent event, FeatureModel featureModel) throws IOException { List<CSRFToken> tokens = new ArrayList<CSRFToken>(); for (CSRFTokenProvider provider : Services.get(CSRFTokenProvider.class)) { CSRFToken token = provider.getToken(event.getRequest()); if (token != null) { tokens.add(token); } } Map<String, Object> model = new HashMap<String, Object>(); model.put("model", featureModel); model.put("tokens", tokens); String template = getResourceAsString("edit.html"); String content = new Engine().transform(template, model); writeResponse(event, content); } }
Exchanging Forbidden to Bad Request (#319) this makes it easier to distinguish errors caused by wrong request against authentification issues.
console/src/main/java/org/togglz/console/handlers/edit/EditPageHandler.java
Exchanging Forbidden to Bad Request (#319)
<ide><path>onsole/src/main/java/org/togglz/console/handlers/edit/EditPageHandler.java <ide> } <ide> } <ide> if (feature == null) { <del> response.sendError(403); <add> response.sendError(400); <ide> return; <ide> } <ide>
Java
agpl-3.0
076df97183eb4f3cbeb66d879997816aa5f13fd5
0
xJon/Jons-Useless-Mod
src/main/java/xjon/jum/client/render/item/ItemRendererUselessChest.java
package xjon.jum.client.render.item; import org.lwjgl.opengl.GL11; import xjon.jum.tileentity.TileEntityUselessChest; import net.minecraft.client.model.ModelChest; import net.minecraft.client.renderer.tileentity.TileEntityRendererDispatcher; import net.minecraft.item.ItemStack; import net.minecraft.util.ResourceLocation; import net.minecraftforge.client.IItemRenderer; import net.minecraftforge.fml.client.FMLClientHandler; public class ItemRendererUselessChest implements IItemRenderer { private ModelChest chestModel; private ResourceLocation location = new ResourceLocation("jum:textures/entity/chest/normal"); @Override public boolean handleRenderType(ItemStack item, ItemRenderType type) { return true; } @Override public boolean shouldUseRenderHelper(ItemRenderType type, ItemStack item, ItemRendererHelper helper) { return true; } @Override public void renderItem(ItemRenderType type, ItemStack item, Object... data) { switch (type) { case ENTITY: { renderStratosChest(0.5F, 0.5F, 0.5F); break; } case EQUIPPED: { renderStratosChest(1.0F, 1.0F, 1.0F); break; } case EQUIPPED_FIRST_PERSON: { renderStratosChest(1.0F, 1.0F, 1.0F); break; } case INVENTORY: { renderStratosChest(0.0F, 0.075F, 0.0F); break; } default: break; } } private void renderStratosChest(float x, float y, float z) { location = new ResourceLocation("jum:textures/entity/chest/normal"); FMLClientHandler.instance().getClient().renderEngine.bindTexture(location); GL11.glPushMatrix(); GL11.glTranslatef(x, y, z); GL11.glRotatef(180, 1, 0, 0); GL11.glRotatef(-90, 0, 1, 0); chestModel.renderAll(); GL11.glPopMatrix(); } }
More of the same
src/main/java/xjon/jum/client/render/item/ItemRendererUselessChest.java
More of the same
<ide><path>rc/main/java/xjon/jum/client/render/item/ItemRendererUselessChest.java <del>package xjon.jum.client.render.item; <del> <del>import org.lwjgl.opengl.GL11; <del> <del>import xjon.jum.tileentity.TileEntityUselessChest; <del>import net.minecraft.client.model.ModelChest; <del>import net.minecraft.client.renderer.tileentity.TileEntityRendererDispatcher; <del>import net.minecraft.item.ItemStack; <del>import net.minecraft.util.ResourceLocation; <del>import net.minecraftforge.client.IItemRenderer; <del>import net.minecraftforge.fml.client.FMLClientHandler; <del> <del>public class ItemRendererUselessChest implements IItemRenderer { <del> <del> private ModelChest chestModel; <del> private ResourceLocation location = new ResourceLocation("jum:textures/entity/chest/normal"); <del> <del> @Override <del> public boolean handleRenderType(ItemStack item, ItemRenderType type) { <del> return true; <del> } <del> <del> @Override <del> public boolean shouldUseRenderHelper(ItemRenderType type, ItemStack item, ItemRendererHelper helper) { <del> return true; <del> } <del> <del> @Override <del> public void renderItem(ItemRenderType type, ItemStack item, Object... 
data) { <del> switch (type) { <del> case ENTITY: { <del> renderStratosChest(0.5F, 0.5F, 0.5F); <del> break; <del> } <del> case EQUIPPED: { <del> renderStratosChest(1.0F, 1.0F, 1.0F); <del> break; <del> } <del> case EQUIPPED_FIRST_PERSON: { <del> renderStratosChest(1.0F, 1.0F, 1.0F); <del> break; <del> } <del> case INVENTORY: { <del> renderStratosChest(0.0F, 0.075F, 0.0F); <del> break; <del> } <del> default: <del> break; <del> } <del> } <del> private void renderStratosChest(float x, float y, float z) { <del> location = new ResourceLocation("jum:textures/entity/chest/normal"); <del> FMLClientHandler.instance().getClient().renderEngine.bindTexture(location); <del> GL11.glPushMatrix(); <del> GL11.glTranslatef(x, y, z); <del> GL11.glRotatef(180, 1, 0, 0); <del> GL11.glRotatef(-90, 0, 1, 0); <del> chestModel.renderAll(); <del> GL11.glPopMatrix(); <del> } <del> <del>}
JavaScript
mpl-2.0
58d55cbd57595e67e214fd20aad92dd78a3cc933
0
HatfieldConsultants/Hatfield.EnviroData.MVC,HatfieldConsultants/Hatfield.EnviroData.MVC,HatfieldConsultants/Hatfield.EnviroData.MVC,gvassas/Hatfield.EnviroData.MVC,gvassas/Hatfield.EnviroData.MVC
var LocalFileImportModel = function () { var self = this; self.ImportResults = ko.observableArray(); self.submitImportRequest = function () { //self.ImportResults(); var formData = new FormData(); // Main magic with files here var files = $('input[type=file]'); formData.append('headerFileInput', $('input[type=file]')[0].files[0]); formData.append('sampleFileInput', $('input[type=file]')[1].files[0]); formData.append('chemistryFileInput', $('input[type=file]')[2].files[0]); $.ajax({ url: '../api/ESDATImportAPI/ImportLocalFiles', type: 'POST', data: formData, mimeType: "multipart/form-data", contentType: false, cache: false, processData: false, success: function (data) { var parsedJson = JSON.parse(data);//parsed the string to json because the data type was set to form self.ImportResults.removeAll(); ko.utils.arrayPushAll(self.ImportResults, parsedJson); //push the json array to the station list }, error: function (data) { alert('Import local file system fail'); } });//end of ajax }; };
Source/Hatfield.EnviroData.MVC/Scripts/Knockout/LocalFileImport.js
var LocalFileImportModel = function () { var self = this; self.ImportResults = ko.observableArray(); self.submitImportRequest = function () { self.ImportResults(null); var formData = new FormData(); // Main magic with files here var files = $('input[type=file]'); formData.append('headerFileInput', $('input[type=file]')[0].files[0]); formData.append('sampleFileInput', $('input[type=file]')[1].files[0]); formData.append('chemistryFileInput', $('input[type=file]')[2].files[0]); $.ajax({ url: '../api/ESDATImportAPI/ImportLocalFiles', type: 'POST', data: formData, mimeType: "multipart/form-data", contentType: false, cache: false, processData: false, success: function (data) { self.ImportResults(data); }, error: function (data) { alert('Import local file system fail'); } });//end of ajax }; };
fix binding fail for the local file system js fix binding fail for the local file system js
Source/Hatfield.EnviroData.MVC/Scripts/Knockout/LocalFileImport.js
fix binding fail for the local file system js
<ide><path>ource/Hatfield.EnviroData.MVC/Scripts/Knockout/LocalFileImport.js <ide> self.ImportResults = ko.observableArray(); <ide> <ide> self.submitImportRequest = function () { <del> self.ImportResults(null); <add> //self.ImportResults(); <ide> <ide> var formData = new FormData(); <ide> // Main magic with files here <ide> contentType: false, <ide> cache: false, <ide> processData: false, <del> success: function (data) { <del> self.ImportResults(data); <add> success: function (data) { <add> var parsedJson = JSON.parse(data);//parsed the string to json because the data type was set to form <add> self.ImportResults.removeAll(); <add> ko.utils.arrayPushAll(self.ImportResults, parsedJson); //push the json array to the station list <ide> <ide> }, <ide> error: function (data) {
JavaScript
mit
b279f78e6c8a3fbd478414c8cda69b3bec726c5c
0
Xeio/tswcalc,Xeio/tswcalc,joakibj/tswcalc,joakibj/tswcalc
var tswcalc = tswcalc || {}; tswcalc.data = tswcalc.data || {}; tswcalc.data.custom_gear_data = { 'weapon': { '10.0': { weapon_power: 398 }, '10.1': { weapon_power: 411 }, '10.2': { weapon_power: 423 }, '10.3': { weapon_power: 434 }, '10.4': { weapon_power: 446 }, '10.5': { weapon_power: 457 }, '10.6': { weapon_power: 464 }, '10.7': { weapon_power: 475 } }, 'head': { heal_dps: { 'ql10.0': { rating: 559 }, 'ql10.1': { rating: 596 }, 'ql10.2': { rating: 636 }, 'ql10.3': { rating: 682 }, 'ql10.4': { rating: 735 }, 'ql10.5': { rating: 788 }, 'ql10.6': { rating: 846 }, 'ql10.7': { rating: 936 } }, tank: { 'ql10.0': { hitpoints: 2100 }, 'ql10.1': { hitpoints: 2194 }, 'ql10.2': { hitpoints: 2288 }, 'ql10.3': { hitpoints: 2382 }, 'ql10.4': { hitpoints: 2476 }, 'ql10.5': { hitpoints: 2570 }, 'ql10.6': { hitpoints: 2627 }, 'ql10.7': { hitpoints: 2714 } } }, 'major': { heal_dps: { 'ql10.0': { rating: 505 }, 'ql10.1': { rating: 538 }, 'ql10.2': { rating: 575 }, 'ql10.3': { rating: 616 }, 'ql10.4': { rating: 664 }, 'ql10.5': { rating: 712 }, 'ql10.6': { rating: 764 }, 'ql10.7': { rating: 845 } }, tank: { 'ql10.0': { hitpoints: 1897 }, 'ql10.1': { hitpoints: 1982 }, 'ql10.2': { hitpoints: 2067 }, 'ql10.3': { hitpoints: 2152 }, 'ql10.4': { hitpoints: 2237 }, 'ql10.5': { hitpoints: 2322 }, 'ql10.6': { hitpoints: 2373 }, 'ql10.7': { hitpoints: 2452 } } }, 'minor': { heal_dps: { 'ql10.0': { rating: 325 }, 'ql10.1': { rating: 346 }, 'ql10.2': { rating: 369 }, 'ql10.3': { rating: 396 }, 'ql10.4': { rating: 427 }, 'ql10.5': { rating: 458 }, 'ql10.6': { rating: 491 }, 'ql10.7': { rating: 543 } }, tank: { 'ql10.0': { hitpoints: 1220 }, 'ql10.1': { hitpoints: 1274 }, 'ql10.2': { hitpoints: 1329 }, 'ql10.3': { hitpoints: 1383 }, 'ql10.4': { hitpoints: 1438 }, 'ql10.5': { hitpoints: 1492 }, 'ql10.6': { hitpoints: 1526 }, 'ql10.7': { hitpoints: 1576 } } } };
src/javascript/data/tswcalc-data-gear.js
var tswcalc = tswcalc || {}; tswcalc.data = tswcalc.data || {}; tswcalc.data.custom_gear_data = { 'weapon': { '10.0': { weapon_power: 398 }, '10.1': { weapon_power: 411 }, '10.2': { weapon_power: 423 }, '10.3': { weapon_power: 434 }, '10.4': { weapon_power: 446 }, '10.5': { weapon_power: 457 }, '10.6': { weapon_power: 99999 }, '10.7': { weapon_power: 99999 } }, 'head': { heal_dps: { 'ql10.0': { rating: 559 }, 'ql10.1': { rating: 596 }, 'ql10.2': { rating: 636 }, 'ql10.3': { rating: 682 }, 'ql10.4': { rating: 735 }, 'ql10.5': { rating: 788 }, 'ql10.6': { rating: 99999 }, 'ql10.7': { rating: 99999 } }, tank: { 'ql10.0': { hitpoints: 2100 }, 'ql10.1': { hitpoints: 2194 }, 'ql10.2': { hitpoints: 2288 }, 'ql10.3': { hitpoints: 2382 }, 'ql10.4': { hitpoints: 2476 }, 'ql10.5': { hitpoints: 2570 }, 'ql10.6': { hitpoints: 99999 }, 'ql10.7': { hitpoints: 99999 } } }, 'major': { heal_dps: { 'ql10.0': { rating: 505 }, 'ql10.1': { rating: 538 }, 'ql10.2': { rating: 575 }, 'ql10.3': { rating: 616 }, 'ql10.4': { rating: 664 }, 'ql10.5': { rating: 712 }, 'ql10.6': { rating: 99999 }, 'ql10.7': { rating: 99999 } }, tank: { 'ql10.0': { hitpoints: 1897 }, 'ql10.1': { hitpoints: 1982 }, 'ql10.2': { hitpoints: 2067 }, 'ql10.3': { hitpoints: 2152 }, 'ql10.4': { hitpoints: 2237 }, 'ql10.5': { hitpoints: 2322 }, 'ql10.6': { hitpoints: 99999 }, 'ql10.7': { hitpoints: 99999 } } }, 'minor': { heal_dps: { 'ql10.0': { rating: 325 }, 'ql10.1': { rating: 346 }, 'ql10.2': { rating: 369 }, 'ql10.3': { rating: 396 }, 'ql10.4': { rating: 427 }, 'ql10.5': { rating: 458 }, 'ql10.6': { rating: 99999 }, 'ql10.7': { rating: 99999 } }, tank: { 'ql10.0': { hitpoints: 1220 }, 'ql10.1': { hitpoints: 1274 }, 'ql10.2': { hitpoints: 1329 }, 'ql10.3': { hitpoints: 1383 }, 'ql10.4': { hitpoints: 1438 }, 'ql10.5': { hitpoints: 1492 }, 'ql10.6': { hitpoints: 999999 }, 'ql10.7': { hitpoints: 999999 } } } };
10.6 and 10.7 gear values
src/javascript/data/tswcalc-data-gear.js
10.6 and 10.7 gear values
<ide><path>rc/javascript/data/tswcalc-data-gear.js <ide> weapon_power: 457 <ide> }, <ide> '10.6': { <del> weapon_power: 99999 <add> weapon_power: 464 <ide> }, <ide> '10.7': { <del> weapon_power: 99999 <add> weapon_power: 475 <ide> } <ide> }, <ide> 'head': { <ide> rating: 788 <ide> }, <ide> 'ql10.6': { <del> rating: 99999 <add> rating: 846 <ide> }, <ide> 'ql10.7': { <del> rating: 99999 <add> rating: 936 <ide> } <ide> }, <ide> tank: { <ide> hitpoints: 2570 <ide> }, <ide> 'ql10.6': { <del> hitpoints: 99999 <add> hitpoints: 2627 <ide> }, <ide> 'ql10.7': { <del> hitpoints: 99999 <add> hitpoints: 2714 <ide> } <ide> } <ide> }, <ide> rating: 712 <ide> }, <ide> 'ql10.6': { <del> rating: 99999 <add> rating: 764 <ide> }, <ide> 'ql10.7': { <del> rating: 99999 <add> rating: 845 <ide> } <ide> }, <ide> tank: { <ide> hitpoints: 2322 <ide> }, <ide> 'ql10.6': { <del> hitpoints: 99999 <add> hitpoints: 2373 <ide> }, <ide> 'ql10.7': { <del> hitpoints: 99999 <add> hitpoints: 2452 <ide> } <ide> } <ide> }, <ide> rating: 458 <ide> }, <ide> 'ql10.6': { <del> rating: 99999 <add> rating: 491 <ide> }, <ide> 'ql10.7': { <del> rating: 99999 <add> rating: 543 <ide> } <ide> }, <ide> tank: { <ide> hitpoints: 1492 <ide> }, <ide> 'ql10.6': { <del> hitpoints: 999999 <add> hitpoints: 1526 <ide> }, <ide> 'ql10.7': { <del> hitpoints: 999999 <add> hitpoints: 1576 <ide> } <ide> } <ide> }
Java
apache-2.0
a626640ecf75ea0dcb2558a7a4510089ab19f1ec
0
dschadow/Java-Web-Security,dschadow/Java-Web-Security
/* * Copyright (C) 2013 Dominik Schadow, [email protected] * * This file is part of Java-Web-Security . * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.dominikschadow.webappsecurity; import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONObject; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; /** * Simple CSP-Reporting servlet to receive and print out any JSON style CSP report with violations. 
* * @author Dominik Schadow */ @WebServlet(name = "CSPReporting", urlPatterns = {"/CSPReporting"}) public class CSPReporting extends HttpServlet { private static final long serialVersionUID = 1L; /** * @see javax.servlet.http.HttpServlet#HttpServlet() */ public CSPReporting() { super(); } /** * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest request, javax.servlet.http.HttpServletResponse response) */ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException { System.out.println("CSP-Reporting-Servlet"); try { BufferedReader reader = new BufferedReader(new InputStreamReader(request.getInputStream())); StringBuilder responseBuilder = new StringBuilder(); String inputStr; while ((inputStr = reader.readLine()) != null) { responseBuilder.append(inputStr); } System.out.println("REPORT " + responseBuilder.toString()); JSONObject json = new JSONObject(responseBuilder.toString()); JSONObject cspReport = json.getJSONObject("csp-report"); System.out.println("document-uri: " + cspReport.getString("document-uri")); System.out.println("referrer: " + cspReport.getString("referrer")); System.out.println("blocked-uri: " + cspReport.getString("blocked-uri")); System.out.println("violated-directive: " + cspReport.getString("violated-directive")); System.out.println("source-file: " + cspReport.getString("source-file")); System.out.println("script-sample: " + cspReport.getString("script-sample")); System.out.println("line-number: " + cspReport.getString("line-number")); } catch (IOException | JSONException e) { e.printStackTrace(); } } }
Ch07_CSP/src/main/java/de/dominikschadow/webappsecurity/CSPReporting.java
/* * Copyright (C) 2013 Dominik Schadow, [email protected] * * This file is part of Java-Web-Security . * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package de.dominikschadow.webappsecurity; import org.codehaus.jettison.json.JSONException; import org.codehaus.jettison.json.JSONObject; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; /** * Simple CSP-Reporting servlet to receive and print out any JSON style CSP report with violations. 
* * @author Dominik Schadow */ @WebServlet(name = "CSPReporting", urlPatterns = {"/CSPReporting"}) public class CSPReporting extends HttpServlet { private static final long serialVersionUID = 1L; /** * @see javax.servlet.http.HttpServlet#HttpServlet() */ public CSPReporting() { super(); } /** * @see javax.servlet.http.HttpServlet#doPost(javax.servlet.http.HttpServletRequest request, javax.servlet.http.HttpServletResponse response) */ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException { System.out.println("CSP-Reporting-Servlet"); try { BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(request.getInputStream())); StringBuilder responseStrBuilder = new StringBuilder(); String inputStr; while ((inputStr = bufferedReader.readLine()) != null) { responseStrBuilder.append(inputStr); } JSONObject json = new JSONObject(responseStrBuilder.toString()); System.out.println("JSON " + json.toString()); JSONObject cspReport = json.getJSONObject("csp-report"); System.out.println("document-uri: " + cspReport.getString("document-uri")); System.out.println("referrer: " + cspReport.getString("referrer")); System.out.println("blocked-uri: " + cspReport.getString("blocked-uri")); System.out.println("violated-directive: " + cspReport.getString("violated-directive")); System.out.println("source-file: " + cspReport.getString("source-file")); System.out.println("script-sample: " + cspReport.getString("script-sample")); System.out.println("line-number: " + cspReport.getString("line-number")); } catch (IOException e) { e.printStackTrace(); } catch (JSONException e) { e.printStackTrace(); } } }
Refactoring
Ch07_CSP/src/main/java/de/dominikschadow/webappsecurity/CSPReporting.java
Refactoring
<ide><path>h07_CSP/src/main/java/de/dominikschadow/webappsecurity/CSPReporting.java <ide> System.out.println("CSP-Reporting-Servlet"); <ide> <ide> try { <del> BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(request.getInputStream())); <del> StringBuilder responseStrBuilder = new StringBuilder(); <add> BufferedReader reader = new BufferedReader(new InputStreamReader(request.getInputStream())); <add> StringBuilder responseBuilder = new StringBuilder(); <ide> <ide> String inputStr; <del> while ((inputStr = bufferedReader.readLine()) != null) { <del> responseStrBuilder.append(inputStr); <add> while ((inputStr = reader.readLine()) != null) { <add> responseBuilder.append(inputStr); <ide> } <ide> <del> JSONObject json = new JSONObject(responseStrBuilder.toString()); <del> System.out.println("JSON " + json.toString()); <add> System.out.println("REPORT " + responseBuilder.toString()); <ide> <add> JSONObject json = new JSONObject(responseBuilder.toString()); <ide> JSONObject cspReport = json.getJSONObject("csp-report"); <ide> System.out.println("document-uri: " + cspReport.getString("document-uri")); <ide> System.out.println("referrer: " + cspReport.getString("referrer")); <ide> System.out.println("source-file: " + cspReport.getString("source-file")); <ide> System.out.println("script-sample: " + cspReport.getString("script-sample")); <ide> System.out.println("line-number: " + cspReport.getString("line-number")); <del> } catch (IOException e) { <del> e.printStackTrace(); <del> } catch (JSONException e) { <add> } catch (IOException | JSONException e) { <ide> e.printStackTrace(); <ide> } <ide> }
Java
apache-2.0
86d08b2e2b7b9b48f57210f8528f4847634a8501
0
strapdata/elassandra,vroyer/elassandra,strapdata/elassandra,strapdata/elassandra,strapdata/elassandra,strapdata/elassandra,vroyer/elassandra,vroyer/elassandra
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.logging; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.common.SuppressLoggerChecks; import org.elasticsearch.common.util.concurrent.ThreadContext; import java.nio.charset.Charset; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; import java.time.format.SignStyle; import java.util.BitSet; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; import java.util.regex.Matcher; import java.util.regex.Pattern; import static java.time.temporal.ChronoField.DAY_OF_MONTH; import static java.time.temporal.ChronoField.DAY_OF_WEEK; import static java.time.temporal.ChronoField.HOUR_OF_DAY; import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; import static java.time.temporal.ChronoField.MONTH_OF_YEAR; import static 
java.time.temporal.ChronoField.SECOND_OF_MINUTE; import static java.time.temporal.ChronoField.YEAR; /** * A logger that logs deprecation notices. */ public class DeprecationLogger { private final Logger logger; /** * This is set once by the {@code Node} constructor, but it uses {@link CopyOnWriteArraySet} to ensure that tests can run in parallel. * <p> * Integration tests will create separate nodes within the same classloader, thus leading to a shared, {@code static} state. * In order for all tests to appropriately be handled, this must be able to remember <em>all</em> {@link ThreadContext}s that it is * given in a thread safe manner. * <p> * For actual usage, multiple nodes do not share the same JVM and therefore this will only be set once in practice. */ private static final CopyOnWriteArraySet<ThreadContext> THREAD_CONTEXT = new CopyOnWriteArraySet<>(); /** * Set the {@link ThreadContext} used to add deprecation headers to network responses. * <p> * This is expected to <em>only</em> be invoked by the {@code Node}'s constructor (therefore once outside of tests). * * @param threadContext The thread context owned by the {@code ThreadPool} (and implicitly a {@code Node}) * @throws IllegalStateException if this {@code threadContext} has already been set */ public static void setThreadContext(ThreadContext threadContext) { Objects.requireNonNull(threadContext, "Cannot register a null ThreadContext"); // add returning false means it _did_ have it already if (THREAD_CONTEXT.add(threadContext) == false) { throw new IllegalStateException("Double-setting ThreadContext not allowed!"); } } /** * Remove the {@link ThreadContext} used to add deprecation headers to network responses. * <p> * This is expected to <em>only</em> be invoked by the {@code Node}'s {@code close} method (therefore once outside of tests). 
* * @param threadContext The thread context owned by the {@code ThreadPool} (and implicitly a {@code Node}) * @throws IllegalStateException if this {@code threadContext} is unknown (and presumably already unset before) */ public static void removeThreadContext(ThreadContext threadContext) { assert threadContext != null; // remove returning false means it did not have it already if (THREAD_CONTEXT.remove(threadContext) == false) { throw new IllegalStateException("Removing unknown ThreadContext not allowed!"); } } /** * Creates a new deprecation logger based on the parent logger. Automatically * prefixes the logger name with "deprecation", if it starts with "org.elasticsearch.", * it replaces "org.elasticsearch" with "org.elasticsearch.deprecation" to maintain * the "org.elasticsearch" namespace. */ public DeprecationLogger(Logger parentLogger) { String name = parentLogger.getName(); if (name.startsWith("org.elasticsearch")) { name = name.replace("org.elasticsearch.", "org.elasticsearch.deprecation."); } else { name = "deprecation." + name; } this.logger = LogManager.getLogger(name); } /** * Logs a deprecation message, adding a formatted warning message as a response header on the thread context. */ public void deprecated(String msg, Object... params) { deprecated(THREAD_CONTEXT, msg, params); } // LRU set of keys used to determine if a deprecation message should be emitted to the deprecation logs private Set<String> keys = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<String, Boolean>() { @Override protected boolean removeEldestEntry(final Map.Entry eldest) { return size() > 128; } })); /** * Adds a formatted warning message as a response header on the thread context, and logs a deprecation message if the associated key has * not recently been seen. 
* * @param key the key used to determine if this deprecation should be logged * @param msg the message to log * @param params parameters to the message */ public void deprecatedAndMaybeLog(final String key, final String msg, final Object... params) { deprecated(THREAD_CONTEXT, msg, keys.add(key), params); } /* * RFC7234 specifies the warning format as warn-code <space> warn-agent <space> "warn-text" [<space> "warn-date"]. Here, warn-code is a * three-digit number with various standard warn codes specified. The warn code 299 is apt for our purposes as it represents a * miscellaneous persistent warning (can be presented to a human, or logged, and must not be removed by a cache). The warn-agent is an * arbitrary token; here we use the Elasticsearch version and build hash. The warn text must be quoted. The warn-date is an optional * quoted field that can be in a variety of specified date formats; here we use RFC 1123 format. */ private static final String WARNING_PREFIX = String.format( Locale.ROOT, "299 Elasticsearch-%s%s-%s", Version.CURRENT.toString(), Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : "", Build.CURRENT.shortHash()); /* * RFC 7234 section 5.5 specifies that the warn-date is a quoted HTTP-date. HTTP-date is defined in RFC 7234 Appendix B as being from * RFC 7231 section 7.1.1.1. RFC 7231 specifies an HTTP-date as an IMF-fixdate (or an obs-date referring to obsolete formats). The * grammar for IMF-fixdate is specified as 'day-name "," SP date1 SP time-of-day SP GMT'. Here, day-name is * (Mon|Tue|Wed|Thu|Fri|Sat|Sun). Then, date1 is 'day SP month SP year' where day is 2DIGIT, month is * (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec), and year is 4DIGIT. Lastly, time-of-day is 'hour ":" minute ":" second' where * hour is 2DIGIT, minute is 2DIGIT, and second is 2DIGIT. Finally, 2DIGIT and 4DIGIT have the obvious definitions. 
*/ private static final DateTimeFormatter RFC_7231_DATE_TIME; static { final Map<Long, String> dow = new HashMap<>(); dow.put(1L, "Mon"); dow.put(2L, "Tue"); dow.put(3L, "Wed"); dow.put(4L, "Thu"); dow.put(5L, "Fri"); dow.put(6L, "Sat"); dow.put(7L, "Sun"); final Map<Long, String> moy = new HashMap<>(); moy.put(1L, "Jan"); moy.put(2L, "Feb"); moy.put(3L, "Mar"); moy.put(4L, "Apr"); moy.put(5L, "May"); moy.put(6L, "Jun"); moy.put(7L, "Jul"); moy.put(8L, "Aug"); moy.put(9L, "Sep"); moy.put(10L, "Oct"); moy.put(11L, "Nov"); moy.put(12L, "Dec"); RFC_7231_DATE_TIME = new DateTimeFormatterBuilder() .parseCaseInsensitive() .parseLenient() .optionalStart() .appendText(DAY_OF_WEEK, dow) .appendLiteral(", ") .optionalEnd() .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE) .appendLiteral(' ') .appendText(MONTH_OF_YEAR, moy) .appendLiteral(' ') .appendValue(YEAR, 4) .appendLiteral(' ') .appendValue(HOUR_OF_DAY, 2) .appendLiteral(':') .appendValue(MINUTE_OF_HOUR, 2) .optionalStart() .appendLiteral(':') .appendValue(SECOND_OF_MINUTE, 2) .optionalEnd() .appendLiteral(' ') .appendOffset("+HHMM", "GMT") .toFormatter(Locale.getDefault(Locale.Category.FORMAT)); } private static final String STARTUP_TIME = RFC_7231_DATE_TIME.format(ZonedDateTime.now(ZoneId.of("GMT"))); /** * Regular expression to test if a string matches the RFC7234 specification for warning headers. This pattern assumes that the warn code * is always 299. Further, this pattern assumes that the warn agent represents a version of Elasticsearch including the build hash. 
*/ public static Pattern WARNING_HEADER_PATTERN = Pattern.compile( "299 " + // warn code "Elasticsearch-\\d+\\.\\d+\\.\\d+(?:-(?:alpha|beta|rc)\\d+)?(?:-SNAPSHOT)?-(?:[a-f0-9]{7}|Unknown) " + // warn agent "\"((?:\t| |!|[\\x23-\\x5B]|[\\x5D-\\x7E]|[\\x80-\\xFF]|\\\\|\\\\\")*)\" " + // quoted warning value, captured // quoted RFC 1123 date format "\"" + // opening quote "(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), " + // weekday "\\d{2} " + // 2-digit day "(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) " + // month "\\d{4} " + // 4-digit year "\\d{2}:\\d{2}:\\d{2} " + // (two-digit hour):(two-digit minute):(two-digit second) "GMT" + // GMT "\""); // closing quote /** * Extracts the warning value from the value of a warning header that is formatted according to RFC 7234. That is, given a string * {@code 299 Elasticsearch-6.0.0 "warning value" "Sat, 25 Feb 2017 10:27:43 GMT"}, the return value of this method would be {@code * warning value}. * * @param s the value of a warning header formatted according to RFC 7234. * @return the extracted warning value */ public static String extractWarningValueFromWarningHeader(final String s) { /* * We know the exact format of the warning header, so to extract the warning value we can skip forward from the front to the first * quote, and skip backwards from the end to the penultimate quote: * * 299 Elasticsearch-6.0.0 "warning value" "Sat, 25, Feb 2017 10:27:43 GMT" * ^ ^ ^ * firstQuote penultimateQuote lastQuote * * We do it this way rather than seeking forward after the first quote because there could be escaped quotes in the warning value * but since there are none in the warning date, we can skip backwards to find the quote that closes the quoted warning value. * * We parse this manually rather than using the capturing regular expression because the regular expression involves a lot of * backtracking and carries a performance penalty. 
However, when assertions are enabled, we still use the regular expression to * verify that we are maintaining the warning header format. */ final int firstQuote = s.indexOf('\"'); final int lastQuote = s.lastIndexOf('\"'); final int penultimateQuote = s.lastIndexOf('\"', lastQuote - 1); final String warningValue = s.substring(firstQuote + 1, penultimateQuote - 2); assert assertWarningValue(s, warningValue); return warningValue; } /** * Assert that the specified string has the warning value equal to the provided warning value. * * @param s the string representing a full warning header * @param warningValue the expected warning header * @return {@code true} if the specified string has the expected warning value */ private static boolean assertWarningValue(final String s, final String warningValue) { final Matcher matcher = WARNING_HEADER_PATTERN.matcher(s); final boolean matches = matcher.matches(); assert matches; return matcher.group(1).equals(warningValue); } /** * Logs a deprecated message to the deprecation log, as well as to the local {@link ThreadContext}. * * @param threadContexts The node's {@link ThreadContext} (outside of concurrent tests, this should only ever have one context). * @param message The deprecation message. * @param params The parameters used to fill in the message, if any exist. */ void deprecated(final Set<ThreadContext> threadContexts, final String message, final Object... params) { deprecated(threadContexts, message, true, params); } @SuppressLoggerChecks(reason = "safely delegates to logger") void deprecated(final Set<ThreadContext> threadContexts, final String message, final boolean log, final Object... 
params) { final Iterator<ThreadContext> iterator = threadContexts.iterator(); if (iterator.hasNext()) { final String formattedMessage = LoggerMessageFormat.format(message, params); final String warningHeaderValue = formatWarning(formattedMessage); assert WARNING_HEADER_PATTERN.matcher(warningHeaderValue).matches(); assert extractWarningValueFromWarningHeader(warningHeaderValue).equals(escapeAndEncode(formattedMessage)); while (iterator.hasNext()) { try { final ThreadContext next = iterator.next(); next.addResponseHeader("Warning", warningHeaderValue, DeprecationLogger::extractWarningValueFromWarningHeader); } catch (final IllegalStateException e) { // ignored; it should be removed shortly } } } if (log) { logger.warn(message, params); } } /** * Format a warning string in the proper warning format by prepending a warn code, warn agent, wrapping the warning string in quotes, * and appending the RFC 7231 date. * * @param s the warning string to format * @return a warning value formatted according to RFC 7234 */ public static String formatWarning(final String s) { return WARNING_PREFIX + " " + "\"" + escapeAndEncode(s) + "\"" + " " + "\"" + STARTUP_TIME + "\""; } /** * Escape and encode a string as a valid RFC 7230 quoted-string. * * @param s the string to escape and encode * @return the escaped and encoded string */ public static String escapeAndEncode(final String s) { return encode(escapeBackslashesAndQuotes(s)); } /** * Escape backslashes and quotes in the specified string. * * @param s the string to escape * @return the escaped string */ static String escapeBackslashesAndQuotes(final String s) { /* * We want a fast path check to avoid creating the string builder and copying characters if needed. So we walk the string looking * for either of the characters that we need to escape. 
If we find a character that needs escaping, we start over and */ boolean escapingNeeded = false; for (int i = 0; i < s.length(); i++) { final char c = s.charAt(i); if (c == '\\' || c == '"') { escapingNeeded = true; break; } } if (escapingNeeded) { final StringBuilder sb = new StringBuilder(); for (final char c : s.toCharArray()) { if (c == '\\' || c == '"') { sb.append("\\"); } sb.append(c); } return sb.toString(); } else { return s; } } private static BitSet doesNotNeedEncoding; static { doesNotNeedEncoding = new BitSet(1 + 0xFF); doesNotNeedEncoding.set('\t'); doesNotNeedEncoding.set(' '); doesNotNeedEncoding.set('!'); doesNotNeedEncoding.set('\\'); doesNotNeedEncoding.set('"'); // we have to skip '%' which is 0x25 so that it is percent-encoded too for (int i = 0x23; i <= 0x24; i++) { doesNotNeedEncoding.set(i); } for (int i = 0x26; i <= 0x5B; i++) { doesNotNeedEncoding.set(i); } for (int i = 0x5D; i <= 0x7E; i++) { doesNotNeedEncoding.set(i); } for (int i = 0x80; i <= 0xFF; i++) { doesNotNeedEncoding.set(i); } assert doesNotNeedEncoding.get('%') == false : doesNotNeedEncoding; } private static final Charset UTF_8 = Charset.forName("UTF-8"); /** * Encode a string containing characters outside of the legal characters for an RFC 7230 quoted-string. 
* * @param s the string to encode * @return the encoded string */ static String encode(final String s) { // first check if the string needs any encoding; this is the fast path and we want to avoid creating a string builder and copying boolean encodingNeeded = false; for (int i = 0; i < s.length(); i++) { int current = s.charAt(i); if (doesNotNeedEncoding.get(current) == false) { encodingNeeded = true; break; } } if (encodingNeeded == false) { return s; } final StringBuilder sb = new StringBuilder(s.length()); for (int i = 0; i < s.length();) { int current = (int) s.charAt(i); /* * Either the character does not need encoding or it does; when the character does not need encoding we append the character to * a buffer and move to the next character and when the character does need encoding, we peel off as many characters as possible * which we encode using UTF-8 until we encounter another character that does not need encoding. */ if (doesNotNeedEncoding.get(current)) { // append directly and move to the next character sb.append((char) current); i++; } else { int startIndex = i; do { i++; } while (i < s.length() && !doesNotNeedEncoding.get(s.charAt(i))); final byte[] bytes = s.substring(startIndex, i).getBytes(UTF_8); // noinspection ForLoopReplaceableByForEach for (int j = 0; j < bytes.length; j++) { sb.append('%').append(hex(bytes[j] >> 4)).append(hex(bytes[j])); } } } return sb.toString(); } private static char hex(int b) { final char ch = Character.forDigit(b & 0xF, 16); if (Character.isLetter(ch)) { return Character.toUpperCase(ch); } else { return ch; } } }
server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.common.logging; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.common.SuppressLoggerChecks; import org.elasticsearch.common.util.concurrent.ThreadContext; import java.nio.charset.Charset; import java.time.ZoneId; import java.time.ZonedDateTime; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; import java.time.format.SignStyle; import java.util.BitSet; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.CopyOnWriteArraySet; import java.util.regex.Matcher; import java.util.regex.Pattern; import static java.time.temporal.ChronoField.DAY_OF_MONTH; import static java.time.temporal.ChronoField.DAY_OF_WEEK; import static java.time.temporal.ChronoField.HOUR_OF_DAY; import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; import static java.time.temporal.ChronoField.MONTH_OF_YEAR; import static 
java.time.temporal.ChronoField.SECOND_OF_MINUTE; import static java.time.temporal.ChronoField.YEAR; /** * A logger that logs deprecation notices. */ public class DeprecationLogger { private final Logger logger; /** * This is set once by the {@code Node} constructor, but it uses {@link CopyOnWriteArraySet} to ensure that tests can run in parallel. * <p> * Integration tests will create separate nodes within the same classloader, thus leading to a shared, {@code static} state. * In order for all tests to appropriately be handled, this must be able to remember <em>all</em> {@link ThreadContext}s that it is * given in a thread safe manner. * <p> * For actual usage, multiple nodes do not share the same JVM and therefore this will only be set once in practice. */ private static final CopyOnWriteArraySet<ThreadContext> THREAD_CONTEXT = new CopyOnWriteArraySet<>(); /** * Set the {@link ThreadContext} used to add deprecation headers to network responses. * <p> * This is expected to <em>only</em> be invoked by the {@code Node}'s constructor (therefore once outside of tests). * * @param threadContext The thread context owned by the {@code ThreadPool} (and implicitly a {@code Node}) * @throws IllegalStateException if this {@code threadContext} has already been set */ public static void setThreadContext(ThreadContext threadContext) { Objects.requireNonNull(threadContext, "Cannot register a null ThreadContext"); // add returning false means it _did_ have it already if (THREAD_CONTEXT.add(threadContext) == false) { throw new IllegalStateException("Double-setting ThreadContext not allowed!"); } } /** * Remove the {@link ThreadContext} used to add deprecation headers to network responses. * <p> * This is expected to <em>only</em> be invoked by the {@code Node}'s {@code close} method (therefore once outside of tests). 
* * @param threadContext The thread context owned by the {@code ThreadPool} (and implicitly a {@code Node}) * @throws IllegalStateException if this {@code threadContext} is unknown (and presumably already unset before) */ public static void removeThreadContext(ThreadContext threadContext) { assert threadContext != null; // remove returning false means it did not have it already if (THREAD_CONTEXT.remove(threadContext) == false) { throw new IllegalStateException("Removing unknown ThreadContext not allowed!"); } } /** * Creates a new deprecation logger based on the parent logger. Automatically * prefixes the logger name with "deprecation", if it starts with "org.elasticsearch.", * it replaces "org.elasticsearch" with "org.elasticsearch.deprecation" to maintain * the "org.elasticsearch" namespace. */ public DeprecationLogger(Logger parentLogger) { String name = parentLogger.getName(); if (name.startsWith("org.elasticsearch")) { name = name.replace("org.elasticsearch.", "org.elasticsearch.deprecation."); } else { name = "deprecation." + name; } this.logger = LogManager.getLogger(name); } /** * Logs a deprecation message, adding a formatted warning message as a response header on the thread context. */ public void deprecated(String msg, Object... params) { deprecated(THREAD_CONTEXT, msg, params); } // LRU set of keys used to determine if a deprecation message should be emitted to the deprecation logs private Set<String> keys = Collections.newSetFromMap(Collections.synchronizedMap(new LinkedHashMap<String, Boolean>() { @Override protected boolean removeEldestEntry(final Map.Entry eldest) { return size() > 128; } })); /** * Adds a formatted warning message as a response header on the thread context, and logs a deprecation message if the associated key has * not recently been seen. 
* * @param key the key used to determine if this deprecation should be logged * @param msg the message to log * @param params parameters to the message */ public void deprecatedAndMaybeLog(final String key, final String msg, final Object... params) { deprecated(THREAD_CONTEXT, msg, keys.add(key), params); } /* * RFC7234 specifies the warning format as warn-code <space> warn-agent <space> "warn-text" [<space> "warn-date"]. Here, warn-code is a * three-digit number with various standard warn codes specified. The warn code 299 is apt for our purposes as it represents a * miscellaneous persistent warning (can be presented to a human, or logged, and must not be removed by a cache). The warn-agent is an * arbitrary token; here we use the Elasticsearch version and build hash. The warn text must be quoted. The warn-date is an optional * quoted field that can be in a variety of specified date formats; here we use RFC 1123 format. */ private static final String WARNING_FORMAT = String.format( Locale.ROOT, "299 Elasticsearch-%s%s-%s ", Version.CURRENT.toString(), Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : "", Build.CURRENT.shortHash()) + "\"%s\" \"%s\""; /* * RFC 7234 section 5.5 specifies that the warn-date is a quoted HTTP-date. HTTP-date is defined in RFC 7234 Appendix B as being from * RFC 7231 section 7.1.1.1. RFC 7231 specifies an HTTP-date as an IMF-fixdate (or an obs-date referring to obsolete formats). The * grammar for IMF-fixdate is specified as 'day-name "," SP date1 SP time-of-day SP GMT'. Here, day-name is * (Mon|Tue|Wed|Thu|Fri|Sat|Sun). Then, date1 is 'day SP month SP year' where day is 2DIGIT, month is * (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec), and year is 4DIGIT. Lastly, time-of-day is 'hour ":" minute ":" second' where * hour is 2DIGIT, minute is 2DIGIT, and second is 2DIGIT. Finally, 2DIGIT and 4DIGIT have the obvious definitions. 
*/ private static final DateTimeFormatter RFC_7231_DATE_TIME; static { final Map<Long, String> dow = new HashMap<>(); dow.put(1L, "Mon"); dow.put(2L, "Tue"); dow.put(3L, "Wed"); dow.put(4L, "Thu"); dow.put(5L, "Fri"); dow.put(6L, "Sat"); dow.put(7L, "Sun"); final Map<Long, String> moy = new HashMap<>(); moy.put(1L, "Jan"); moy.put(2L, "Feb"); moy.put(3L, "Mar"); moy.put(4L, "Apr"); moy.put(5L, "May"); moy.put(6L, "Jun"); moy.put(7L, "Jul"); moy.put(8L, "Aug"); moy.put(9L, "Sep"); moy.put(10L, "Oct"); moy.put(11L, "Nov"); moy.put(12L, "Dec"); RFC_7231_DATE_TIME = new DateTimeFormatterBuilder() .parseCaseInsensitive() .parseLenient() .optionalStart() .appendText(DAY_OF_WEEK, dow) .appendLiteral(", ") .optionalEnd() .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE) .appendLiteral(' ') .appendText(MONTH_OF_YEAR, moy) .appendLiteral(' ') .appendValue(YEAR, 4) .appendLiteral(' ') .appendValue(HOUR_OF_DAY, 2) .appendLiteral(':') .appendValue(MINUTE_OF_HOUR, 2) .optionalStart() .appendLiteral(':') .appendValue(SECOND_OF_MINUTE, 2) .optionalEnd() .appendLiteral(' ') .appendOffset("+HHMM", "GMT") .toFormatter(Locale.getDefault(Locale.Category.FORMAT)); } private static final ZoneId GMT = ZoneId.of("GMT"); /** * Regular expression to test if a string matches the RFC7234 specification for warning headers. This pattern assumes that the warn code * is always 299. Further, this pattern assumes that the warn agent represents a version of Elasticsearch including the build hash. 
*/ public static Pattern WARNING_HEADER_PATTERN = Pattern.compile( "299 " + // warn code "Elasticsearch-\\d+\\.\\d+\\.\\d+(?:-(?:alpha|beta|rc)\\d+)?(?:-SNAPSHOT)?-(?:[a-f0-9]{7}|Unknown) " + // warn agent "\"((?:\t| |!|[\\x23-\\x5B]|[\\x5D-\\x7E]|[\\x80-\\xFF]|\\\\|\\\\\")*)\" " + // quoted warning value, captured // quoted RFC 1123 date format "\"" + // opening quote "(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), " + // weekday "\\d{2} " + // 2-digit day "(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) " + // month "\\d{4} " + // 4-digit year "\\d{2}:\\d{2}:\\d{2} " + // (two-digit hour):(two-digit minute):(two-digit second) "GMT" + // GMT "\""); // closing quote /** * Extracts the warning value from the value of a warning header that is formatted according to RFC 7234. That is, given a string * {@code 299 Elasticsearch-6.0.0 "warning value" "Sat, 25 Feb 2017 10:27:43 GMT"}, the return value of this method would be {@code * warning value}. * * @param s the value of a warning header formatted according to RFC 7234. * @return the extracted warning value */ public static String extractWarningValueFromWarningHeader(final String s) { /* * We know the exact format of the warning header, so to extract the warning value we can skip forward from the front to the first * quote, and skip backwards from the end to the penultimate quote: * * 299 Elasticsearch-6.0.0 "warning value" "Sat, 25, Feb 2017 10:27:43 GMT" * ^ ^ ^ * firstQuote penultimateQuote lastQuote * * We do it this way rather than seeking forward after the first quote because there could be escaped quotes in the warning value * but since there are none in the warning date, we can skip backwards to find the quote that closes the quoted warning value. * * We parse this manually rather than using the capturing regular expression because the regular expression involves a lot of * backtracking and carries a performance penalty. 
However, when assertions are enabled, we still use the regular expression to * verify that we are maintaining the warning header format. */ final int firstQuote = s.indexOf('\"'); final int lastQuote = s.lastIndexOf('\"'); final int penultimateQuote = s.lastIndexOf('\"', lastQuote - 1); final String warningValue = s.substring(firstQuote + 1, penultimateQuote - 2); assert assertWarningValue(s, warningValue); return warningValue; } /** * Assert that the specified string has the warning value equal to the provided warning value. * * @param s the string representing a full warning header * @param warningValue the expected warning header * @return {@code true} if the specified string has the expected warning value */ private static boolean assertWarningValue(final String s, final String warningValue) { final Matcher matcher = WARNING_HEADER_PATTERN.matcher(s); final boolean matches = matcher.matches(); assert matches; return matcher.group(1).equals(warningValue); } /** * Logs a deprecated message to the deprecation log, as well as to the local {@link ThreadContext}. * * @param threadContexts The node's {@link ThreadContext} (outside of concurrent tests, this should only ever have one context). * @param message The deprecation message. * @param params The parameters used to fill in the message, if any exist. */ void deprecated(final Set<ThreadContext> threadContexts, final String message, final Object... params) { deprecated(threadContexts, message, true, params); } @SuppressLoggerChecks(reason = "safely delegates to logger") void deprecated(final Set<ThreadContext> threadContexts, final String message, final boolean log, final Object... 
params) { final Iterator<ThreadContext> iterator = threadContexts.iterator(); if (iterator.hasNext()) { final String formattedMessage = LoggerMessageFormat.format(message, params); final String warningHeaderValue = formatWarning(formattedMessage); assert WARNING_HEADER_PATTERN.matcher(warningHeaderValue).matches(); assert extractWarningValueFromWarningHeader(warningHeaderValue).equals(escapeAndEncode(formattedMessage)); while (iterator.hasNext()) { try { final ThreadContext next = iterator.next(); next.addResponseHeader("Warning", warningHeaderValue, DeprecationLogger::extractWarningValueFromWarningHeader); } catch (final IllegalStateException e) { // ignored; it should be removed shortly } } } if (log) { logger.warn(message, params); } } /** * Format a warning string in the proper warning format by prepending a warn code, warn agent, wrapping the warning string in quotes, * and appending the RFC 7231 date. * * @param s the warning string to format * @return a warning value formatted according to RFC 7234 */ public static String formatWarning(final String s) { return String.format(Locale.ROOT, WARNING_FORMAT, escapeAndEncode(s), RFC_7231_DATE_TIME.format(ZonedDateTime.now(GMT))); } /** * Escape and encode a string as a valid RFC 7230 quoted-string. * * @param s the string to escape and encode * @return the escaped and encoded string */ public static String escapeAndEncode(final String s) { return encode(escapeBackslashesAndQuotes(s)); } /** * Escape backslashes and quotes in the specified string. 
* * @param s the string to escape * @return the escaped string */ static String escapeBackslashesAndQuotes(final String s) { return s.replaceAll("([\"\\\\])", "\\\\$1"); } private static BitSet doesNotNeedEncoding; static { doesNotNeedEncoding = new BitSet(1 + 0xFF); doesNotNeedEncoding.set('\t'); doesNotNeedEncoding.set(' '); doesNotNeedEncoding.set('!'); doesNotNeedEncoding.set('\\'); doesNotNeedEncoding.set('"'); // we have to skip '%' which is 0x25 so that it is percent-encoded too for (int i = 0x23; i <= 0x24; i++) { doesNotNeedEncoding.set(i); } for (int i = 0x26; i <= 0x5B; i++) { doesNotNeedEncoding.set(i); } for (int i = 0x5D; i <= 0x7E; i++) { doesNotNeedEncoding.set(i); } for (int i = 0x80; i <= 0xFF; i++) { doesNotNeedEncoding.set(i); } assert !doesNotNeedEncoding.get('%'); } private static final Charset UTF_8 = Charset.forName("UTF-8"); /** * Encode a string containing characters outside of the legal characters for an RFC 7230 quoted-string. * * @param s the string to encode * @return the encoded string */ static String encode(final String s) { final StringBuilder sb = new StringBuilder(s.length()); boolean encodingNeeded = false; for (int i = 0; i < s.length();) { int current = (int) s.charAt(i); /* * Either the character does not need encoding or it does; when the character does not need encoding we append the character to * a buffer and move to the next character and when the character does need encoding, we peel off as many characters as possible * which we encode using UTF-8 until we encounter another character that does not need encoding. 
*/ if (doesNotNeedEncoding.get(current)) { // append directly and move to the next character sb.append((char) current); i++; } else { int startIndex = i; do { i++; } while (i < s.length() && !doesNotNeedEncoding.get(s.charAt(i))); final byte[] bytes = s.substring(startIndex, i).getBytes(UTF_8); // noinspection ForLoopReplaceableByForEach for (int j = 0; j < bytes.length; j++) { sb.append('%').append(hex(bytes[j] >> 4)).append(hex(bytes[j])); } encodingNeeded = true; } } return encodingNeeded ? sb.toString() : s; } private static char hex(int b) { final char ch = Character.forDigit(b & 0xF, 16); if (Character.isLetter(ch)) { return Character.toUpperCase(ch); } else { return ch; } } }
Add some deprecation optimizations (#37597) This commit optimizes some of the performance issues from using deprecation logging: - we optimize encoding the deprecation value - we optimize formatting the deprecation string - we optimize away getting the current time (by using cached startup time)
server/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java
Add some deprecation optimizations (#37597)
<ide><path>erver/src/main/java/org/elasticsearch/common/logging/DeprecationLogger.java <ide> * arbitrary token; here we use the Elasticsearch version and build hash. The warn text must be quoted. The warn-date is an optional <ide> * quoted field that can be in a variety of specified date formats; here we use RFC 1123 format. <ide> */ <del> private static final String WARNING_FORMAT = <add> private static final String WARNING_PREFIX = <ide> String.format( <ide> Locale.ROOT, <del> "299 Elasticsearch-%s%s-%s ", <add> "299 Elasticsearch-%s%s-%s", <ide> Version.CURRENT.toString(), <ide> Build.CURRENT.isSnapshot() ? "-SNAPSHOT" : "", <del> Build.CURRENT.shortHash()) + <del> "\"%s\" \"%s\""; <add> Build.CURRENT.shortHash()); <ide> <ide> /* <ide> * RFC 7234 section 5.5 specifies that the warn-date is a quoted HTTP-date. HTTP-date is defined in RFC 7234 Appendix B as being from <ide> .toFormatter(Locale.getDefault(Locale.Category.FORMAT)); <ide> } <ide> <del> private static final ZoneId GMT = ZoneId.of("GMT"); <add> private static final String STARTUP_TIME = RFC_7231_DATE_TIME.format(ZonedDateTime.now(ZoneId.of("GMT"))); <ide> <ide> /** <ide> * Regular expression to test if a string matches the RFC7234 specification for warning headers. This pattern assumes that the warn code <ide> * @return a warning value formatted according to RFC 7234 <ide> */ <ide> public static String formatWarning(final String s) { <del> return String.format(Locale.ROOT, WARNING_FORMAT, escapeAndEncode(s), RFC_7231_DATE_TIME.format(ZonedDateTime.now(GMT))); <add> return WARNING_PREFIX + " " <add> + "\"" + escapeAndEncode(s) + "\"" + " " <add> + "\"" + STARTUP_TIME + "\""; <ide> } <ide> <ide> /** <ide> * @return the escaped string <ide> */ <ide> static String escapeBackslashesAndQuotes(final String s) { <del> return s.replaceAll("([\"\\\\])", "\\\\$1"); <add> /* <add> * We want a fast path check to avoid creating the string builder and copying characters if needed. 
So we walk the string looking <add> * for either of the characters that we need to escape. If we find a character that needs escaping, we start over and <add> */ <add> boolean escapingNeeded = false; <add> for (int i = 0; i < s.length(); i++) { <add> final char c = s.charAt(i); <add> if (c == '\\' || c == '"') { <add> escapingNeeded = true; <add> break; <add> } <add> } <add> <add> if (escapingNeeded) { <add> final StringBuilder sb = new StringBuilder(); <add> for (final char c : s.toCharArray()) { <add> if (c == '\\' || c == '"') { <add> sb.append("\\"); <add> } <add> sb.append(c); <add> } <add> return sb.toString(); <add> } else { <add> return s; <add> } <ide> } <ide> <ide> private static BitSet doesNotNeedEncoding; <ide> for (int i = 0x80; i <= 0xFF; i++) { <ide> doesNotNeedEncoding.set(i); <ide> } <del> assert !doesNotNeedEncoding.get('%'); <add> assert doesNotNeedEncoding.get('%') == false : doesNotNeedEncoding; <ide> } <ide> <ide> private static final Charset UTF_8 = Charset.forName("UTF-8"); <ide> * @return the encoded string <ide> */ <ide> static String encode(final String s) { <add> // first check if the string needs any encoding; this is the fast path and we want to avoid creating a string builder and copying <add> boolean encodingNeeded = false; <add> for (int i = 0; i < s.length(); i++) { <add> int current = s.charAt(i); <add> if (doesNotNeedEncoding.get(current) == false) { <add> encodingNeeded = true; <add> break; <add> } <add> } <add> <add> if (encodingNeeded == false) { <add> return s; <add> } <add> <ide> final StringBuilder sb = new StringBuilder(s.length()); <del> boolean encodingNeeded = false; <ide> for (int i = 0; i < s.length();) { <ide> int current = (int) s.charAt(i); <ide> /* <ide> for (int j = 0; j < bytes.length; j++) { <ide> sb.append('%').append(hex(bytes[j] >> 4)).append(hex(bytes[j])); <ide> } <del> encodingNeeded = true; <ide> } <ide> } <del> return encodingNeeded ? 
sb.toString() : s; <add> return sb.toString(); <ide> } <ide> <ide> private static char hex(int b) {
Java
mit
5675f1d85e1ad74dabb3b3ae0b8eae14cbefc3d3
0
drtnf/resistanceAI
package cits3001_2016s2; import java.util.*; import java.io.*; /** * A Class to represent a single game of resistance * @author Tim French * */ public class Game{ private Map<Character,Agent> players; private Set<Character> spies; private String playerString = ""; private String spyString = ""; private String resString = ""; private int numPlayers = 0; private static final int[] spyNum = {2,2,3,3,3,4}; //spyNum[n-5] is the number of spies in an n player game private static final int[][] missionNum = {{2,3,2,3,3},{2,3,4,3,4},{2,3,3,4,4},{3,4,4,5,5},{3,4,4,5,5},{3,4,4,5,5}}; //missionNum[n-5][i] is the number to send on mission i in a in an n player game private Random rand; private File logFile; private boolean logging = false; private boolean started = false; private long stopwatch = 0; /** * Creates an empty game. * Game log printed to stdout * */ public Game(){ init(); } /** * Creates an empty game * @param logFile path to the log file * */ public Game(String fName){ logFile = new File(fName); logging = true; init(); } /** * Initializes the data structures for the game * */ private void init(){ players = new HashMap<Character,Agent>(); spies = new HashSet<Character>(); rand = new Random(); long seed = rand.nextLong(); rand.setSeed(seed); log("Seed: "+seed); } /** * Writes the String to the log file * @param msg the String to log * */ private void log(String msg){ if(logging){ try{ FileWriter log = new FileWriter(logFilei, true); log.write(msg); log.close(); }catch(IOException e){e.printStackTrace();} } System.out.println(msg); } /** * Adds a player to a game. 
Once a player is added they cannot be removed * @param a the agent to be added * */ public char addPlayer(Agent a){ if(numPlayers > 9) throw new RuntimeException("Too many players"); else if(started) throw new RuntimeException("Game already underway"); else{ Character name = (char)(65+numPlayers++); players.put(name, a); log("Player "+name+" added."); return name; } } /** * Sets up the game and informs all players of their status. * This involves assigning players as spies according to the rules. */ public void setup(){ if(numPlayers < 5) throw new RuntimeException("Too few players"); else if(started) throw new RuntimeException("Game already underway"); else{ for(int i = 0; i<spyNum[numPlayers-5]; i++){ char spy = ' '; while(spy==' ' || spies.contains(spy)){ spy = (char)(65+rand.nextInt(numPlayers)); } spies.add(spy); } for(Character c: players.keySet())playerString+=c; for(Character c: spies){spyString+=c; resString+='?';} char[] pArr = playerString.toCharArray(); Arrays.sort(pArr); playerString = new String(pArr); char[] sArr = spyString.toCharArray(); Arrays.sort(sArr); spyString = new String(sArr); statusUpdate(1,0); started= true; log("Game set up. Spys allocated"); } } /** * Starts a timer for Agent method calls * */ private void stopwatchOn(){ stopwatch = System.currentTimeMillis(); } /** * Checks how if timelimit exceed and if so, logs a violation against a player. * @param limit the limit since stopwatch start, in milliseconds * @param player the player who the violation will be recorded against. * */ private void stopwatchOff(long limit, Character player){ long delay = System.currentTimeMillis()-stopwatch; if(delay>limit) log("Player: "+player+". Time exceeded by "+delay); } /** * Sends a status update to all players. * The status includes the players name, the player string, the spys (or a string of ? 
if the player is not a spy, the number of rounds played and the number of rounds failed) * @param round the current round * @param fails the number of rounds failed **/ private void statusUpdate(int round, int fails){ for(Character c: players.keySet()){ if(spies.contains(c)){ stopwatchOn(); players.get(c).get_status(""+c,playerString,spyString,round,fails); stopwatchOff(100,c); } else{ stopwatchOn(); players.get(c).get_status(""+c,playerString,resString,round,fails); stopwatchOff(100,c); } } } /** * This method picks a random leader for the next round and has them nominate a mission team. * If the leader does not pick a legitimate mission team (wrong number of agents, or agents that are not in the game) a default selection is given instead. * @param round the round in the game the mission is for. * @return a String containing the names of the agents being sent on the mission * */ private String nominate(int round, Character leader){ int mNum = missionNum[numPlayers-5][round-1]; stopwatchOn(); String team = players.get(leader).do_Nominate(mNum); stopwatchOff(1000,leader); char[] tA = team.toCharArray(); Arrays.sort(tA); boolean legit = tA.length==mNum; for(int i = 0; i<mNum && legit; i++){ if(!players.keySet().contains(tA[i])) legit = false; if(i>0 && tA[i]==tA[i-1]) legit=false; } if(!legit){ team = ""; for(int i = 0; i< mNum; i++) team+=(char)(65+i); } for(Character c: players.keySet()){ stopwatchOn(); players.get(c).get_ProposedMission(leader+"", team); stopwatchOff(100, c); } log(leader+" nominated "+team); return team; } /** * This method requests votes from all players on the most recently proposed mission teams, and reports whether a majority voted yes. * It counts the votes and reports a String of all agents who voted in favour to the each agent. * @return true if a strict majority supported the mission. 
* */ private boolean vote(){ int votes = 0; String yays = ""; for(Character c: players.keySet()){ stopwatchOn(); if(players.get(c).do_Vote()){ votes++; yays+=c; } stopwatchOff(1000,c); } for(Character c: players.keySet()){ stopwatchOn(); players.get(c).get_Votes(yays); stopwatchOff(100,c); } log(votes+" votes for: "+yays); return (votes>numPlayers/2); } /** * Polls the mission team on whether they betray or not, and reports the result. * First it informs all players of the team being sent on the mission. * Then polls each agent who goes on the mission on whether or not they betray the mission. * It reports to each agent the number of betrayals. * @param team A string with one character for each member of the team. * @return the number of agents who betray the mission. * */ public int mission(String team){ for(Character c: players.keySet()){ stopwatchOn(); players.get(c).get_Mission(team); stopwatchOff(100,c); } int traitors = 0; for(Character c: team.toCharArray()){ stopwatchOn(); if(spies.contains(c) && players.get(c).do_Betray()) traitors++; stopwatchOff(1000,c); } for(Character c: players.keySet()){ stopwatchOn(); players.get(c).get_Traitors(traitors); stopwatchOff(100,c); } log(traitors +(traitors==1?" spy ":" spies ")+ "betrayed the mission"); return traitors; } /** * Conducts the game play, consisting of 5 rounds, each with a series of nominations and votes, and the eventual mission. * It logs the result of the game at the end. 
* @return the number of failed missions * */ public int play(){ int fails = 0; int leader = (rand.nextInt(numPlayers)); for(int round = 1; round<=5; round++){ String team = nominate(round, playerString.charAt(leader++%numPlayers)); leader%=numPlayers; int voteRnd = 0; while(voteRnd++<5 && !vote()) team = nominate(round, playerString.charAt(leader++%numPlayers)); log(team+" elected"); int traitors = mission(team); if(traitors !=0 && (traitors !=1 || round !=4 || numPlayers<7)){ fails++; log("Mission failed"); } else log("Mission succeeded"); statusUpdate(round+1, fails); HashMap<Character,String> accusations = new HashMap<Character, String>(); for(Character c: players.keySet()){ stopwatchOn(); accusations.put(c,players.get(c).do_Accuse()); stopwatchOff(1000,c); } for(Character c: players.keySet()){ log(c+" accuses "+accusations.get(c)); for(Character a: players.keySet()){ stopwatchOn(); players.get(a).get_Accusation(c+"", accusations.get(c)); stopwatchOff(100,c); } } } if(fails>2) log("Government Wins! "+fails+" missions failed."); else log("Resistance Wins! 
"+fails+" missions failed."); log("The Government Spies were "+spyString+"."); return fails; } static class Competitor implements Comparable{ private Class agent; private String name; private String authors; public int spyWins; public int spyPlays; public int resWins; public int resPlays; public Competitor(Agent agent, String name, String authors){ this.agent = agent.getClass(); this.name = name; this.authors = authors; } public int compareTo(Object o){ try{ Competitor c = (Competitor) o; return (int)(1000*(this.winRate()-c.winRate())); } catch(Exception e){return 1;} } public Agent getAgent(){ try{return (Agent)agent.newInstance();} catch(Exception e){return null;} } public String getName(){return name;} public String getAuthors(){return authors;} public void spyWin(){ spyWins++;spyPlays++; } public void spyLoss(){ spyPlays++; } public void resWin(){ resWins++;resPlays++; } public void resLoss(){ resPlays++; } public double spyWinRate(){ return (1.0*spyWins)/spyPlays; } public double resWinRate(){ return (1.0*resWins)/resPlays; } public double winRate(){ return (1.0*(spyWins+resWins))/(spyPlays+resPlays); } public String toString(){ return "<tr><td>"+name+ "</td><td>"+authors+ "</td><td>"+spyWins+ "</td><td>"+spyPlays+ "</td><td>"+resWins+ "</td><td>"+resPlays+ "</td><td>"+winRate()+ "</td></tr>\n"; } } public static String tournament(Competitor[] agents, int rounds){ Random tRand = new Random(); for(int round = 0; round<rounds; round++){ Game g = new Game("Round"+round+".txt"); int playerNum = 5+tRand.nextInt(6); for(int i = 0; i<playerNum; i++){ int index = tRand.nextInt(agents.length); g.stopwatchOn();char name = g.addPlayer(agents[index].getAgent());g.stopwatchOff(1000,name); g.log("Player "+ agents[index].getName()+" from "+agents[index].getAuthors()+" is "+name); } g.setup(); int fails = g.play(); int i = 0; char[] spies = g.spyString.toCharArray(); for(Character c: g.playerString.toCharArray()){ for(Competitor cc: agents){ 
if(cc.agent.isInstance(g.players.get(c))){ if(i<spies.length && c==spies[i]){ if (fails>2) cc.spyWin(); else cc.spyLoss(); i++; } else{ if(fails>2) cc.resLoss(); else cc.resWin(); } g.log(cc.toString()); } } } } Arrays.sort(agents); String ret = "<html><body><table><tr><th>Name</th><th>Author</th><th>Spy Wins</th><th>Spy Plays</th><th>Res Wins</th><th>Res Plays</th><th>Win Rate</th></tr>"; for(int i = 0; i< agents.length; i++) ret+= agents[i]; return ret+"</table></body></html>"; } /** * Sets up game with random agents and plays **/ public static void main(String[] args){ /* Run a single game Game g = new Game(); g.stopwatchOn();g.addPlayer(new RandomAgent());g.stopwatchOff(1000,'A'); g.stopwatchOn();g.addPlayer(new RandomAgent());g.stopwatchOff(1000,'B'); g.stopwatchOn();g.addPlayer(new RandomAgent());g.stopwatchOff(1000,'C'); g.stopwatchOn();g.addPlayer(new RandomAgent());g.stopwatchOff(1000,'D'); g.stopwatchOn();g.addPlayer(new RandomAgent());g.stopwatchOff(1000,'E'); g.setup(); g.play(); */ /*Run a tournament*/ try{ File f = new File("Results.html"); FileWriter fw = new FileWriter(f); Competitor[] contenders = {new Competitor(new RandomAgent(),"Randy","Tim")}; fw.write(tournament(contenders, 10)); fw.close(); } catch(IOException e){System.out.println("IO fail");} } }
src/cits3001_2016s2/Game.java
package cits3001_2016s2; import java.util.*; import java.io.*; /** * A Class to represent a single game of resistance * @author Tim French * */ public class Game{ private Map<Character,Agent> players; private Set<Character> spies; private String playerString = ""; private String spyString = ""; private String resString = ""; private int numPlayers = 0; private static final int[] spyNum = {2,2,3,3,3,4}; //spyNum[n-5] is the number of spies in an n player game private static final int[][] missionNum = {{2,3,2,3,3},{2,3,4,3,4},{2,3,3,4,4},{3,4,4,5,5},{3,4,4,5,5},{3,4,4,5,5}}; //missionNum[n-5][i] is the number to send on mission i in a in an n player game private Random rand; private File logFile; private boolean logging = false; private boolean started = false; private long stopwatch = 0; /** * Creates an empty game. * Game log printed to stdout * */ public Game(){ init(); } /** * Creates an empty game * @param logFile path to the log file * */ public Game(String fName){ logFile = new File(fName, true); logging = true; init(); } /** * Initializes the data structures for the game * */ private void init(){ players = new HashMap<Character,Agent>(); spies = new HashSet<Character>(); rand = new Random(); long seed = rand.nextLong(); rand.setSeed(seed); log("Seed: "+seed); } /** * Writes the String to the log file * @param msg the String to log * */ private void log(String msg){ if(logging){ try{ FileWriter log = new FileWriter(logFile); log.write(msg); log.close(); }catch(IOException e){e.printStackTrace();} } System.out.println(msg); } /** * Adds a player to a game. 
Once a player is added they cannot be removed * @param a the agent to be added * */ public char addPlayer(Agent a){ if(numPlayers > 9) throw new RuntimeException("Too many players"); else if(started) throw new RuntimeException("Game already underway"); else{ Character name = (char)(65+numPlayers++); players.put(name, a); log("Player "+name+" added."); return name; } } /** * Sets up the game and informs all players of their status. * This involves assigning players as spies according to the rules. */ public void setup(){ if(numPlayers < 5) throw new RuntimeException("Too few players"); else if(started) throw new RuntimeException("Game already underway"); else{ for(int i = 0; i<spyNum[numPlayers-5]; i++){ char spy = ' '; while(spy==' ' || spies.contains(spy)){ spy = (char)(65+rand.nextInt(numPlayers)); } spies.add(spy); } for(Character c: players.keySet())playerString+=c; for(Character c: spies){spyString+=c; resString+='?';} char[] pArr = playerString.toCharArray(); Arrays.sort(pArr); playerString = new String(pArr); char[] sArr = spyString.toCharArray(); Arrays.sort(sArr); spyString = new String(sArr); statusUpdate(1,0); started= true; log("Game set up. Spys allocated"); } } /** * Starts a timer for Agent method calls * */ private void stopwatchOn(){ stopwatch = System.currentTimeMillis(); } /** * Checks how if timelimit exceed and if so, logs a violation against a player. * @param limit the limit since stopwatch start, in milliseconds * @param player the player who the violation will be recorded against. * */ private void stopwatchOff(long limit, Character player){ long delay = System.currentTimeMillis()-stopwatch; if(delay>limit) log("Player: "+player+". Time exceeded by "+delay); } /** * Sends a status update to all players. * The status includes the players name, the player string, the spys (or a string of ? 
if the player is not a spy, the number of rounds played and the number of rounds failed) * @param round the current round * @param fails the number of rounds failed **/ private void statusUpdate(int round, int fails){ for(Character c: players.keySet()){ if(spies.contains(c)){ stopwatchOn(); players.get(c).get_status(""+c,playerString,spyString,round,fails); stopwatchOff(100,c); } else{ stopwatchOn(); players.get(c).get_status(""+c,playerString,resString,round,fails); stopwatchOff(100,c); } } } /** * This method picks a random leader for the next round and has them nominate a mission team. * If the leader does not pick a legitimate mission team (wrong number of agents, or agents that are not in the game) a default selection is given instead. * @param round the round in the game the mission is for. * @return a String containing the names of the agents being sent on the mission * */ private String nominate(int round, Character leader){ int mNum = missionNum[numPlayers-5][round-1]; stopwatchOn(); String team = players.get(leader).do_Nominate(mNum); stopwatchOff(1000,leader); char[] tA = team.toCharArray(); Arrays.sort(tA); boolean legit = tA.length==mNum; for(int i = 0; i<mNum && legit; i++){ if(!players.keySet().contains(tA[i])) legit = false; if(i>0 && tA[i]==tA[i-1]) legit=false; } if(!legit){ team = ""; for(int i = 0; i< mNum; i++) team+=(char)(65+i); } for(Character c: players.keySet()){ stopwatchOn(); players.get(c).get_ProposedMission(leader+"", team); stopwatchOff(100, c); } log(leader+" nominated "+team); return team; } /** * This method requests votes from all players on the most recently proposed mission teams, and reports whether a majority voted yes. * It counts the votes and reports a String of all agents who voted in favour to the each agent. * @return true if a strict majority supported the mission. 
* */ private boolean vote(){ int votes = 0; String yays = ""; for(Character c: players.keySet()){ stopwatchOn(); if(players.get(c).do_Vote()){ votes++; yays+=c; } stopwatchOff(1000,c); } for(Character c: players.keySet()){ stopwatchOn(); players.get(c).get_Votes(yays); stopwatchOff(100,c); } log(votes+" votes for: "+yays); return (votes>numPlayers/2); } /** * Polls the mission team on whether they betray or not, and reports the result. * First it informs all players of the team being sent on the mission. * Then polls each agent who goes on the mission on whether or not they betray the mission. * It reports to each agent the number of betrayals. * @param team A string with one character for each member of the team. * @return the number of agents who betray the mission. * */ public int mission(String team){ for(Character c: players.keySet()){ stopwatchOn(); players.get(c).get_Mission(team); stopwatchOff(100,c); } int traitors = 0; for(Character c: team.toCharArray()){ stopwatchOn(); if(spies.contains(c) && players.get(c).do_Betray()) traitors++; stopwatchOff(1000,c); } for(Character c: players.keySet()){ stopwatchOn(); players.get(c).get_Traitors(traitors); stopwatchOff(100,c); } log(traitors +(traitors==1?" spy ":" spies ")+ "betrayed the mission"); return traitors; } /** * Conducts the game play, consisting of 5 rounds, each with a series of nominations and votes, and the eventual mission. * It logs the result of the game at the end. 
* @return the number of failed missions * */ public int play(){ int fails = 0; int leader = (rand.nextInt(numPlayers)); for(int round = 1; round<=5; round++){ String team = nominate(round, playerString.charAt(leader++%numPlayers)); leader%=numPlayers; int voteRnd = 0; while(voteRnd++<5 && !vote()) team = nominate(round, playerString.charAt(leader++%numPlayers)); log(team+" elected"); int traitors = mission(team); if(traitors !=0 && (traitors !=1 || round !=4 || numPlayers<7)){ fails++; log("Mission failed"); } else log("Mission succeeded"); statusUpdate(round+1, fails); HashMap<Character,String> accusations = new HashMap<Character, String>(); for(Character c: players.keySet()){ stopwatchOn(); accusations.put(c,players.get(c).do_Accuse()); stopwatchOff(1000,c); } for(Character c: players.keySet()){ log(c+" accuses "+accusations.get(c)); for(Character a: players.keySet()){ stopwatchOn(); players.get(a).get_Accusation(c+"", accusations.get(c)); stopwatchOff(100,c); } } } if(fails>2) log("Government Wins! "+fails+" missions failed."); else log("Resistance Wins! 
"+fails+" missions failed."); log("The Government Spies were "+spyString+"."); return fails; } static class Competitor implements Comparable{ private Class agent; private String name; private String authors; public int spyWins; public int spyPlays; public int resWins; public int resPlays; public Competitor(Agent agent, String name, String authors){ this.agent = agent.getClass(); this.name = name; this.authors = authors; } public int compareTo(Object o){ try{ Competitor c = (Competitor) o; return (int)(1000*(this.winRate()-c.winRate())); } catch(Exception e){return 1;} } public Agent getAgent(){ try{return (Agent)agent.newInstance();} catch(Exception e){return null;} } public String getName(){return name;} public String getAuthors(){return authors;} public void spyWin(){ spyWins++;spyPlays++; } public void spyLoss(){ spyPlays++; } public void resWin(){ resWins++;resPlays++; } public void resLoss(){ resPlays++; } public double spyWinRate(){ return (1.0*spyWins)/spyPlays; } public double resWinRate(){ return (1.0*resWins)/resPlays; } public double winRate(){ return (1.0*(spyWins+resWins))/(spyPlays+resPlays); } public String toString(){ return "<tr><td>"+name+ "</td><td>"+authors+ "</td><td>"+spyWins+ "</td><td>"+spyPlays+ "</td><td>"+resWins+ "</td><td>"+resPlays+ "</td><td>"+winRate()+ "</td></tr>\n"; } } public static String tournament(Competitor[] agents, int rounds){ Random tRand = new Random(); for(int round = 0; round<rounds; round++){ Game g = new Game("Round"+round+".txt"); int playerNum = 5+tRand.nextInt(6); for(int i = 0; i<playerNum; i++){ int index = tRand.nextInt(agents.length); g.stopwatchOn();char name = g.addPlayer(agents[index].getAgent());g.stopwatchOff(1000,name); g.log("Player "+ agents[index].getName()+" from "+agents[index].getAuthors()+" is "+name); } g.setup(); int fails = g.play(); int i = 0; char[] spies = g.spyString.toCharArray(); for(Character c: g.playerString.toCharArray()){ for(Competitor cc: agents){ 
if(cc.agent.isInstance(g.players.get(c))){ if(i<spies.length && c==spies[i]){ if (fails>2) cc.spyWin(); else cc.spyLoss(); i++; } else{ if(fails>2) cc.resLoss(); else cc.resWin(); } g.log(cc.toString()); } } } } Arrays.sort(agents); String ret = "<html><body><table><tr><th>Name</th><th>Author</th><th>Spy Wins</th><th>Spy Plays</th><th>Res Wins</th><th>Res Plays</th><th>Win Rate</th></tr>"; for(int i = 0; i< agents.length; i++) ret+= agents[i]; return ret+"</table></body></html>"; } /** * Sets up game with random agents and plays **/ public static void main(String[] args){ /* Run a single game Game g = new Game(); g.stopwatchOn();g.addPlayer(new RandomAgent());g.stopwatchOff(1000,'A'); g.stopwatchOn();g.addPlayer(new RandomAgent());g.stopwatchOff(1000,'B'); g.stopwatchOn();g.addPlayer(new RandomAgent());g.stopwatchOff(1000,'C'); g.stopwatchOn();g.addPlayer(new RandomAgent());g.stopwatchOff(1000,'D'); g.stopwatchOn();g.addPlayer(new RandomAgent());g.stopwatchOff(1000,'E'); g.setup(); g.play(); */ /*Run a tournament*/ try{ File f = new File("Results.html"); FileWriter fw = new FileWriter(f); Competitor[] contenders = {new Competitor(new RandomAgent(),"Randy","Tim")}; fw.write(tournament(contenders, 10)); fw.close(); } catch(IOException e){System.out.println("IO fail");} } }
fixed comile issue in game
src/cits3001_2016s2/Game.java
fixed comile issue in game
<ide><path>rc/cits3001_2016s2/Game.java <ide> * @param logFile path to the log file <ide> * */ <ide> public Game(String fName){ <del> logFile = new File(fName, true); <add> logFile = new File(fName); <ide> logging = true; <ide> init(); <ide> } <ide> private void log(String msg){ <ide> if(logging){ <ide> try{ <del> FileWriter log = new FileWriter(logFile); <add> FileWriter log = new FileWriter(logFilei, true); <ide> log.write(msg); <ide> log.close(); <ide> }catch(IOException e){e.printStackTrace();}
Java
agpl-3.0
7d5a7a1dfd2c68a5e558ff3d4911adb134f33b2f
0
jwillia/kc-old1,ColostateResearchServices/kc,ColostateResearchServices/kc,jwillia/kc-old1,geothomasp/kcmit,ColostateResearchServices/kc,kuali/kc,jwillia/kc-old1,iu-uits-es/kc,mukadder/kc,kuali/kc,UniversityOfHawaiiORS/kc,iu-uits-es/kc,geothomasp/kcmit,kuali/kc,UniversityOfHawaiiORS/kc,jwillia/kc-old1,mukadder/kc,geothomasp/kcmit,geothomasp/kcmit,geothomasp/kcmit,iu-uits-es/kc,UniversityOfHawaiiORS/kc,mukadder/kc
/* * Copyright 2006-2008 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.meeting; import java.util.ArrayList; import junit.framework.Assert; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.kuali.kra.KraTestBase; import org.kuali.kra.committee.bo.CommitteeSchedule; import org.kuali.kra.committee.web.struts.form.schedule.Time12HrFmt; import org.kuali.kra.committee.web.struts.form.schedule.Time12HrFmt.MERIDIEM; import org.kuali.kra.infrastructure.KeyConstants; import org.kuali.rice.kns.util.GlobalVariables; import org.kuali.rice.test.data.PerSuiteUnitTestData; import org.kuali.rice.test.data.UnitTestData; import org.kuali.rice.test.data.UnitTestFile; @PerSuiteUnitTestData(@UnitTestData(sqlFiles = { @UnitTestFile(filename = "classpath:sql/dml/load_PROTOCOL_CONTINGENCY.sql", delimiter = ";") })) public class MeetingRuleTest extends KraTestBase { // need extends Kratestbase for businessservice called in rules. 
private MeetingDetailsRule rule; private Time12HrFmt viewStartTime; private Time12HrFmt viewEndTime; private Time12HrFmt viewTime; @Before public void setUp() throws Exception { super.setUp(); viewTime = new Time12HrFmt("01:00", MERIDIEM.PM); viewStartTime = new Time12HrFmt("01:00", MERIDIEM.PM); viewEndTime = new Time12HrFmt("01:00", MERIDIEM.PM); rule = new MeetingDetailsRule(); } @After public void tearDown() throws Exception { super.tearDown(); rule = null; } @Test public void testRuleIsOK() throws Exception { CommitteeSchedule committeeSchedule = new CommitteeSchedule(); committeeSchedule.setViewStartTime(viewStartTime); committeeSchedule.setViewEndTime(viewEndTime); committeeSchedule.setViewTime(viewTime); Assert.assertTrue(rule.validateMeetingDetails(committeeSchedule)); viewStartTime.setTime("12:00"); Assert.assertTrue(rule.validateMeetingDetails(committeeSchedule)); viewStartTime.setTime("01:00"); viewEndTime.setTime("02:30"); Assert.assertTrue(rule.validateMeetingDetails(committeeSchedule)); } @Test public void testViewTimeIsNotOK() throws Exception { testTimeIsNotFormatOk(viewTime); } @Test public void testViewStartTimeIsNotOK() throws Exception { testTimeIsNotFormatOk(viewStartTime); } @Test public void testViewEndTimeIsNotOK() throws Exception { testTimeIsNotFormatOk(viewEndTime); } @Test public void testViewEndTimeBeforeViewStartTime() throws Exception { CommitteeSchedule committeeSchedule = new CommitteeSchedule(); committeeSchedule.setViewStartTime(viewStartTime); committeeSchedule.setViewEndTime(viewEndTime); committeeSchedule.setViewTime(viewTime); viewEndTime.setMeridiem("AM"); Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_ENDTIME_BEFORE_STARTTIME)); viewEndTime.setMeridiem("PM"); viewEndTime.setTime("12:30"); Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); 
Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_ENDTIME_BEFORE_STARTTIME)); viewEndTime.setTime("01:30"); viewStartTime.setTime("02:30"); Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_ENDTIME_BEFORE_STARTTIME)); } @Test public void testvalidateNewOther() throws Exception { OtherPresentBean newOtherPresentBean = new OtherPresentBean(); CommitteeScheduleAttendance attendance = new CommitteeScheduleAttendance(); newOtherPresentBean.setAttendance(attendance); MeetingHelper meetingHelper = new MeetingHelper(new MeetingForm()); meetingHelper.setNewOtherPresentBean(newOtherPresentBean); meetingHelper.setMemberPresentBeans(new ArrayList<MemberPresentBean>()); Assert.assertFalse(rule.validateNewOther(meetingHelper)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PERSON)); attendance.setPersonName("tester 1"); Assert.assertTrue(rule.validateNewOther(meetingHelper)); meetingHelper.getMemberPresentBeans().add(getMemberPresent("001", "tester 1")); newOtherPresentBean.getAttendance().setPersonId("001"); // member present found newOtherPresentBean.getAttendance().setNonEmployeeFlag(false); Assert.assertFalse(rule.validateNewOther(meetingHelper)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_ADD_MEMBER_PRESENT)); // personid is the same but one is non-employee, the other is not newOtherPresentBean.getAttendance().setNonEmployeeFlag(true); Assert.assertTrue(rule.validateNewOther(meetingHelper)); // personid is not matched newOtherPresentBean.getAttendance().setPersonId("002"); newOtherPresentBean.getAttendance().setNonEmployeeFlag(false); Assert.assertTrue(rule.validateNewOther(meetingHelper)); // person matched alternatefor meetingHelper.getMemberPresentBeans().get(0).getAttendance().setAlternateFor("002"); 
Assert.assertFalse(rule.validateNewOther(meetingHelper)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_ADD_MEMBER_PRESENT)); } @Test public void testValidateNotAlternateFor() throws Exception { MemberAbsentBean memberAbsentBean = new MemberAbsentBean(); CommitteeScheduleAttendance attendance = new CommitteeScheduleAttendance(); attendance.setPersonName("tester 2"); attendance.setPersonId("002"); memberAbsentBean.setAttendance(attendance); MeetingHelper meetingHelper = new MeetingHelper(new MeetingForm()); meetingHelper.setMemberPresentBeans(new ArrayList<MemberPresentBean>()); meetingHelper.getMemberPresentBeans().add(getMemberPresent("001", "tester 1")); Assert.assertTrue(rule.validateNotAlternateFor(meetingHelper.getMemberPresentBeans(), memberAbsentBean)); meetingHelper.getMemberPresentBeans().get(0).getAttendance().setAlternateFor("002"); meetingHelper.getMemberPresentBeans().get(0).getAttendance().setAlternateFlag(true); Assert.assertFalse(rule.validateNotAlternateFor(meetingHelper.getMemberPresentBeans(), memberAbsentBean)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_PRESENT_MEMBER_ABSENT)); } @Test public void testValidateDuplicateAlternateFor() throws Exception { MeetingHelper meetingHelper = new MeetingHelper(new MeetingForm()); meetingHelper.setMemberPresentBeans(new ArrayList<MemberPresentBean>()); meetingHelper.getMemberPresentBeans().add(getMemberPresent("001", "tester 1")); meetingHelper.getMemberPresentBeans().add(getMemberPresent("002", "tester 2")); // Both alternate for are null Assert.assertTrue(rule.validateDuplicateAlternateFor(meetingHelper.getMemberPresentBeans())); meetingHelper.getMemberPresentBeans().get(0).getAttendance().setAlternateFor("003"); meetingHelper.getMemberPresentBeans().get(0).getAttendance().setAlternateFlag(true); // one is null, the otehr is '003' 
Assert.assertTrue(rule.validateDuplicateAlternateFor(meetingHelper.getMemberPresentBeans())); meetingHelper.getMemberPresentBeans().get(1).getAttendance().setAlternateFor("004"); meetingHelper.getMemberPresentBeans().get(1).getAttendance().setAlternateFlag(true); // one '004', the other is '003' Assert.assertTrue(rule.validateDuplicateAlternateFor(meetingHelper.getMemberPresentBeans())); meetingHelper.getMemberPresentBeans().get(1).getAttendance().setAlternateFor("003"); // both '003' Assert.assertFalse(rule.validateDuplicateAlternateFor(meetingHelper.getMemberPresentBeans())); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_DUPLICATE_ALTERNATE_FOR)); } @Test public void testValidateProtocolInMinute() throws Exception { CommitteeScheduleMinute committeeScheduleMinute = new CommitteeScheduleMinute(){ // @Override // public void refreshReferenceObject(String referenceObjectName) { // if (referenceObjectName.equals("protocolContingency")) { // ProtocolContingency protocolContingency= new ProtocolContingency(); // protocolContingency.setProtocolContingencyCode(getProtocolContingencyCode()); // protocolContingency.setDescription(getProtocolContingencyCode()+" description"); // setProtocolContingency(protocolContingency); // // } // // } }; committeeScheduleMinute.setMinuteEntryTypeCode("3"); Assert.assertFalse(rule.validateProtocolInMinute(committeeScheduleMinute)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PROTOCOL)); ProtocolContingency protocolContingency= new ProtocolContingency(); protocolContingency.setProtocolContingencyCode("1"); protocolContingency.setDescription("description 1"); committeeScheduleMinute.setProtocolContingency(protocolContingency); committeeScheduleMinute.setProtocolContingencyCode("1"); Assert.assertFalse(rule.validateProtocolInMinute(committeeScheduleMinute)); 
Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PROTOCOL)); protocolContingency= new ProtocolContingency(); protocolContingency.setProtocolContingencyCode("111"); protocolContingency.setDescription("description 111"); committeeScheduleMinute.setProtocolContingency(protocolContingency); committeeScheduleMinute.setProtocolContingencyCode("111"); Assert.assertFalse(rule.validateProtocolInMinute(committeeScheduleMinute)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PROTOCOL)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PROTOCOL_CONTINGENCY)); committeeScheduleMinute.setProtocolIdFk(1L); Assert.assertFalse(rule.validateProtocolInMinute(committeeScheduleMinute)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PROTOCOL_CONTINGENCY)); committeeScheduleMinute.setProtocolContingencyCode("1"); Assert.assertTrue(rule.validateProtocolInMinute(committeeScheduleMinute)); } private MemberPresentBean getMemberPresent(String personId, String personName) { MemberPresentBean memberPresentBean = new MemberPresentBean(); CommitteeScheduleAttendance attendance = new CommitteeScheduleAttendance(); attendance.setNonEmployeeFlag(false); attendance.setPersonId(personId); attendance.setPersonName(personName); memberPresentBean.setAttendance(attendance); return memberPresentBean; } private void testTimeIsNotFormatOk(Time12HrFmt time) { CommitteeSchedule committeeSchedule = new CommitteeSchedule(); committeeSchedule.setViewStartTime(viewStartTime); committeeSchedule.setViewEndTime(viewEndTime); committeeSchedule.setViewTime(viewTime); time.setTime("13:00"); Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_VIEWTIME)); time.setTime("12:61"); 
Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_VIEWTIME)); time.setTime("13:61"); Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_VIEWTIME)); } }
src/test/java/org/kuali/kra/meeting/MeetingRuleTest.java
/* * Copyright 2006-2008 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.kra.meeting; import java.util.ArrayList; import junit.framework.Assert; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.kuali.kra.KraTestBase; import org.kuali.kra.committee.bo.CommitteeSchedule; import org.kuali.kra.committee.web.struts.form.schedule.Time12HrFmt; import org.kuali.kra.committee.web.struts.form.schedule.Time12HrFmt.MERIDIEM; import org.kuali.kra.infrastructure.KeyConstants; import org.kuali.rice.kns.util.GlobalVariables; import org.kuali.rice.test.data.PerSuiteUnitTestData; import org.kuali.rice.test.data.UnitTestData; import org.kuali.rice.test.data.UnitTestFile; @PerSuiteUnitTestData(@UnitTestData(sqlFiles = { @UnitTestFile(filename = "classpath:sql/dml/load_protocol_CONTINGENCY.sql", delimiter = ";") })) public class MeetingRuleTest extends KraTestBase { // need extends Kratestbase for businessservice called in rules. 
private MeetingDetailsRule rule; private Time12HrFmt viewStartTime; private Time12HrFmt viewEndTime; private Time12HrFmt viewTime; @Before public void setUp() throws Exception { super.setUp(); viewTime = new Time12HrFmt("01:00", MERIDIEM.PM); viewStartTime = new Time12HrFmt("01:00", MERIDIEM.PM); viewEndTime = new Time12HrFmt("01:00", MERIDIEM.PM); rule = new MeetingDetailsRule(); } @After public void tearDown() throws Exception { super.tearDown(); rule = null; } @Test public void testRuleIsOK() throws Exception { CommitteeSchedule committeeSchedule = new CommitteeSchedule(); committeeSchedule.setViewStartTime(viewStartTime); committeeSchedule.setViewEndTime(viewEndTime); committeeSchedule.setViewTime(viewTime); Assert.assertTrue(rule.validateMeetingDetails(committeeSchedule)); viewStartTime.setTime("12:00"); Assert.assertTrue(rule.validateMeetingDetails(committeeSchedule)); viewStartTime.setTime("01:00"); viewEndTime.setTime("02:30"); Assert.assertTrue(rule.validateMeetingDetails(committeeSchedule)); } @Test public void testViewTimeIsNotOK() throws Exception { testTimeIsNotFormatOk(viewTime); } @Test public void testViewStartTimeIsNotOK() throws Exception { testTimeIsNotFormatOk(viewStartTime); } @Test public void testViewEndTimeIsNotOK() throws Exception { testTimeIsNotFormatOk(viewEndTime); } @Test public void testViewEndTimeBeforeViewStartTime() throws Exception { CommitteeSchedule committeeSchedule = new CommitteeSchedule(); committeeSchedule.setViewStartTime(viewStartTime); committeeSchedule.setViewEndTime(viewEndTime); committeeSchedule.setViewTime(viewTime); viewEndTime.setMeridiem("AM"); Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_ENDTIME_BEFORE_STARTTIME)); viewEndTime.setMeridiem("PM"); viewEndTime.setTime("12:30"); Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); 
Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_ENDTIME_BEFORE_STARTTIME)); viewEndTime.setTime("01:30"); viewStartTime.setTime("02:30"); Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_ENDTIME_BEFORE_STARTTIME)); } @Test public void testvalidateNewOther() throws Exception { OtherPresentBean newOtherPresentBean = new OtherPresentBean(); CommitteeScheduleAttendance attendance = new CommitteeScheduleAttendance(); newOtherPresentBean.setAttendance(attendance); MeetingHelper meetingHelper = new MeetingHelper(new MeetingForm()); meetingHelper.setNewOtherPresentBean(newOtherPresentBean); meetingHelper.setMemberPresentBeans(new ArrayList<MemberPresentBean>()); Assert.assertFalse(rule.validateNewOther(meetingHelper)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PERSON)); attendance.setPersonName("tester 1"); Assert.assertTrue(rule.validateNewOther(meetingHelper)); meetingHelper.getMemberPresentBeans().add(getMemberPresent("001", "tester 1")); newOtherPresentBean.getAttendance().setPersonId("001"); // member present found newOtherPresentBean.getAttendance().setNonEmployeeFlag(false); Assert.assertFalse(rule.validateNewOther(meetingHelper)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_ADD_MEMBER_PRESENT)); // personid is the same but one is non-employee, the other is not newOtherPresentBean.getAttendance().setNonEmployeeFlag(true); Assert.assertTrue(rule.validateNewOther(meetingHelper)); // personid is not matched newOtherPresentBean.getAttendance().setPersonId("002"); newOtherPresentBean.getAttendance().setNonEmployeeFlag(false); Assert.assertTrue(rule.validateNewOther(meetingHelper)); // person matched alternatefor meetingHelper.getMemberPresentBeans().get(0).getAttendance().setAlternateFor("002"); 
Assert.assertFalse(rule.validateNewOther(meetingHelper)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_ADD_MEMBER_PRESENT)); } @Test public void testValidateNotAlternateFor() throws Exception { MemberAbsentBean memberAbsentBean = new MemberAbsentBean(); CommitteeScheduleAttendance attendance = new CommitteeScheduleAttendance(); attendance.setPersonName("tester 2"); attendance.setPersonId("002"); memberAbsentBean.setAttendance(attendance); MeetingHelper meetingHelper = new MeetingHelper(new MeetingForm()); meetingHelper.setMemberPresentBeans(new ArrayList<MemberPresentBean>()); meetingHelper.getMemberPresentBeans().add(getMemberPresent("001", "tester 1")); Assert.assertTrue(rule.validateNotAlternateFor(meetingHelper.getMemberPresentBeans(), memberAbsentBean)); meetingHelper.getMemberPresentBeans().get(0).getAttendance().setAlternateFor("002"); meetingHelper.getMemberPresentBeans().get(0).getAttendance().setAlternateFlag(true); Assert.assertFalse(rule.validateNotAlternateFor(meetingHelper.getMemberPresentBeans(), memberAbsentBean)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_PRESENT_MEMBER_ABSENT)); } @Test public void testValidateDuplicateAlternateFor() throws Exception { MeetingHelper meetingHelper = new MeetingHelper(new MeetingForm()); meetingHelper.setMemberPresentBeans(new ArrayList<MemberPresentBean>()); meetingHelper.getMemberPresentBeans().add(getMemberPresent("001", "tester 1")); meetingHelper.getMemberPresentBeans().add(getMemberPresent("002", "tester 2")); // Both alternate for are null Assert.assertTrue(rule.validateDuplicateAlternateFor(meetingHelper.getMemberPresentBeans())); meetingHelper.getMemberPresentBeans().get(0).getAttendance().setAlternateFor("003"); meetingHelper.getMemberPresentBeans().get(0).getAttendance().setAlternateFlag(true); // one is null, the otehr is '003' 
Assert.assertTrue(rule.validateDuplicateAlternateFor(meetingHelper.getMemberPresentBeans())); meetingHelper.getMemberPresentBeans().get(1).getAttendance().setAlternateFor("004"); meetingHelper.getMemberPresentBeans().get(1).getAttendance().setAlternateFlag(true); // one '004', the other is '003' Assert.assertTrue(rule.validateDuplicateAlternateFor(meetingHelper.getMemberPresentBeans())); meetingHelper.getMemberPresentBeans().get(1).getAttendance().setAlternateFor("003"); // both '003' Assert.assertFalse(rule.validateDuplicateAlternateFor(meetingHelper.getMemberPresentBeans())); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_DUPLICATE_ALTERNATE_FOR)); } @Test public void testValidateProtocolInMinute() throws Exception { CommitteeScheduleMinute committeeScheduleMinute = new CommitteeScheduleMinute(){ // @Override // public void refreshReferenceObject(String referenceObjectName) { // if (referenceObjectName.equals("protocolContingency")) { // ProtocolContingency protocolContingency= new ProtocolContingency(); // protocolContingency.setProtocolContingencyCode(getProtocolContingencyCode()); // protocolContingency.setDescription(getProtocolContingencyCode()+" description"); // setProtocolContingency(protocolContingency); // // } // // } }; committeeScheduleMinute.setMinuteEntryTypeCode("3"); Assert.assertFalse(rule.validateProtocolInMinute(committeeScheduleMinute)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PROTOCOL)); ProtocolContingency protocolContingency= new ProtocolContingency(); protocolContingency.setProtocolContingencyCode("1"); protocolContingency.setDescription("description 1"); committeeScheduleMinute.setProtocolContingency(protocolContingency); committeeScheduleMinute.setProtocolContingencyCode("1"); Assert.assertFalse(rule.validateProtocolInMinute(committeeScheduleMinute)); 
Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PROTOCOL)); protocolContingency= new ProtocolContingency(); protocolContingency.setProtocolContingencyCode("111"); protocolContingency.setDescription("description 111"); committeeScheduleMinute.setProtocolContingency(protocolContingency); committeeScheduleMinute.setProtocolContingencyCode("111"); Assert.assertFalse(rule.validateProtocolInMinute(committeeScheduleMinute)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PROTOCOL)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PROTOCOL_CONTINGENCY)); committeeScheduleMinute.setProtocolIdFk(1L); Assert.assertFalse(rule.validateProtocolInMinute(committeeScheduleMinute)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_EMPTY_PROTOCOL_CONTINGENCY)); committeeScheduleMinute.setProtocolContingencyCode("1"); Assert.assertTrue(rule.validateProtocolInMinute(committeeScheduleMinute)); } private MemberPresentBean getMemberPresent(String personId, String personName) { MemberPresentBean memberPresentBean = new MemberPresentBean(); CommitteeScheduleAttendance attendance = new CommitteeScheduleAttendance(); attendance.setNonEmployeeFlag(false); attendance.setPersonId(personId); attendance.setPersonName(personName); memberPresentBean.setAttendance(attendance); return memberPresentBean; } private void testTimeIsNotFormatOk(Time12HrFmt time) { CommitteeSchedule committeeSchedule = new CommitteeSchedule(); committeeSchedule.setViewStartTime(viewStartTime); committeeSchedule.setViewEndTime(viewEndTime); committeeSchedule.setViewTime(viewTime); time.setTime("13:00"); Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_VIEWTIME)); time.setTime("12:61"); 
Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_VIEWTIME)); time.setTime("13:61"); Assert.assertFalse(rule.validateMeetingDetails(committeeSchedule)); Assert.assertTrue(GlobalVariables.getMessageMap().containsMessageKey(KeyConstants.ERROR_COMMITTEESCHEDULE_VIEWTIME)); } }
kcirb-565: fix file name
src/test/java/org/kuali/kra/meeting/MeetingRuleTest.java
kcirb-565: fix file name
<ide><path>rc/test/java/org/kuali/kra/meeting/MeetingRuleTest.java <ide> import org.kuali.rice.test.data.UnitTestFile; <ide> <ide> @PerSuiteUnitTestData(@UnitTestData(sqlFiles = { <del> @UnitTestFile(filename = "classpath:sql/dml/load_protocol_CONTINGENCY.sql", delimiter = ";") <add> @UnitTestFile(filename = "classpath:sql/dml/load_PROTOCOL_CONTINGENCY.sql", delimiter = ";") <ide> })) <ide> public class MeetingRuleTest extends KraTestBase { <ide> // need extends Kratestbase for businessservice called in rules.
Java
apache-2.0
5d398815869e4ee4491bcd91d00697705a62c457
0
caturday/plus,caturday/plus,plus-provenance/plus,plus-provenance/plus,plus-provenance/plus,caturday/plus,caturday/plus,plus-provenance/plus
/* Copyright 2014 MITRE Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mitre.provenance.user; import java.util.HashMap; import java.util.Map; import java.util.NoSuchElementException; import java.util.logging.Logger; import org.mitre.provenance.PLUSException; import org.mitre.provenance.db.neo4j.Neo4JCapable; import org.mitre.provenance.db.neo4j.Neo4JPLUSObjectFactory; import org.mitre.provenance.db.neo4j.Neo4JStorage; import org.neo4j.graphdb.Node; import org.neo4j.graphdb.PropertyContainer; /** * A privilege class is a kind of identity that a user can have. PLUSObjects can require that a user have a * particular privilege class in order to see an object. Privilege classes are a partially ordered domain. * <p>Worth noting though: users don't have privilege classes, they have privilege sets. 
* @see PrivilegeSet#PrivilegeSet() * @author DMALLEN */ public class PrivilegeClass implements Neo4JCapable { protected static Logger log = Logger.getLogger(PrivilegeClass.class.getName()); protected static final String GOD_ID = "urn:uuid:plus:111111111111111111111111111111111111"; protected static final String NATIONAL_SECURITY_ID = "urn:uuid:plus:000000000000000000000000000000000014"; protected static final String PRIVATE_MEDICAL_ID = "urn:uuid:plus:000000000000000000000000000000000013"; protected static final String EMERGENCY_LOW_ID = "urn:uuid:plus:000000000000000000000000000000000012"; protected static final String EMERGENCY_HIGH_ID = "urn:uuid:plus:000000000000000000000000000000000011"; protected static final String PUBLIC_ID = "urn:uuid:plus:000000000000000000000000000000000015"; /* Static singletons for commonly used PCs */ public static final PrivilegeClass ADMIN = new PrivilegeClass(GOD_ID, "Super User", "Super User");; public static final PrivilegeClass NATIONAL_SECURITY = new PrivilegeClass(NATIONAL_SECURITY_ID, "National Security"); public static final PrivilegeClass PRIVATE_MEDICAL = new PrivilegeClass(PRIVATE_MEDICAL_ID, "Private Medical"); public static final PrivilegeClass EMERGENCY_HIGH = new PrivilegeClass(EMERGENCY_HIGH_ID, "Emergency High"); public static final PrivilegeClass EMERGENCY_LOW = new PrivilegeClass(EMERGENCY_LOW_ID, "Emergency Low"); public static final PrivilegeClass PUBLIC = new PrivilegeClass(PUBLIC_ID, "Public"); /** the name of the privilege class */ protected String name; /** a PLUS OID */ protected String id; /** Brief description */ protected String description; /** When created */ protected long created; /** * As a special case you can create a privilege class that is a "security level" -- a totally ordered 0-10 setup * similar to what was in the first iteration of the prototype. * @param level the security level you want. 
This must be 0-10 */ public PrivilegeClass(int level) { if(level < 0) level = 0; if(level > 10) level = 10; // These special case security levels for totally-ordered integers are already hard-wired in the DB. if(level == 10) id = "urn:uuid:plus:000000000000000000000000000000000010"; else id = "urn:uuid:plus:00000000000000000000000000000000000" + level; name = "Security Level " + level; description = "Security Level " + level; setCreated(System.currentTimeMillis()); } // End PrivilegeClass /** * Create a new privilege class. Note this constructor is used with data loaded from the database. You * cannot create a new item in the database by using this call. * @param id ID from database * @param name name from database * @param description description from database. */ protected PrivilegeClass(String id, String name, String description) { setName(name); setId(id); setDescription(description); setCreated(System.currentTimeMillis()); } // End PrivilegeClass protected PrivilegeClass(String id, String name) { this(id, name, name); } public String getId() { return id; } public long getCreated() { return created; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public void setId(String id) { this.id = id; } public void setCreated(long d) { this.created = d; } public String getName() { return name; } public void setName(String name) { this.name = name; } /** * Comparison predicate for Privilege classes. They are equal if and only if their database IDs are equal. * @param p class to compare against * @return true if they are functionally the same, false otherwise. 
*/ public boolean equals(Object p) { if(p == null) return false; if(!(p instanceof PrivilegeClass)) return false; return getId().equals(((PrivilegeClass)p).getId()); } // End equals public String toString() { return new String("(Privilege " + getName() + ")"); } /** * One privilege class dominates another when it is at an equal or higher level of security. All classes * trivially dominate themselves. * @param other the class to compare against. * @return true if this object dominates other, false otherwise. * @throws PLUSException */ public boolean dominates(PrivilegeClass other) throws PLUSException { if(equals(other)) return true; // Every class trivially dominates itself. String query = "start n=node:node_auto_index(pid=\"" + getId() + "\") " + "match n-[r:" + Neo4JStorage.DOMINATES.name() + "*..100]->m " + "where has(m.pid) and m.pid = \"" + other.getId() + "\" " + "return m "; try { PrivilegeClass pc = Neo4JPLUSObjectFactory.newPrivilegeClass((Node)Neo4JStorage.execute(query).columnAs("m").next()); if(pc.getName().equals(other.getName())) return true; throw new PLUSException("Inconsistency: " + pc.getName() + " vs " + other.getName()); } catch(NoSuchElementException nse) { // This happens when no element was returned by the query, i.e. this privilege class doesn't dominate the other. 
return false; } catch(Exception exc) { exc.printStackTrace(); return false; } } // End dominates public Map<String, Object> getStorableProperties() { Map<String,Object> m = new HashMap<String,Object>(); m.put("name", getName()); m.put("pid", getId()); m.put("description", getDescription()); m.put("type", "privilegeclass"); m.put("created", getCreated()); return m; } public PrivilegeClass setProperties(PropertyContainer props) throws PLUSException { setName(""+props.getProperty("name")); setId(""+props.getProperty("pid")); setDescription(""+props.getProperty("description")); Long c = (long)props.getProperty("created", null); if(c != null) setCreated(c); else setCreated(System.currentTimeMillis()); return this; } // End setProperties } // End PrivilegeClass
src/main/java/org/mitre/provenance/user/PrivilegeClass.java
/* Copyright 2014 MITRE Corporation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.mitre.provenance.user; import java.util.HashMap; import java.util.Map; import java.util.NoSuchElementException; import java.util.logging.Logger; import org.mitre.provenance.PLUSException; import org.mitre.provenance.db.neo4j.Neo4JCapable; import org.mitre.provenance.db.neo4j.Neo4JPLUSObjectFactory; import org.mitre.provenance.db.neo4j.Neo4JStorage; import org.neo4j.graphdb.Node; import org.neo4j.graphdb.PropertyContainer; /** * A privilege class is a kind of identity that a user can have. PLUSObjects can require that a user have a * particular privilege class in order to see an object. Privilege classes are a partially ordered domain. * <p>Worth noting though: users don't have privilege classes, they have privilege sets. 
* @see PrivilegeSet#PrivilegeSet() * @author DMALLEN */ public class PrivilegeClass implements Neo4JCapable { protected static Logger log = Logger.getLogger(PrivilegeClass.class.getName()); protected static final String GOD_ID = "urn:uuid:plus:111111111111111111111111111111111111"; protected static final String NATIONAL_SECURITY_ID = "urn:uuid:plus:000000000000000000000000000000000014"; protected static final String PRIVATE_MEDICAL_ID = "urn:uuid:plus:000000000000000000000000000000000013"; protected static final String EMERGENCY_LOW_ID = "urn:uuid:plus:000000000000000000000000000000000012"; protected static final String EMERGENCY_HIGH_ID = "urn:uuid:plus:000000000000000000000000000000000011"; protected static final String PUBLIC_ID = "urn:uuid:plus:000000000000000000000000000000000015"; /* Static singletons for commonly used PCs */ public static final PrivilegeClass ADMIN = new PrivilegeClass(GOD_ID, "Super User", "Super User");; public static final PrivilegeClass NATIONAL_SECURITY = new PrivilegeClass(NATIONAL_SECURITY_ID, "National Security"); public static final PrivilegeClass PRIVATE_MEDICAL = new PrivilegeClass(PRIVATE_MEDICAL_ID, "Private Medical"); public static final PrivilegeClass EMERGENCY_HIGH = new PrivilegeClass(EMERGENCY_HIGH_ID, "Emergency High"); public static final PrivilegeClass EMERGENCY_LOW = new PrivilegeClass(EMERGENCY_LOW_ID, "Emergency Low"); public static final PrivilegeClass PUBLIC = new PrivilegeClass(PUBLIC_ID, "Public"); /** the name of the privilege class */ protected String name; /** a PLUS OID */ protected String id; /** Brief description */ protected String description; /** When created */ protected long created; /** * As a special case you can create a privilege class that is a "security level" -- a totally ordered 0-10 setup * similar to what was in the first iteration of the prototype. * @param level the security level you want. 
This must be 0-10 */ public PrivilegeClass(int level) { if(level < 0) level = 0; if(level > 10) level = 10; // These special case security levels for totally-ordered integers are already hard-wired in the DB. if(level == 10) id = "urn:uuid:plus:000000000000000000000000000000000010"; else id = "urn:uuid:plus:00000000000000000000000000000000000" + level; name = "Security Level " + level; description = "Security Level " + level; setCreated(System.currentTimeMillis()); } // End PrivilegeClass /** * Create a new privilege class. Note this constructor is used with data loaded from the database. You * cannot create a new item in the database by using this call. * @param id ID from database * @param name name from database * @param description description from database. */ protected PrivilegeClass(String id, String name, String description) { setName(name); setId(id); setDescription(description); setCreated(System.currentTimeMillis()); } // End PrivilegeClass protected PrivilegeClass(String id, String name) { this(id, name, name); } public String getId() { return id; } public long getCreated() { return created; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public void setId(String id) { this.id = id; } public void setCreated(long d) { this.created = d; } public String getName() { return name; } public void setName(String name) { this.name = name; } /** * Comparison predicate for Privilege classes. They are equal if and only if their database IDs are equal. * @param p class to compare against * @return true if they are functionally the same, false otherwise. 
*/ public boolean equals(Object p) { if(p == null) return false; if(!(p instanceof PrivilegeClass)) return false; return getId().equals(((PrivilegeClass)p).getId()); } // End equals public String toString() { return new String("[[" + getName() + "]]"); } /** * One privilege class dominates another when it is at an equal or higher level of security. All classes * trivially dominate themselves. * @param other the class to compare against. * @return true if this object dominates other, false otherwise. * @throws PLUSException */ public boolean dominates(PrivilegeClass other) throws PLUSException { if(equals(other)) return true; // Every class trivially dominates itself. String query = "start n=node:node_auto_index(pid=\"" + getId() + "\") " + "match n-[r:" + Neo4JStorage.DOMINATES.name() + "*..100]->m " + "where has(m.pid) and m.pid = \"" + other.getId() + "\" " + "return m "; try { PrivilegeClass pc = Neo4JPLUSObjectFactory.newPrivilegeClass((Node)Neo4JStorage.execute(query).columnAs("m").next()); if(pc.getName().equals(other.getName())) return true; throw new PLUSException("Inconsistency: " + pc.getName() + " vs " + other.getName()); } catch(NoSuchElementException nse) { // This happens when no element was returned by the query, i.e. this privilege class doesn't dominate the other. 
return false; } catch(Exception exc) { exc.printStackTrace(); return false; } } // End dominates public Map<String, Object> getStorableProperties() { Map<String,Object> m = new HashMap<String,Object>(); m.put("name", getName()); m.put("pid", getId()); m.put("description", getDescription()); m.put("type", "privilegeclass"); m.put("created", getCreated()); return m; } public PrivilegeClass setProperties(PropertyContainer props) throws PLUSException { setName(""+props.getProperty("name")); setId(""+props.getProperty("pid")); setDescription(""+props.getProperty("description")); Long c = (long)props.getProperty("created", null); if(c != null) setCreated(c); else setCreated(System.currentTimeMillis()); return this; } // End setProperties } // End PrivilegeClass
minor toString() changes
src/main/java/org/mitre/provenance/user/PrivilegeClass.java
minor toString() changes
<ide><path>rc/main/java/org/mitre/provenance/user/PrivilegeClass.java <ide> } // End equals <ide> <ide> public String toString() { <del> return new String("[[" + getName() + "]]"); <add> return new String("(Privilege " + getName() + ")"); <ide> } <ide> <ide> /**
Java
apache-2.0
71cb0eb52f36403ebe61732fe66c9fed753f0aee
0
caskdata/cdap,anthcp/cdap,caskdata/cdap,hsaputra/cdap,chtyim/cdap,mpouttuclarke/cdap,mpouttuclarke/cdap,chtyim/cdap,hsaputra/cdap,mpouttuclarke/cdap,hsaputra/cdap,chtyim/cdap,hsaputra/cdap,anthcp/cdap,caskdata/cdap,caskdata/cdap,caskdata/cdap,anthcp/cdap,anthcp/cdap,chtyim/cdap,mpouttuclarke/cdap,anthcp/cdap,chtyim/cdap,chtyim/cdap,mpouttuclarke/cdap,caskdata/cdap,hsaputra/cdap
package com.continuuity.gateway.handlers; import com.continuuity.api.Application; import com.continuuity.api.ApplicationSpecification; import com.continuuity.api.annotation.Handle; import com.continuuity.api.procedure.AbstractProcedure; import com.continuuity.api.procedure.ProcedureRequest; import com.continuuity.api.procedure.ProcedureResponder; import com.continuuity.api.procedure.ProcedureSpecification; import com.continuuity.gateway.GatewayFastTestsSuite; import com.continuuity.http.AbstractHttpHandler; import com.continuuity.http.HttpResponder; import com.continuuity.http.NettyHttpService; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.reflect.TypeToken; import com.google.gson.Gson; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.message.BasicHeader; import org.apache.http.util.EntityUtils; import org.apache.twill.discovery.Discoverable; import org.apache.twill.discovery.DiscoveryService; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import java.lang.reflect.Type; import java.net.InetSocketAddress; import java.net.URLEncoder; import java.util.List; import java.util.Map; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import static com.continuuity.gateway.GatewayFastTestsSuite.doGet; import static com.continuuity.gateway.GatewayFastTestsSuite.doPost; import static 
org.jboss.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH; import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE; /** * Tests Procedure API Handling. */ public class ProcedureHandlerTest { private static final Gson GSON = new Gson(); private static final String hostname = "127.0.0.1"; private static final Type MAP_STRING_STRING_TYPE = new TypeToken<Map<String, String>>() { }.getType(); private static NettyHttpService httpService; private static int port; @BeforeClass public static void startProcedureServer() throws Exception { NettyHttpService.Builder builder = NettyHttpService.builder(); builder.addHttpHandlers(ImmutableSet.of(new TestHandler())); builder.setHost(hostname); builder.setPort(0); httpService = builder.build(); httpService.startAndWait(); // Register services of test server DiscoveryService discoveryService = GatewayFastTestsSuite.getInjector().getInstance(DiscoveryService.class); discoveryService.register(new Discoverable() { @Override public String getName() { return String.format("procedure.%s.%s.%s", "developer", "testApp1", "testProc1"); } @Override public InetSocketAddress getSocketAddress() { return httpService.getBindAddress(); } }); discoveryService.register(new Discoverable() { @Override public String getName() { return String.format("procedure.%s.%s.%s", "developer", "testApp2", "testProc2"); } @Override public InetSocketAddress getSocketAddress() { return httpService.getBindAddress(); } }); port = httpService.getBindAddress().getPort(); testTestServer(); } @AfterClass public static void stopProcedureServer() { httpService.stopAndWait(); } @Test public void testPostProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); Type type = MAP_STRING_STRING_TYPE; String contentStr = GSON.toJson(content, type); Assert.assertNotNull(contentStr); Assert.assertFalse(contentStr.isEmpty()); HttpResponse response = 
doPost("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1", contentStr, new Header[]{new BasicHeader("X-Test", "1234")}); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals(content, GSON.fromJson(responseStr, type)); } @Test public void testPostEmptyProcedureCall() throws Exception { HttpResponse response = doPost("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1", "", new Header[]{new BasicHeader("X-Test", "1234")}); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals("", responseStr); } @Test public void testPostNullProcedureCall() throws Exception { HttpResponse response = doPost("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1", null, new Header[]{new BasicHeader("X-Test", "1234")}); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals("", responseStr); } @Test public void testPostNoProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); HttpResponse response = GatewayFastTestsSuite.doPost("/v2/apps/testApp1/procedures/testProc2/methods/testMethod1", GSON.toJson(content, new TypeToken<Map<String, String>>() { }.getType())); Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); } /** * Test big content in Post request is not chunked. The content length is char[1423]. 
*/ @Test public void testPostBigContentProcedureCall() throws Exception { Map<String, String> content = new ImmutableMap.Builder<String, String>() .put("key1", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva1") .put("key2", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva2") .put("key3", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva3") .put("key4", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva4") .put("key5", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva5") .put("key6", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva6") .put("key7", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva7") .put("key8", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva8") .put("key9", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva9") .put("key10", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva10") .put("key11", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva11") .put("key12", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva12") .put("key13", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva13") .put("key14", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva14") .put("key15", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva15") .build(); Type type = MAP_STRING_STRING_TYPE; String contentStr = GSON.toJson(content, type); Assert.assertNotNull(contentStr); Assert.assertFalse(contentStr.isEmpty()); // Set entity chunked StringEntity entity = new StringEntity(contentStr); entity.setChunked(true); HttpPost post = 
GatewayFastTestsSuite.getPost("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1"); post.setHeader("Expect", "100-continue"); post.setEntity(entity); HttpResponse response = GatewayFastTestsSuite.doPost(post); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals(content, GSON.fromJson(responseStr, type)); } @Test public void testPostChunkedProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key5", "val5"); Type type = MAP_STRING_STRING_TYPE; String contentStr = GSON.toJson(content, type); Assert.assertNotNull(contentStr); Assert.assertFalse(contentStr.isEmpty()); HttpResponse response = GatewayFastTestsSuite.doPost("/v2/apps/testApp2/procedures/testProc2/methods/testChunkedMethod", contentStr); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String expected = contentStr + contentStr; String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals(expected, responseStr); } @Test public void testPostErrorProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); HttpResponse response = GatewayFastTestsSuite.doPost("/v2/apps/testApp2/procedures/testProc2/methods/testExceptionMethod", GSON.toJson(content, new TypeToken<Map<String, String>>() { }.getType())); Assert.assertEquals(HttpResponseStatus.INTERNAL_SERVER_ERROR.getCode(), response.getStatusLine().getStatusCode()); } @Test public void testGetProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1&", "val1=", "key3", "\"val3\""); HttpResponse response = doGet("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1?" 
+ getQueryParams(content), new Header[]{new BasicHeader("X-Test", "1234")}); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); } @Test public void testGetEmptyProcedureCall() throws Exception { HttpResponse response = doGet("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1", new Header[]{new BasicHeader("X-Test", "1234")}); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals("", responseStr); } @Test public void testGetNoProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); HttpResponse response = GatewayFastTestsSuite.doGet("/v2/apps/testApp1/procedures/testProc2/methods/testMethod1&" + getQueryParams (content)); Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); } @Test public void testGetChunkedProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key5", "val5"); Type type = MAP_STRING_STRING_TYPE; String contentStr = GSON.toJson(content, type); Assert.assertNotNull(contentStr); Assert.assertFalse(contentStr.isEmpty()); HttpResponse response = GatewayFastTestsSuite.doGet("/v2/apps/testApp2/procedures/testProc2/methods/testChunkedMethod?" + getQueryParams(content)); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); } @Test public void testGetErrorProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); HttpResponse response = GatewayFastTestsSuite.doGet("/v2/apps/testApp2/procedures/testProc2/methods/testExceptionMethod?" 
+ getQueryParams(content)); Assert.assertEquals(HttpResponseStatus.INTERNAL_SERVER_ERROR.getCode(), response.getStatusLine().getStatusCode()); } @Test public void testRealProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); //Make procedure call without deploying ProcedureTestApp HttpResponse response = GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" + getQueryParams(content)); Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); Assert.assertEquals("Router cannot forward this request to any service", EntityUtils.toString(response.getEntity())); // Deploy procedure, but do not start it. AppFabricServiceHandlerTest.deploy(ProcedureTestApp.class); response = GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" + getQueryParams(content)); Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); Assert.assertEquals("Router cannot forward this request to any service", EntityUtils.toString(response.getEntity())); // Start procedure response = GatewayFastTestsSuite.doPost("/v2/apps/ProcedureTestApp/procedures/TestProcedure/start", null); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); // Make procedure call response = GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" 
+ getQueryParams(content)); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); Assert.assertEquals(content, GSON.fromJson(EntityUtils.toString(response.getEntity()), MAP_STRING_STRING_TYPE)); // Stop procedure response = GatewayFastTestsSuite.doPost("/v2/apps/ProcedureTestApp/procedures/TestProcedure/stop", null); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); response = GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" + getQueryParams(content)); Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); Assert.assertEquals("Router cannot forward this request to any service", EntityUtils.toString(response.getEntity())); // Delete app response = GatewayFastTestsSuite.doDelete("/v2/apps/ProcedureTestApp"); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); response = GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" + getQueryParams(content)); Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); Assert.assertEquals("Router cannot forward this request to any service", EntityUtils.toString(response.getEntity())); } /** * Handler for test server. 
*/ public static class TestHandler extends AbstractHttpHandler { @POST @GET @Path("/v2/apps/{appId}/procedures/{procedureName}/methods/{methodName}") public void handle(HttpRequest request, final HttpResponder responder, @PathParam("appId") String appId, @PathParam("procedureName") String procedureName, @PathParam("methodName") String methodName) { // /apps/testApp1/procedures/testProc1/testMethod1 if ("testApp1".equals(appId) && "testProc1".equals(procedureName) && "testMethod1".equals(methodName)) { byte [] content = request.getContent().array(); ImmutableMultimap.Builder<String, String> headerBuilder = ImmutableMultimap.builder(); for (Map.Entry<String, String> entry : request.getHeaders()) { headerBuilder.put(entry.getKey(), entry.getValue()); } if (request.getHeader(CONTENT_TYPE) == null) { headerBuilder.put(CONTENT_TYPE, "text/plain"); } if (request.getHeader(CONTENT_LENGTH) == null || Integer.parseInt(request.getHeader(CONTENT_LENGTH)) == 0) { headerBuilder.put(CONTENT_LENGTH, Integer.toString(content.length)); } responder.sendByteArray(HttpResponseStatus.OK, content, headerBuilder.build()); } else if ("testApp2".equals(appId) && "testProc2".equals(procedureName) && "testChunkedMethod".equals(methodName)) { // /apps/testApp2/procedures/testProc2/testChunkedMethod responder.sendChunkStart(HttpResponseStatus.OK, ImmutableMultimap.of(CONTENT_TYPE, "text/plain")); responder.sendChunk(ChannelBuffers.wrappedBuffer(request.getContent().array())); responder.sendChunk(ChannelBuffers.wrappedBuffer(request.getContent().array())); responder.sendChunkEnd(); } else if ("testApp2".equals(appId) && "testProc2".equals(procedureName) && "testExceptionMethod".equals(methodName)) { // /apps/testApp2/procedures/testProc2/testExceptionMethod responder.sendStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR); } else { responder.sendStatus(HttpResponseStatus.NOT_FOUND); } } } private static void testTestServer() throws Exception { DefaultHttpClient httpclient = new DefaultHttpClient(); 
HttpPost request = new HttpPost(String.format( "http://%s:%d/v2/apps/testApp1/procedures/testProc1/methods/testMethod1", hostname, port)); HttpResponse response = httpclient.execute(request); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); } private String getQueryParams(Map<String, String> params) throws Exception { List<String> plist = Lists.newArrayList(); for (Map.Entry<String, String> entry : params.entrySet()) { plist.add(String.format("%s=%s", URLEncoder.encode(entry.getKey(), "utf-8"), URLEncoder.encode(entry.getValue(), "utf-8"))); } return Joiner.on("&").join(plist); } /** * App to test Procedure API Handling. */ public static class ProcedureTestApp implements Application { @Override public ApplicationSpecification configure() { return ApplicationSpecification.Builder.with() .setName("ProcedureTestApp") .setDescription("App to test Procedure API Handling") .noStream() .noDataSet() .noFlow() .withProcedures() .add(new TestProcedure()) .noMapReduce() .noWorkflow() .build(); } /** * TestProcedure handler. */ public static class TestProcedure extends AbstractProcedure { @Override public ProcedureSpecification configure() { return ProcedureSpecification.Builder.with() .setName("TestProcedure") .setDescription("Test Procedure") .build(); } @SuppressWarnings("UnusedDeclaration") @Handle("TestMethod") public void testMethod1(ProcedureRequest request, ProcedureResponder responder) throws Exception { responder.sendJson(request.getArguments()); } } } }
gateway/src/test/java/com/continuuity/gateway/handlers/ProcedureHandlerTest.java
package com.continuuity.gateway.handlers; import com.continuuity.api.Application; import com.continuuity.api.ApplicationSpecification; import com.continuuity.api.annotation.Handle; import com.continuuity.api.procedure.AbstractProcedure; import com.continuuity.api.procedure.ProcedureRequest; import com.continuuity.api.procedure.ProcedureResponder; import com.continuuity.api.procedure.ProcedureSpecification; import com.continuuity.gateway.GatewayFastTestsSuite; import com.continuuity.http.AbstractHttpHandler; import com.continuuity.http.HttpResponder; import com.continuuity.http.NettyHttpService; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMultimap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.reflect.TypeToken; import com.google.gson.Gson; import org.apache.http.Header; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.message.BasicHeader; import org.apache.http.util.EntityUtils; import org.apache.twill.discovery.Discoverable; import org.apache.twill.discovery.DiscoveryService; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.handler.codec.http.HttpRequest; import org.jboss.netty.handler.codec.http.HttpResponseStatus; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; import java.lang.reflect.Type; import java.net.InetSocketAddress; import java.net.URLEncoder; import java.util.List; import java.util.Map; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import static com.continuuity.gateway.GatewayFastTestsSuite.doGet; import static com.continuuity.gateway.GatewayFastTestsSuite.doPost; import static 
org.jboss.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH; import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE; /** * Tests Procedure API Handling. */ public class ProcedureHandlerTest { private static final Gson GSON = new Gson(); private static final String hostname = "127.0.0.1"; private static final Type MAP_STRING_STRING_TYPE = new TypeToken<Map<String, String>>() { }.getType(); private static NettyHttpService httpService; private static int port; @BeforeClass public static void startProcedureServer() throws Exception { NettyHttpService.Builder builder = NettyHttpService.builder(); builder.addHttpHandlers(ImmutableSet.of(new TestHandler())); builder.setHost(hostname); builder.setPort(0); httpService = builder.build(); httpService.startAndWait(); // Register services of test server DiscoveryService discoveryService = GatewayFastTestsSuite.getInjector().getInstance(DiscoveryService.class); discoveryService.register(new Discoverable() { @Override public String getName() { return String.format("procedure.%s.%s.%s", "developer", "testApp1", "testProc1"); } @Override public InetSocketAddress getSocketAddress() { return httpService.getBindAddress(); } }); discoveryService.register(new Discoverable() { @Override public String getName() { return String.format("procedure.%s.%s.%s", "developer", "testApp2", "testProc2"); } @Override public InetSocketAddress getSocketAddress() { return httpService.getBindAddress(); } }); port = httpService.getBindAddress().getPort(); testTestServer(); } @AfterClass public static void stopProcedureServer() { httpService.stopAndWait(); } @Test public void testPostProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); Type type = MAP_STRING_STRING_TYPE; String contentStr = GSON.toJson(content, type); Assert.assertNotNull(contentStr); Assert.assertFalse(contentStr.isEmpty()); HttpResponse response = 
doPost("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1", contentStr, new Header[]{new BasicHeader("X-Test", "1234")}); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals(content, GSON.fromJson(responseStr, type)); } @Test public void testPostEmptyProcedureCall() throws Exception { HttpResponse response = doPost("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1", "", new Header[]{new BasicHeader("X-Test", "1234")}); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals("", responseStr); } @Test public void testPostNullProcedureCall() throws Exception { HttpResponse response = doPost("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1", null, new Header[]{new BasicHeader("X-Test", "1234")}); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals("", responseStr); } @Test public void testPostNoProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); HttpResponse response = GatewayFastTestsSuite.doPost("/v2/apps/testApp1/procedures/testProc2/methods/testMethod1", GSON.toJson(content, new TypeToken<Map<String, String>>() { }.getType())); Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); } /** * Test big content in Post request is not chunked. The content length is char[1423]. 
*/ @Test public void testPostBigContentProcedureCall() throws Exception { Map<String, String> content = new ImmutableMap.Builder<String, String>() .put("key1", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva1") .put("key2", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva2") .put("key3", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva3") .put("key4", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva4") .put("key5", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva5") .put("key6", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva6") .put("key7", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva7") .put("key8", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva8") .put("key9", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva9") .put("key10", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva10") .put("key11", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva11") .put("key12", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva12") .put("key13", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva13") .put("key14", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva14") .put("key15", "valvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalvalva15") .build(); Type type = MAP_STRING_STRING_TYPE; String contentStr = GSON.toJson(content, type); Assert.assertNotNull(contentStr); Assert.assertFalse(contentStr.isEmpty()); // Set entity chunked StringEntity entity = new StringEntity(contentStr); entity.setChunked(true); HttpPost post = 
GatewayFastTestsSuite.getPost("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1"); post.setHeader("Expect", "100-continue"); post.setEntity(entity); HttpResponse response = GatewayFastTestsSuite.doPost(post); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals(content, GSON.fromJson(responseStr, type)); } @Test public void testPostChunkedProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key5", "val5"); Type type = MAP_STRING_STRING_TYPE; String contentStr = GSON.toJson(content, type); Assert.assertNotNull(contentStr); Assert.assertFalse(contentStr.isEmpty()); HttpResponse response = GatewayFastTestsSuite.doPost("/v2/apps/testApp2/procedures/testProc2/methods/testChunkedMethod", contentStr); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String expected = contentStr + contentStr; String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals(expected, responseStr); } @Test public void testPostErrorProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); HttpResponse response = GatewayFastTestsSuite.doPost("/v2/apps/testApp2/procedures/testProc2/methods/testExceptionMethod", GSON.toJson(content, new TypeToken<Map<String, String>>() { }.getType())); Assert.assertEquals(HttpResponseStatus.INTERNAL_SERVER_ERROR.getCode(), response.getStatusLine().getStatusCode()); } @Test public void testGetProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1&", "val1=", "key3", "\"val3\""); HttpResponse response = doGet("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1?" 
+ getQueryParams(content), new Header[]{new BasicHeader("X-Test", "1234")}); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); } @Test public void testGetEmptyProcedureCall() throws Exception { HttpResponse response = doGet("/v2/apps/testApp1/procedures/testProc1/methods/testMethod1", new Header[]{new BasicHeader("X-Test", "1234")}); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); String responseStr = EntityUtils.toString(response.getEntity()); Assert.assertEquals("", responseStr); } @Test public void testGetNoProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); HttpResponse response = GatewayFastTestsSuite.doGet("/v2/apps/testApp1/procedures/testProc2/methods/testMethod1&" + getQueryParams (content)); Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); } @Test public void testGetChunkedProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key5", "val5"); Type type = MAP_STRING_STRING_TYPE; String contentStr = GSON.toJson(content, type); Assert.assertNotNull(contentStr); Assert.assertFalse(contentStr.isEmpty()); HttpResponse response = GatewayFastTestsSuite.doGet("/v2/apps/testApp2/procedures/testProc2/methods/testChunkedMethod?" + getQueryParams(content)); Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); } @Test public void testGetErrorProcedureCall() throws Exception { Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); HttpResponse response = GatewayFastTestsSuite.doGet("/v2/apps/testApp2/procedures/testProc2/methods/testExceptionMethod?" 
+ getQueryParams(content));
    Assert.assertEquals(HttpResponseStatus.INTERNAL_SERVER_ERROR.getCode(), response.getStatusLine().getStatusCode());
  }

  /**
   * End-to-end exercise of the procedure REST API against a really deployed app:
   * deploy {@link ProcedureTestApp}, start its procedure, call a method, stop the
   * procedure and finally delete the app.  The commented-out assertions document
   * the expected not-deployed / not-running error behaviour that cannot be
   * asserted until the Router refactoring lands (see TODOs below).
   */
  @Test
  public void testRealProcedureCall() throws Exception {
    // Arguments the procedure method is expected to echo back as a JSON map.
    Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3");

    //TODO: 404 won't be returned until Router refactoring
    //Second assertion won't be valid even after refactoring
    // Make procedure call without deploying ProcedureTestApp
    //HttpResponse response =
    //  GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" +
    //    getQueryParams(content));
    //Assert.assertEquals(HttpResponseStatus.NOT_FOUND.getCode(), response.getStatusLine().getStatusCode());
    //Assert.assertEquals("Procedure not deployed", EntityUtils.toString(response.getEntity()));

    // Deploy procedure, but do not start it.
    AppFabricServiceHandlerTest.deploy(ProcedureTestApp.class);

    //TODO: 404 won't be returned until Router refactoring
    //Second assertion won't be valid even after refactoring
    //response =
    //  GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" +
    //    getQueryParams(content));
    //Assert.assertEquals(HttpResponseStatus.NOT_FOUND.getCode(), response.getStatusLine().getStatusCode());
    //Assert.assertEquals("Procedure not running", EntityUtils.toString(response.getEntity()));

    // Start procedure
    HttpResponse response =
      GatewayFastTestsSuite.doPost("/v2/apps/ProcedureTestApp/procedures/TestProcedure/start", null);
    Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode());

    // Make procedure call.  TestProcedure.TestMethod echoes the query parameters back as JSON,
    // so the decoded response must equal the map we sent.
    response = GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?"
                                             + getQueryParams(content));
    Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode());
    Assert.assertEquals(content, GSON.fromJson(EntityUtils.toString(response.getEntity()), MAP_STRING_STRING_TYPE));

    // Stop procedure
    response =
      GatewayFastTestsSuite.doPost("/v2/apps/ProcedureTestApp/procedures/TestProcedure/stop", null);
    Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode());

    //TODO: 404 won't be returned until Router refactoring
    //Second assertion won't be valid even after refactoring
    //response =
    //  GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" +
    //    getQueryParams(content));
    //Assert.assertEquals(HttpResponseStatus.NOT_FOUND.getCode(), response.getStatusLine().getStatusCode());
    //Assert.assertEquals("Procedure not running", EntityUtils.toString(response.getEntity()));

    // Delete app
    response = GatewayFastTestsSuite.doDelete("/v2/apps/ProcedureTestApp");
    Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode());

    //TODO: 404 won't be returned until Router refactoring
    //Second assertion won't be valid even after refactoring
    //response =
    //  GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" +
    //    getQueryParams(content));
    //Assert.assertEquals(HttpResponseStatus.NOT_FOUND.getCode(), response.getStatusLine().getStatusCode());
    //Assert.assertEquals("Procedure not deployed", EntityUtils.toString(response.getEntity()));
  }

  /**
   * Handler for test server.
   * Requests of the form /v2/apps/{appId}/procedures/{procedureName}/methods/{methodName}
   * are dispatched to canned behaviours used by the fake-procedure tests above:
   * echo, chunked echo, forced 500, or 404 for anything else.
   */
  public static class TestHandler extends AbstractHttpHandler {
    @POST
    @GET
    @Path("/v2/apps/{appId}/procedures/{procedureName}/methods/{methodName}")
    public void handle(HttpRequest request, final HttpResponder responder,
                       @PathParam("appId") String appId,
                       @PathParam("procedureName") String procedureName,
                       @PathParam("methodName") String methodName) {
      // /apps/testApp1/procedures/testProc1/testMethod1
      if ("testApp1".equals(appId) && "testProc1".equals(procedureName) && "testMethod1".equals(methodName)) {
        // Echo the request body back verbatim, copying all request headers and
        // defaulting Content-Type / Content-Length when the client omitted them.
        byte [] content = request.getContent().array();

        ImmutableMultimap.Builder<String, String> headerBuilder = ImmutableMultimap.builder();
        for (Map.Entry<String, String> entry : request.getHeaders()) {
          headerBuilder.put(entry.getKey(), entry.getValue());
        }
        if (request.getHeader(CONTENT_TYPE) == null) {
          headerBuilder.put(CONTENT_TYPE, "text/plain");
        }
        if (request.getHeader(CONTENT_LENGTH) == null || Integer.parseInt(request.getHeader(CONTENT_LENGTH)) == 0) {
          headerBuilder.put(CONTENT_LENGTH, Integer.toString(content.length));
        }

        responder.sendByteArray(HttpResponseStatus.OK, content, headerBuilder.build());
      } else if ("testApp2".equals(appId) && "testProc2".equals(procedureName)
          && "testChunkedMethod".equals(methodName)) {
        // /apps/testApp2/procedures/testProc2/testChunkedMethod
        // Echo the request body twice as a chunked response.
        responder.sendChunkStart(HttpResponseStatus.OK, ImmutableMultimap.of(CONTENT_TYPE, "text/plain"));
        responder.sendChunk(ChannelBuffers.wrappedBuffer(request.getContent().array()));
        responder.sendChunk(ChannelBuffers.wrappedBuffer(request.getContent().array()));
        responder.sendChunkEnd();
      } else if ("testApp2".equals(appId) && "testProc2".equals(procedureName)
          && "testExceptionMethod".equals(methodName)) {
        // /apps/testApp2/procedures/testProc2/testExceptionMethod
        responder.sendStatus(HttpResponseStatus.INTERNAL_SERVER_ERROR);
      } else {
        responder.sendStatus(HttpResponseStatus.NOT_FOUND);
      }
    }
  }

  /**
   * Smoke-check that the in-process test server answers the echo route with 200 OK.
   * NOTE(review): appears unused within this part of the file -- presumably invoked
   * from a setup method elsewhere; confirm before removing.
   */
  private static void testTestServer() throws Exception {
    DefaultHttpClient httpclient = new DefaultHttpClient();
    HttpPost request = new HttpPost(String.format(
      "http://%s:%d/v2/apps/testApp1/procedures/testProc1/methods/testMethod1", hostname, port));
    HttpResponse response = httpclient.execute(request);
    Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode());
  }

  /**
   * URL-encodes the given map into a query string of the form k1=v1&amp;k2=v2.
   * Both keys and values are UTF-8 percent-encoded.
   */
  private String getQueryParams(Map<String, String> params) throws Exception {
    List<String> plist = Lists.newArrayList();
    for (Map.Entry<String, String> entry : params.entrySet()) {
      plist.add(String.format("%s=%s",
                              URLEncoder.encode(entry.getKey(), "utf-8"),
                              URLEncoder.encode(entry.getValue(), "utf-8")));
    }
    return Joiner.on("&").join(plist);
  }

  /**
   * App to test Procedure API Handling.
   */
  public static class ProcedureTestApp implements Application {
    @Override
    public ApplicationSpecification configure() {
      return ApplicationSpecification.Builder.with()
        .setName("ProcedureTestApp")
        .setDescription("App to test Procedure API Handling")
        .noStream()
        .noDataSet()
        .noFlow()
        .withProcedures()
        .add(new TestProcedure())
        .noMapReduce()
        .noWorkflow()
        .build();
    }

    /**
     * TestProcedure handler.
     */
    public static class TestProcedure extends AbstractProcedure {
      @Override
      public ProcedureSpecification configure() {
        return ProcedureSpecification.Builder.with()
          .setName("TestProcedure")
          .setDescription("Test Procedure")
          .build();
      }

      // Echoes the request arguments back to the caller as JSON.
      @SuppressWarnings("UnusedDeclaration")
      @Handle("TestMethod")
      public void testMethod1(ProcedureRequest request, ProcedureResponder responder) throws Exception {
        responder.sendJson(request.getArguments());
      }
    }
  }
}
Enhanced procedure service not found test
gateway/src/test/java/com/continuuity/gateway/handlers/ProcedureHandlerTest.java
Enhanced procedure service not found test
<ide><path>ateway/src/test/java/com/continuuity/gateway/handlers/ProcedureHandlerTest.java <ide> public void testRealProcedureCall() throws Exception { <ide> Map<String, String> content = ImmutableMap.of("key1", "val1", "key3", "val3"); <ide> <del> //TODO: 404 won't be returned until Router refactoring <del> //Second assertion won't be valid even after refactoring <del> // Make procedure call without deploying ProcedureTestApp <del> //HttpResponse response = <del> // GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" + <del> // getQueryParams(content)); <del> //Assert.assertEquals(HttpResponseStatus.NOT_FOUND.getCode(), response.getStatusLine().getStatusCode()); <del> //Assert.assertEquals("Procedure not deployed", EntityUtils.toString(response.getEntity())); <add> //Make procedure call without deploying ProcedureTestApp <add> HttpResponse response = <add> GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" + <add> getQueryParams(content)); <add> Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); <add> Assert.assertEquals("Router cannot forward this request to any service", EntityUtils.toString(response.getEntity())); <ide> <ide> // Deploy procedure, but do not start it. <ide> AppFabricServiceHandlerTest.deploy(ProcedureTestApp.class); <ide> <del> //TODO: 404 won't be returned until Router refactoring <del> //Second assertion won't be valid even after refactoring <del> //response = <del> // GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" 
+ <del> // getQueryParams(content)); <del> //Assert.assertEquals(HttpResponseStatus.NOT_FOUND.getCode(), response.getStatusLine().getStatusCode()); <del> //Assert.assertEquals("Procedure not running", EntityUtils.toString(response.getEntity())); <add> response = <add> GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" + <add> getQueryParams(content)); <add> Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); <add> Assert.assertEquals("Router cannot forward this request to any service", EntityUtils.toString(response.getEntity())); <ide> <ide> // Start procedure <del> HttpResponse response = <add> response = <ide> GatewayFastTestsSuite.doPost("/v2/apps/ProcedureTestApp/procedures/TestProcedure/start", null); <ide> Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); <ide> <ide> GatewayFastTestsSuite.doPost("/v2/apps/ProcedureTestApp/procedures/TestProcedure/stop", null); <ide> Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); <ide> <del> //TODO: 404 won't be returned until Router refactoring <del> //Second assertion won't be valid even after refactoring <del> //response = <del> // GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" + <del> // getQueryParams(content)); <del> //Assert.assertEquals(HttpResponseStatus.NOT_FOUND.getCode(), response.getStatusLine().getStatusCode()); <del> //Assert.assertEquals("Procedure not running", EntityUtils.toString(response.getEntity())); <add> response = <add> GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" 
+ <add> getQueryParams(content)); <add> Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); <add> Assert.assertEquals("Router cannot forward this request to any service", EntityUtils.toString(response.getEntity())); <ide> <ide> <ide> // Delete app <ide> response = GatewayFastTestsSuite.doDelete("/v2/apps/ProcedureTestApp"); <ide> Assert.assertEquals(HttpResponseStatus.OK.getCode(), response.getStatusLine().getStatusCode()); <ide> <del> //TODO: 404 won't be returned until Router refactoring <del> //Second assertion won't be valid even after refactoring <del> //response = <del> // GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" + <del> // getQueryParams(content)); <del> //Assert.assertEquals(HttpResponseStatus.NOT_FOUND.getCode(), response.getStatusLine().getStatusCode()); <del> //Assert.assertEquals("Procedure not deployed", EntityUtils.toString(response.getEntity())); <add> response = <add> GatewayFastTestsSuite.doGet("/v2/apps/ProcedureTestApp/procedures/TestProcedure/methods/TestMethod?" + <add> getQueryParams(content)); <add> Assert.assertEquals(HttpResponseStatus.SERVICE_UNAVAILABLE.getCode(), response.getStatusLine().getStatusCode()); <add> Assert.assertEquals("Router cannot forward this request to any service", EntityUtils.toString(response.getEntity())); <ide> } <ide> <ide> /**
Java
mit
2d5613b19ec238543bc6c118c787440aab45d787
0
kiruthikasp/PushPlugin,kiruthikasp/PushPlugin,kiruthikasp/PushPlugin,kiruthikasp/PushPlugin,kiruthikasp/PushPlugin
package com.adobe.phonegap.push;

import android.app.NotificationManager;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.Bundle;
import java.io.File;
import android.util.Log;
import android.widget.Toast;
// NOTE(review): duplicate import of android.content.Context (also imported above) -- legal but should be removed.
import android.content.Context;
import android.content.Intent;
import android.media.RingtoneManager;
import android.view.View.OnClickListener;
import android.app.Activity;
import android.media.Ringtone;
import android.widget.TextView;
import android.view.View;
import android.widget.Button;
import android.database.Cursor;
import android.view.Menu;
import java.lang.NullPointerException;
import java.util.Timer;
import java.util.TimerTask;

import com.google.android.gms.gcm.GcmPubSub;
import com.google.android.gms.iid.InstanceID;

import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaInterface;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.CordovaWebView;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import android.net.Uri;
import org.json.JSONException;
import org.json.JSONObject;

import java.io.IOException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;

/**
 * Cordova plugin bridging GCM push registration and notification delivery to JavaScript.
 * JS calls arrive via {@link #execute}; push payloads received natively are forwarded to JS
 * through {@link #sendExtras}/{@link #sendEvent} over a long-lived callback channel.
 */
public class PushPlugin extends CordovaPlugin implements PushConstants {
  public static final String LOG_TAG = "PushPlugin";

  // Long-lived callback set by the INITIALIZE action; all push events are streamed through it.
  private static CallbackContext pushContext;
  private static CordovaWebView gWebView;
  // Extras from a push received while no webview was attached; replayed once INITIALIZE runs.
  private static Bundle gCachedExtras = null;
  // Whether the Cordova activity is currently in the foreground (see onPause/onResume).
  private static boolean gForeground = false;

  // Separate prefs file holding only the user-selected notification ringtone URI ("soundpath").
  public static final String MY_PREFS_NAME = "MyPrefsFile";

  /**
   * Gets the application context from cordova's main activity.
   * @return the application context
   */
  private Context getApplicationContext() {
    return this.cordova.getActivity().getApplicationContext();
  }

  /**
   * Entry point for all JS-to-native calls.  Supported actions: INITIALIZE (obtain a GCM
   * registration token, subscribe to topics, persist notification options), UNREGISTER
   * (unsubscribe from topics or delete the instance ID and clear prefs), FINISH (no-op ack),
   * HAS_PERMISSION (report notification permission), SELECT (open the system ringtone picker).
   *
   * @param action          action name sent from JavaScript
   * @param data            JSON arguments for the action
   * @param callbackContext channel for returning the result to JavaScript
   * @return false only for unknown actions; true otherwise (async actions report via callback)
   */
  @Override
  public boolean execute(final String action, final JSONArray data, final CallbackContext callbackContext) {
    Log.v(LOG_TAG, "execute: action=" + action);
    gWebView = this.webView;

    if (INITIALIZE.equals(action)) {
      // Registration involves network I/O (InstanceID.getToken), so run off the UI thread.
      cordova.getThreadPool().execute(new Runnable() {
        public void run() {
          pushContext = callbackContext;
          JSONObject jo = null;
          Log.v(LOG_TAG, "execute: data=" + data.toString());
          SharedPreferences sharedPref =
            getApplicationContext().getSharedPreferences(COM_ADOBE_PHONEGAP_PUSH, Context.MODE_PRIVATE);
          String token = null;
          String senderID = null;
          // NOTE(review): soundpath is never assigned in this method, so SOUNDPATH is always
          // persisted as null below.  Presumably the value chosen via the SELECT action (stored
          // in MY_PREFS_NAME) is meant to be used instead -- confirm and remove or wire up.
          String soundpath = null;
          try {
            jo = data.getJSONObject(0).getJSONObject(ANDROID);

            Log.v(LOG_TAG, "execute: jo=" + jo.toString());

            senderID = jo.getString(SENDER_ID);

            Log.v(LOG_TAG, "execute: senderID=" + senderID);

            String savedSenderID = sharedPref.getString(SENDER_ID, "");
            String savedRegID = sharedPref.getString(REGISTRATION_ID, "");

            // first time run get new token
            if ("".equals(savedRegID)) {
              token = InstanceID.getInstance(getApplicationContext()).getToken(senderID, GCM);
            }
            // new sender ID, re-register
            else if (!savedSenderID.equals(senderID)) {
              token = InstanceID.getInstance(getApplicationContext()).getToken(senderID, GCM);
            }
            // use the saved one
            else {
              token = sharedPref.getString(REGISTRATION_ID, "");
            }

            if (!"".equals(token)) {
              JSONObject json = new JSONObject().put(REGISTRATION_ID, token);

              Log.v(LOG_TAG, "onRegistered: " + json.toString());

              JSONArray topics = jo.optJSONArray(TOPICS);
              subscribeToTopics(topics, token);

              PushPlugin.sendEvent( json );
            } else {
              callbackContext.error("Empty registration ID received from GCM");
              return;
            }
          } catch (JSONException e) {
            Log.e(LOG_TAG, "execute: Got JSON Exception " + e.getMessage());
            callbackContext.error(e.getMessage());
          } catch (IOException e) {
            // NOTE(review): log text says "JSON Exception" but this is the IOException path
            // (token fetch failure) -- misleading message.
            Log.e(LOG_TAG, "execute: Got JSON Exception " + e.getMessage());
            callbackContext.error(e.getMessage());
          }

          // Persist the notification options even if registration failed, as long as
          // the Android options object itself parsed.
          if (jo != null) {
            SharedPreferences.Editor editor = sharedPref.edit();
            try {
              editor.putString(ICON, jo.getString(ICON));
            } catch (JSONException e) {
              Log.d(LOG_TAG, "no icon option");
            }
            try {
              editor.putString(ICON_COLOR, jo.getString(ICON_COLOR));
            } catch (JSONException e) {
              Log.d(LOG_TAG, "no iconColor option");
            }
            editor.putBoolean(SOUND, jo.optBoolean(SOUND, true));
            editor.putString(SOUNDPATH, soundpath);
            editor.putBoolean(VIBRATE, jo.optBoolean(VIBRATE, true));
            editor.putBoolean(CLEAR_NOTIFICATIONS, jo.optBoolean(CLEAR_NOTIFICATIONS, true));
            editor.putBoolean(FORCE_SHOW, jo.optBoolean(FORCE_SHOW, false));
            editor.putString(SENDER_ID, senderID);
            editor.putString(REGISTRATION_ID, token);
            editor.commit();
          }

          // Replay any notification that arrived before the JS side was ready.
          if (gCachedExtras != null) {
            Log.v(LOG_TAG, "sending cached extras");
            sendExtras(gCachedExtras);
            gCachedExtras = null;
          }
        }
      });
    } else if (UNREGISTER.equals(action)) {
      cordova.getThreadPool().execute(new Runnable() {
        public void run() {
          try {
            SharedPreferences sharedPref =
              getApplicationContext().getSharedPreferences(COM_ADOBE_PHONEGAP_PUSH, Context.MODE_PRIVATE);
            String token = sharedPref.getString(REGISTRATION_ID, "");
            JSONArray topics = data.optJSONArray(0);
            if (topics != null && !"".equals(token)) {
              // Topic list supplied: only unsubscribe from those topics, keep registration.
              unsubscribeFromTopics(topics, token);
            } else {
              // Full unregister: delete the instance ID and wipe saved options.
              InstanceID.getInstance(getApplicationContext()).deleteInstanceID();
              Log.v(LOG_TAG, "UNREGISTER");

              // Remove shared prefs
              SharedPreferences.Editor editor = sharedPref.edit();
              editor.remove(SOUND);
              editor.remove(SOUNDPATH);
              editor.remove(VIBRATE);
              editor.remove(CLEAR_NOTIFICATIONS);
              editor.remove(FORCE_SHOW);
              editor.remove(SENDER_ID);
              editor.remove(REGISTRATION_ID);
              editor.commit();
            }
            callbackContext.success();
          } catch (IOException e) {
            // NOTE(review): message says "JSON Exception" but this catches IOException.
            Log.e(LOG_TAG, "execute: Got JSON Exception " + e.getMessage());
            callbackContext.error(e.getMessage());
          }
        }
      });
    } else if (FINISH.equals(action)) {
      callbackContext.success();
    } else if (HAS_PERMISSION.equals(action)) {
      cordova.getThreadPool().execute(new Runnable() {
        public void run() {
          JSONObject jo = new JSONObject();
          try {
            jo.put("isEnabled",
                   PermissionUtils.hasPermission(getApplicationContext(), "OP_POST_NOTIFICATION"));
            PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, jo);
            pluginResult.setKeepCallback(true);
            callbackContext.sendPluginResult(pluginResult);
          } catch (UnknownError e) {
            // NOTE(review): catching UnknownError is unusual; NullPointerException etc.
            // would escape this handler -- confirm intent.
            callbackContext.error(e.getMessage());
          } catch (JSONException e) {
            callbackContext.error(e.getMessage());
          }
        }
      });
    } else if (SELECT.equals(action)){
      // Intent intent = new Intent(RingtoneManager.ACTION_RINGTONE_PICKER);
      // intent.putExtra(RingtoneManager.EXTRA_RINGTONE_TITLE, "Select ringtone for notifications:");
      // intent.putExtra(RingtoneManager.EXTRA_RINGTONE_SHOW_SILENT, false);
      // intent.putExtra(RingtoneManager.EXTRA_RINGTONE_SHOW_DEFAULT, true);
      // intent.putExtra(RingtoneManager.EXTRA_RINGTONE_TYPE,RingtoneManager.TYPE_ALARM);
      // this.startActivityForResult( intent, 999);
      try {
        //Context context=this.cordova.getActivity().getApplicationContext();
        //or Context context=cordova.getActiivity().getApplicationContext();
        this.cordova.setActivityResultCallback(this);
        Intent intent = new Intent(RingtoneManager.ACTION_RINGTONE_PICKER);
        intent.putExtra(RingtoneManager.EXTRA_RINGTONE_TYPE,
                        RingtoneManager.TYPE_NOTIFICATION | RingtoneManager.TYPE_RINGTONE);
        intent.putExtra(RingtoneManager.EXTRA_RINGTONE_SHOW_DEFAULT, true);
        intent.putExtra(RingtoneManager.EXTRA_RINGTONE_DEFAULT_URI,
                        RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION));
        this.cordova.getActivity().startActivityForResult(intent, 999);
        // NOTE(review): startActivityForResult is asynchronous -- the prefs read below runs
        // immediately, BEFORE onActivityResult stores the new choice.  The name returned to JS
        // is therefore the PREVIOUS selection, and on first use "soundpath" is null, making
        // Uri.parse(s) throw a NullPointerException that the catch below (UnknownError only)
        // does not handle.  The success callback should instead be issued from
        // onActivityResult -- confirm and fix.
        SharedPreferences sharedPref =
          getApplicationContext().getSharedPreferences(MY_PREFS_NAME, Context.MODE_PRIVATE);
        String s = sharedPref.getString("soundpath",null);
        Uri u = Uri.parse(s);
        File f = new File("" + u);
        String r = f.getName();
        callbackContext.success(r);
        // Toast.makeText(this.cordova.getActivity().getApplicationContext(),"called",
        // 4000).show();
      } catch (UnknownError e) {
        callbackContext.error(e.getMessage());
      }
    } else {
      Log.e(LOG_TAG, "Invalid action : " + action);
      callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.INVALID_ACTION));
      return false;
    }
    return true;
  }

  /**
   * Receives the ringtone picker result (request code 999) and persists the chosen
   * ringtone URI under "soundpath" in MY_PREFS_NAME.  A null picked URI or a null
   * result intent falls through to a user-facing toast.
   */
  public void onActivityResult( int requestCode, int resultCode, Intent intent) {
    SharedPreferences sharedPref =
      getApplicationContext().getSharedPreferences(MY_PREFS_NAME, Context.MODE_PRIVATE);
    if ( requestCode == 999) {
      try{
        Uri uri = intent.getParcelableExtra(RingtoneManager.EXTRA_RINGTONE_PICKED_URI);
        if(uri != null){
          String str = uri.toString();
          // NOTE(review): unused local; also Toast durations below pass a raw millisecond
          // literal (2500) where Toast expects LENGTH_SHORT or LENGTH_LONG -- confirm.
          Context context=this.cordova.getActivity().getApplicationContext();
          SharedPreferences.Editor editor = sharedPref.edit();
          editor.putString("soundpath",str);
          editor.commit();
        }else{
          Toast.makeText(this.cordova.getActivity().getApplicationContext(),
                         "No Ringtone Path Found!", 2500).show();
        }
      }catch(NullPointerException e){
        // intent may be null when the picker is cancelled.
        Toast.makeText(this.cordova.getActivity().getApplicationContext(),
                       "No Ringtone Selected!", 2500).show();
      }
    }
  }

  /**
   * Streams a success result (with keep-callback) to the JS side through the
   * INITIALIZE callback channel; no-op when INITIALIZE has not run yet.
   */
  public static void sendEvent(JSONObject _json) {
    PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, _json);
    pluginResult.setKeepCallback(true);
    if (pushContext != null) {
      pushContext.sendPluginResult(pluginResult);
    }
  }

  /**
   * Streams an error result (with keep-callback) to the JS side; no-op when
   * INITIALIZE has not run yet.
   */
  public static void sendError(String message) {
    PluginResult pluginResult = new PluginResult(PluginResult.Status.ERROR, message);
    pluginResult.setKeepCallback(true);
    if (pushContext != null) {
      pushContext.sendPluginResult(pluginResult);
    }
  }

  /*
   * Sends the pushbundle extras to the client application.
   * If the client application isn't currently active, it is cached for later processing.
   */
  public static void sendExtras(Bundle extras) {
    if (extras != null) {
      if (gWebView != null) {
        sendEvent(convertBundleToJson(extras));
      } else {
        Log.v(LOG_TAG, "sendExtras: caching extras to send at a later time.");
        gCachedExtras = extras;
      }
    }
  }

  @Override
  public void initialize(CordovaInterface cordova, CordovaWebView webView) {
    super.initialize(cordova, webView);
    gForeground = true;
  }

  @Override
  public void onPause(boolean multitasking) {
    super.onPause(multitasking);
    gForeground = false;

    // Clear posted notifications when the app goes to background, unless the
    // clearNotifications option was disabled at INITIALIZE time.
    SharedPreferences prefs =
      getApplicationContext().getSharedPreferences(COM_ADOBE_PHONEGAP_PUSH, Context.MODE_PRIVATE);
    if (prefs.getBoolean(CLEAR_NOTIFICATIONS, true)) {
      final NotificationManager notificationManager =
        (NotificationManager) cordova.getActivity().getSystemService(Context.NOTIFICATION_SERVICE);
      notificationManager.cancelAll();
    }
  }

  @Override
  public void onResume(boolean multitasking) {
    super.onResume(multitasking);
    gForeground = true;
  }

  @Override
  public void onDestroy() {
    super.onDestroy();
    gForeground = false;
    gWebView = null;
  }

  /**
   * Subscribes the registration token to each GCM topic in the array
   * (as "/topics/&lt;name&gt;").  Failures are logged and skipped so one bad
   * topic does not abort the rest.
   */
  private void subscribeToTopics(JSONArray topics, String registrationToken) {
    if (topics != null) {
      String topic = null;
      for (int i=0; i<topics.length(); i++) {
        try {
          topic = topics.optString(i, null);
          if (topic != null) {
            Log.d(LOG_TAG, "Subscribing to topic: " + topic);
            GcmPubSub.getInstance(getApplicationContext())
              .subscribe(registrationToken, "/topics/" + topic, null);
          }
        } catch (IOException e) {
          Log.e(LOG_TAG, "Failed to subscribe to topic: " + topic, e);
        }
      }
    }
  }

  /**
   * Unsubscribes the registration token from each GCM topic in the array;
   * mirror image of {@link #subscribeToTopics}.
   */
  private void unsubscribeFromTopics(JSONArray topics, String registrationToken) {
    if (topics != null) {
      String topic = null;
      for (int i=0; i<topics.length(); i++) {
        try {
          topic = topics.optString(i, null);
          if (topic != null) {
            Log.d(LOG_TAG, "Unsubscribing to topic: " + topic);
            GcmPubSub.getInstance(getApplicationContext())
              .unsubscribe(registrationToken, "/topics/" + topic);
          }
        } catch (IOException e) {
          Log.e(LOG_TAG, "Failed to unsubscribe to topic: " + topic, e);
        }
      }
    }
  }

  /*
   * serializes a bundle to JSON.
   * Well-known keys (title, message, count, sound, soundpath, image) go to the top level;
   * COLDSTART/FOREGROUND flags and everything else go under "additionalData", with string
   * values that look like JSON parsed into objects/arrays.  Returns null on JSON failure.
   */
  private static JSONObject convertBundleToJson(Bundle extras) {
    Log.d(LOG_TAG, "convert extras to json");
    try {
      JSONObject json = new JSONObject();
      JSONObject additionalData = new JSONObject();

      // Add any keys that need to be in top level json to this set
      // NOTE(review): raw HashSet -- should be new HashSet<String>().
      HashSet<String> jsonKeySet = new HashSet();
      Collections.addAll(jsonKeySet, TITLE,MESSAGE,COUNT,SOUND,SOUNDPATH,IMAGE);

      Iterator<String> it = extras.keySet().iterator();
      while (it.hasNext()) {
        String key = it.next();
        Object value = extras.get(key);

        Log.d(LOG_TAG, "key = " + key);

        if (jsonKeySet.contains(key)) {
          json.put(key, value);
        }
        else if (key.equals(COLDSTART)) {
          additionalData.put(key, extras.getBoolean(COLDSTART));
        }
        else if (key.equals(FOREGROUND)) {
          additionalData.put(key, extras.getBoolean(FOREGROUND));
        }
        else if ( value instanceof String ) {
          String strValue = (String)value;
          try {
            // Try to figure out if the value is another JSON object
            if (strValue.startsWith("{")) {
              additionalData.put(key, new JSONObject(strValue));
            }
            // Try to figure out if the value is another JSON array
            else if (strValue.startsWith("[")) {
              additionalData.put(key, new JSONArray(strValue));
            }
            // Nope, it's just a plain string.
            else {
              additionalData.put(key, value);
            }
          } catch (Exception e) {
            additionalData.put(key, value);
          }
        }
      } // while

      json.put(ADDITIONAL_DATA, additionalData);
      Log.v(LOG_TAG, "extrasToJSON: " + json.toString());

      return json;
    }
    catch( JSONException e) {
      Log.e(LOG_TAG, "extrasToJSON: JSON exception");
    }
    return null;
  }

  // True while the Cordova activity is in the foreground.
  public static boolean isInForeground() {
    return gForeground;
  }

  // True once a webview has been attached (INITIALIZE or any execute call ran).
  public static boolean isActive() {
    return gWebView != null;
  }
}
src/android/com/adobe/phonegap/push/PushPlugin.java
package com.adobe.phonegap.push; import android.app.NotificationManager; import android.content.Context; import android.content.SharedPreferences; import android.os.Bundle; import android.util.Log; import android.widget.Toast; import android.content.Context; import android.content.Intent; import android.media.RingtoneManager; import android.view.View.OnClickListener; import android.app.Activity; import android.media.Ringtone; import android.widget.TextView; import android.view.View; import android.widget.Button; import android.database.Cursor; import android.view.Menu; import java.lang.NullPointerException; import java.util.Timer; import java.util.TimerTask; import com.google.android.gms.gcm.GcmPubSub; import com.google.android.gms.iid.InstanceID; import org.apache.cordova.CallbackContext; import org.apache.cordova.CordovaInterface; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.CordovaWebView; import org.apache.cordova.PluginResult; import org.json.JSONArray; import android.net.Uri; import org.json.JSONException; import org.json.JSONObject; import java.io.IOException; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; public class PushPlugin extends CordovaPlugin implements PushConstants { public static final String LOG_TAG = "PushPlugin"; private static CallbackContext pushContext; private static CordovaWebView gWebView; private static Bundle gCachedExtras = null; private static boolean gForeground = false; public static final String MY_PREFS_NAME = "MyPrefsFile"; /** * Gets the application context from cordova's main activity. 
* @return the application context */ private Context getApplicationContext() { return this.cordova.getActivity().getApplicationContext(); } @Override public boolean execute(final String action, final JSONArray data, final CallbackContext callbackContext) { Log.v(LOG_TAG, "execute: action=" + action); gWebView = this.webView; if (INITIALIZE.equals(action)) { cordova.getThreadPool().execute(new Runnable() { public void run() { pushContext = callbackContext; JSONObject jo = null; Log.v(LOG_TAG, "execute: data=" + data.toString()); SharedPreferences sharedPref = getApplicationContext().getSharedPreferences(COM_ADOBE_PHONEGAP_PUSH, Context.MODE_PRIVATE); String token = null; String senderID = null; String soundpath = null; try { jo = data.getJSONObject(0).getJSONObject(ANDROID); Log.v(LOG_TAG, "execute: jo=" + jo.toString()); senderID = jo.getString(SENDER_ID); Log.v(LOG_TAG, "execute: senderID=" + senderID); String savedSenderID = sharedPref.getString(SENDER_ID, ""); String savedRegID = sharedPref.getString(REGISTRATION_ID, ""); // first time run get new token if ("".equals(savedRegID)) { token = InstanceID.getInstance(getApplicationContext()).getToken(senderID, GCM); } // new sender ID, re-register else if (!savedSenderID.equals(senderID)) { token = InstanceID.getInstance(getApplicationContext()).getToken(senderID, GCM); } // use the saved one else { token = sharedPref.getString(REGISTRATION_ID, ""); } if (!"".equals(token)) { JSONObject json = new JSONObject().put(REGISTRATION_ID, token); Log.v(LOG_TAG, "onRegistered: " + json.toString()); JSONArray topics = jo.optJSONArray(TOPICS); subscribeToTopics(topics, token); PushPlugin.sendEvent( json ); } else { callbackContext.error("Empty registration ID received from GCM"); return; } } catch (JSONException e) { Log.e(LOG_TAG, "execute: Got JSON Exception " + e.getMessage()); callbackContext.error(e.getMessage()); } catch (IOException e) { Log.e(LOG_TAG, "execute: Got JSON Exception " + e.getMessage()); 
callbackContext.error(e.getMessage()); } if (jo != null) { SharedPreferences.Editor editor = sharedPref.edit(); try { editor.putString(ICON, jo.getString(ICON)); } catch (JSONException e) { Log.d(LOG_TAG, "no icon option"); } try { editor.putString(ICON_COLOR, jo.getString(ICON_COLOR)); } catch (JSONException e) { Log.d(LOG_TAG, "no iconColor option"); } editor.putBoolean(SOUND, jo.optBoolean(SOUND, true)); editor.putString(SOUNDPATH, soundpath); editor.putBoolean(VIBRATE, jo.optBoolean(VIBRATE, true)); editor.putBoolean(CLEAR_NOTIFICATIONS, jo.optBoolean(CLEAR_NOTIFICATIONS, true)); editor.putBoolean(FORCE_SHOW, jo.optBoolean(FORCE_SHOW, false)); editor.putString(SENDER_ID, senderID); editor.putString(REGISTRATION_ID, token); editor.commit(); } if (gCachedExtras != null) { Log.v(LOG_TAG, "sending cached extras"); sendExtras(gCachedExtras); gCachedExtras = null; } } }); } else if (UNREGISTER.equals(action)) { cordova.getThreadPool().execute(new Runnable() { public void run() { try { SharedPreferences sharedPref = getApplicationContext().getSharedPreferences(COM_ADOBE_PHONEGAP_PUSH, Context.MODE_PRIVATE); String token = sharedPref.getString(REGISTRATION_ID, ""); JSONArray topics = data.optJSONArray(0); if (topics != null && !"".equals(token)) { unsubscribeFromTopics(topics, token); } else { InstanceID.getInstance(getApplicationContext()).deleteInstanceID(); Log.v(LOG_TAG, "UNREGISTER"); // Remove shared prefs SharedPreferences.Editor editor = sharedPref.edit(); editor.remove(SOUND); editor.remove(SOUNDPATH); editor.remove(VIBRATE); editor.remove(CLEAR_NOTIFICATIONS); editor.remove(FORCE_SHOW); editor.remove(SENDER_ID); editor.remove(REGISTRATION_ID); editor.commit(); } callbackContext.success(); } catch (IOException e) { Log.e(LOG_TAG, "execute: Got JSON Exception " + e.getMessage()); callbackContext.error(e.getMessage()); } } }); } else if (FINISH.equals(action)) { callbackContext.success(); } else if (HAS_PERMISSION.equals(action)) { 
cordova.getThreadPool().execute(new Runnable() { public void run() { JSONObject jo = new JSONObject(); try { jo.put("isEnabled", PermissionUtils.hasPermission(getApplicationContext(), "OP_POST_NOTIFICATION")); PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, jo); pluginResult.setKeepCallback(true); callbackContext.sendPluginResult(pluginResult); } catch (UnknownError e) { callbackContext.error(e.getMessage()); } catch (JSONException e) { callbackContext.error(e.getMessage()); } } }); } else if (SELECT.equals(action)){ // Intent intent = new Intent(RingtoneManager.ACTION_RINGTONE_PICKER); // intent.putExtra(RingtoneManager.EXTRA_RINGTONE_TITLE, "Select ringtone for notifications:"); // intent.putExtra(RingtoneManager.EXTRA_RINGTONE_SHOW_SILENT, false); // intent.putExtra(RingtoneManager.EXTRA_RINGTONE_SHOW_DEFAULT, true); // intent.putExtra(RingtoneManager.EXTRA_RINGTONE_TYPE,RingtoneManager.TYPE_ALARM); // this.startActivityForResult( intent, 999); try { //Context context=this.cordova.getActivity().getApplicationContext(); //or Context context=cordova.getActiivity().getApplicationContext(); this.cordova.setActivityResultCallback(this); Intent intent = new Intent(RingtoneManager.ACTION_RINGTONE_PICKER); intent.putExtra(RingtoneManager.EXTRA_RINGTONE_TYPE,RingtoneManager.TYPE_NOTIFICATION | RingtoneManager.TYPE_RINGTONE); intent.putExtra(RingtoneManager.EXTRA_RINGTONE_SHOW_DEFAULT, true); intent.putExtra(RingtoneManager.EXTRA_RINGTONE_DEFAULT_URI, RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION)); this.cordova.getActivity().startActivityForResult(intent, 999); // SharedPreferences sharedPref = getApplicationContext().getSharedPreferences(MY_PREFS_NAME, Context.MODE_PRIVATE); // String s = sharedPref.getString("soundpath",null); // callbackContext.success(s); //Toast.makeText(this.cordova.getActivity().getApplicationContext(),"called", //4000).show(); } catch (UnknownError e) { callbackContext.error(e.getMessage()); } }else { 
Log.e(LOG_TAG, "Invalid action : " + action); callbackContext.sendPluginResult(new PluginResult(PluginResult.Status.INVALID_ACTION)); return false; } return true; } public void onActivityResult( int requestCode, int resultCode, Intent intent, CallbackContext callbackContext) { SharedPreferences sharedPref = getApplicationContext().getSharedPreferences(MY_PREFS_NAME, Context.MODE_PRIVATE); if ( requestCode == 999) { try{ Uri uri = intent.getParcelableExtra(RingtoneManager.EXTRA_RINGTONE_PICKED_URI); if(uri != null){ String str = uri.toString(); Context context=this.cordova.getActivity().getApplicationContext(); SharedPreferences.Editor editor = sharedPref.edit(); editor.putString("soundpath",str); editor.commit(); callbackContext.success(str); }else{ Toast.makeText(this.cordova.getActivity().getApplicationContext(), "No Ringtone Path Found!", 2500).show(); } }catch(NullPointerException e){ Toast.makeText(this.cordova.getActivity().getApplicationContext(), "No Ringtone Selected!", 2500).show(); } } } public static void sendEvent(JSONObject _json) { PluginResult pluginResult = new PluginResult(PluginResult.Status.OK, _json); pluginResult.setKeepCallback(true); if (pushContext != null) { pushContext.sendPluginResult(pluginResult); } } public static void sendError(String message) { PluginResult pluginResult = new PluginResult(PluginResult.Status.ERROR, message); pluginResult.setKeepCallback(true); if (pushContext != null) { pushContext.sendPluginResult(pluginResult); } } /* * Sends the pushbundle extras to the client application. * If the client application isn't currently active, it is cached for later processing. 
*/ public static void sendExtras(Bundle extras) { if (extras != null) { if (gWebView != null) { sendEvent(convertBundleToJson(extras)); } else { Log.v(LOG_TAG, "sendExtras: caching extras to send at a later time."); gCachedExtras = extras; } } } @Override public void initialize(CordovaInterface cordova, CordovaWebView webView) { super.initialize(cordova, webView); gForeground = true; } @Override public void onPause(boolean multitasking) { super.onPause(multitasking); gForeground = false; SharedPreferences prefs = getApplicationContext().getSharedPreferences(COM_ADOBE_PHONEGAP_PUSH, Context.MODE_PRIVATE); if (prefs.getBoolean(CLEAR_NOTIFICATIONS, true)) { final NotificationManager notificationManager = (NotificationManager) cordova.getActivity().getSystemService(Context.NOTIFICATION_SERVICE); notificationManager.cancelAll(); } } @Override public void onResume(boolean multitasking) { super.onResume(multitasking); gForeground = true; } @Override public void onDestroy() { super.onDestroy(); gForeground = false; gWebView = null; } private void subscribeToTopics(JSONArray topics, String registrationToken) { if (topics != null) { String topic = null; for (int i=0; i<topics.length(); i++) { try { topic = topics.optString(i, null); if (topic != null) { Log.d(LOG_TAG, "Subscribing to topic: " + topic); GcmPubSub.getInstance(getApplicationContext()).subscribe(registrationToken, "/topics/" + topic, null); } } catch (IOException e) { Log.e(LOG_TAG, "Failed to subscribe to topic: " + topic, e); } } } } private void unsubscribeFromTopics(JSONArray topics, String registrationToken) { if (topics != null) { String topic = null; for (int i=0; i<topics.length(); i++) { try { topic = topics.optString(i, null); if (topic != null) { Log.d(LOG_TAG, "Unsubscribing to topic: " + topic); GcmPubSub.getInstance(getApplicationContext()).unsubscribe(registrationToken, "/topics/" + topic); } } catch (IOException e) { Log.e(LOG_TAG, "Failed to unsubscribe to topic: " + topic, e); } } } } /* * 
serializes a bundle to JSON. */ private static JSONObject convertBundleToJson(Bundle extras) { Log.d(LOG_TAG, "convert extras to json"); try { JSONObject json = new JSONObject(); JSONObject additionalData = new JSONObject(); // Add any keys that need to be in top level json to this set HashSet<String> jsonKeySet = new HashSet(); Collections.addAll(jsonKeySet, TITLE,MESSAGE,COUNT,SOUND,SOUNDPATH,IMAGE); Iterator<String> it = extras.keySet().iterator(); while (it.hasNext()) { String key = it.next(); Object value = extras.get(key); Log.d(LOG_TAG, "key = " + key); if (jsonKeySet.contains(key)) { json.put(key, value); } else if (key.equals(COLDSTART)) { additionalData.put(key, extras.getBoolean(COLDSTART)); } else if (key.equals(FOREGROUND)) { additionalData.put(key, extras.getBoolean(FOREGROUND)); } else if ( value instanceof String ) { String strValue = (String)value; try { // Try to figure out if the value is another JSON object if (strValue.startsWith("{")) { additionalData.put(key, new JSONObject(strValue)); } // Try to figure out if the value is another JSON array else if (strValue.startsWith("[")) { additionalData.put(key, new JSONArray(strValue)); } else { additionalData.put(key, value); } } catch (Exception e) { additionalData.put(key, value); } } } // while json.put(ADDITIONAL_DATA, additionalData); Log.v(LOG_TAG, "extrasToJSON: " + json.toString()); return json; } catch( JSONException e) { Log.e(LOG_TAG, "extrasToJSON: JSON exception"); } return null; } public static boolean isInForeground() { return gForeground; } public static boolean isActive() { return gWebView != null; } }
Update PushPlugin.java
src/android/com/adobe/phonegap/push/PushPlugin.java
Update PushPlugin.java
<ide><path>rc/android/com/adobe/phonegap/push/PushPlugin.java <ide> import android.content.Context; <ide> import android.content.SharedPreferences; <ide> import android.os.Bundle; <add>import java.io.File; <ide> import android.util.Log; <ide> import android.widget.Toast; <ide> import android.content.Context; <ide> intent.putExtra(RingtoneManager.EXTRA_RINGTONE_DEFAULT_URI, <ide> RingtoneManager.getDefaultUri(RingtoneManager.TYPE_NOTIFICATION)); <ide> this.cordova.getActivity().startActivityForResult(intent, 999); <del> // SharedPreferences sharedPref = getApplicationContext().getSharedPreferences(MY_PREFS_NAME, Context.MODE_PRIVATE); <del> // String s = sharedPref.getString("soundpath",null); <del> // callbackContext.success(s); <del> //Toast.makeText(this.cordova.getActivity().getApplicationContext(),"called", <del> //4000).show(); <add> SharedPreferences sharedPref = getApplicationContext().getSharedPreferences(MY_PREFS_NAME, Context.MODE_PRIVATE); <add> String s = sharedPref.getString("soundpath",null); <add> Uri u = Uri.parse(s); <add> File f = new File("" + u); <add> String r = f.getName(); <add> callbackContext.success(r); <add> // Toast.makeText(this.cordova.getActivity().getApplicationContext(),"called", <add> // 4000).show(); <ide> } catch (UnknownError e) { <ide> callbackContext.error(e.getMessage()); <ide> } <ide> } <ide> <ide> <del> public void onActivityResult( int requestCode, int resultCode, Intent intent, CallbackContext callbackContext) <add> public void onActivityResult( int requestCode, int resultCode, Intent intent) <ide> { <ide> SharedPreferences sharedPref = getApplicationContext().getSharedPreferences(MY_PREFS_NAME, Context.MODE_PRIVATE); <ide> if ( requestCode == 999) <ide> SharedPreferences.Editor editor = sharedPref.edit(); <ide> editor.putString("soundpath",str); <ide> editor.commit(); <del> callbackContext.success(str); <ide> }else{ <ide> Toast.makeText(this.cordova.getActivity().getApplicationContext(), "No Ringtone Path Found!", 
2500).show(); <ide> }
JavaScript
mit
82354c920972c057ce4e1a71418376036e34fba8
0
frigus02/RESTer
import db from './data/utils/db.js'; import * as authorizationProviderConfigurations from './data/authorization-provider-configurations.js'; import * as authorizationTokens from './data/authorization-tokens.js'; import * as environments from './data/environments.js'; import * as history from './data/history.js'; import * as requests from './data/requests.js'; import * as exportImport from './exportImport/index.js'; import * as settings from './settings/index.js'; import { select } from './utils/fields.js'; // WARNING: The variable name "resterApi" is configured as reserved for UglifyJS. // Do not change this name here without changing it in the webpack config. const resterApi = { data: { authorizationProviderConfigurations: { put: authorizationProviderConfigurations.putAuthorizationProviderConfiguration, query: authorizationProviderConfigurations.queryAuthorizationProviderConfigurations, delete: authorizationProviderConfigurations.deleteAuthorizationProviderConfiguration }, authorizationTokens: { add: authorizationTokens.addAuthorizationToken, query: authorizationTokens.queryAuthorizationTokens, delete: authorizationTokens.deleteAuthorizationToken }, environments: { put: environments.putEnvironment, get: environments.getEnvironment, query: environments.queryEnvironments, delete: environments.deleteEnvironment }, history: { add: history.addHistoryEntry, get: history.getHistoryEntry, query: history.queryHistoryEntries, delete: history.deleteHistoryEntries }, requests: { put: requests.putRequest, get: requests.getRequest, query: requests.queryRequests, queryCollections: requests.queryRequestCollections, delete: requests.deleteRequest } }, exportImport: { export: exportImport.exportData, import: exportImport.importData }, settings: { get: settings.get, set: settings.set } }; chrome.browserAction.onClicked.addListener(() => { chrome.tabs.query({ active: true, currentWindow: true }, tabs => { const resterUrl = chrome.extension.getURL('site/index.html'); const blankUrls = 
['about:blank', 'about:newtab']; if (blankUrls.includes(tabs[0].url)) { try { chrome.tabs.update({ loadReplace: true, url: resterUrl }); } catch (e) { // Chrome does not support loadReplace and throws an exception, // it is specified. Try again without loadReplace. chrome.tabs.update({ url: resterUrl }); } } else { chrome.tabs.create({ url: resterUrl }); } }); }); chrome.runtime.onConnect.addListener(port => { if (port.name !== 'api') { return; } function onDataChange(event) { port.postMessage({ action: 'event.dataChange', detail: JSON.stringify(event.detail) }); } function onDataSlowPerformance(event) { port.postMessage({ action: 'event.dataSlowPerformance', detail: JSON.stringify(event.detail) }); } function onSettingsChange(event) { port.postMessage({ action: 'event.settingsChange', detail: JSON.stringify(event.detail) }); } db.addEventListener('change', onDataChange); db.addEventListener('slowPerformance', onDataSlowPerformance); settings.e.addEventListener('change', onSettingsChange); port.onMessage.addListener(({ id, action, args, fields }) => { if (!action.startsWith('api.')) { return; } const actionPath = action.split('.').slice(1); const actionFunc = actionPath.reduce((api, path) => api && api[path], resterApi); if (!actionFunc) { return; } Promise.resolve(actionFunc(args && JSON.parse(args))) .then(result => { if (result && fields) { result = select(result, fields); } port.postMessage({ id, action: 'apiresponse', result: JSON.stringify(result) }); }) .catch(error => { if (error.message) { error = error.message; } port.postMessage({ id, action: 'apiresponse', error: JSON.stringify(error) }); }); }); port.onDisconnect.addListener(() => { db.removeEventListener('change', onDataChange); db.removeEventListener('slowPerformance', onDataSlowPerformance); settings.e.removeEventListener('change', onSettingsChange); }); });
src/background/index.js
import db from './data/utils/db.js'; import * as authorizationProviderConfigurations from './data/authorization-provider-configurations.js'; import * as authorizationTokens from './data/authorization-tokens.js'; import * as environments from './data/environments.js'; import * as history from './data/history.js'; import * as requests from './data/requests.js'; import * as exportImport from './exportImport/index.js'; import * as settings from './settings/index.js'; import { select } from './utils/fields.js'; // WARNING: The variable name "resterApi" is configured as reserved for UglifyJS. // Do not change this name here without changing it in the webpack config. const resterApi = { data: { authorizationProviderConfigurations: { put: authorizationProviderConfigurations.putAuthorizationProviderConfiguration, query: authorizationProviderConfigurations.queryAuthorizationProviderConfigurations, delete: authorizationProviderConfigurations.deleteAuthorizationProviderConfiguration }, authorizationTokens: { add: authorizationTokens.addAuthorizationToken, query: authorizationTokens.queryAuthorizationTokens, delete: authorizationTokens.deleteAuthorizationToken }, environments: { put: environments.putEnvironment, get: environments.getEnvironment, query: environments.queryEnvironments, delete: environments.deleteEnvironment }, history: { add: history.addHistoryEntry, get: history.getHistoryEntry, query: history.queryHistoryEntries, delete: history.deleteHistoryEntries }, requests: { put: requests.putRequest, get: requests.getRequest, query: requests.queryRequests, queryCollections: requests.queryRequestCollections, delete: requests.deleteRequest } }, exportImport: { export: exportImport.exportData, import: exportImport.importData }, settings: { get: settings.get, set: settings.set } }; chrome.browserAction.onClicked.addListener(() => { chrome.tabs.query({ active: true, currentWindow: true }, tabs => { const resterUrl = chrome.extension.getURL('site/index.html'); const blankUrls = 
['about:blank', 'about:newtab']; if (blankUrls.includes(tabs[0].url)) { chrome.tabs.update({ loadReplace: true, url: resterUrl }); } else { chrome.tabs.create({ url: resterUrl }); } }); }); chrome.runtime.onConnect.addListener(port => { if (port.name !== 'api') { return; } function onDataChange(event) { port.postMessage({ action: 'event.dataChange', detail: JSON.stringify(event.detail) }); } function onDataSlowPerformance(event) { port.postMessage({ action: 'event.dataSlowPerformance', detail: JSON.stringify(event.detail) }); } function onSettingsChange(event) { port.postMessage({ action: 'event.settingsChange', detail: JSON.stringify(event.detail) }); } db.addEventListener('change', onDataChange); db.addEventListener('slowPerformance', onDataSlowPerformance); settings.e.addEventListener('change', onSettingsChange); port.onMessage.addListener(({ id, action, args, fields }) => { if (!action.startsWith('api.')) { return; } const actionPath = action.split('.').slice(1); const actionFunc = actionPath.reduce((api, path) => api && api[path], resterApi); if (!actionFunc) { return; } Promise.resolve(actionFunc(args && JSON.parse(args))) .then(result => { if (result && fields) { result = select(result, fields); } port.postMessage({ id, action: 'apiresponse', result: JSON.stringify(result) }); }) .catch(error => { if (error.message) { error = error.message; } port.postMessage({ id, action: 'apiresponse', error: JSON.stringify(error) }); }); }); port.onDisconnect.addListener(() => { db.removeEventListener('change', onDataChange); db.removeEventListener('slowPerformance', onDataSlowPerformance); settings.e.removeEventListener('change', onSettingsChange); }); });
Support Chrome for opening RESTer in current tab
src/background/index.js
Support Chrome for opening RESTer in current tab
<ide><path>rc/background/index.js <ide> const resterUrl = chrome.extension.getURL('site/index.html'); <ide> const blankUrls = ['about:blank', 'about:newtab']; <ide> if (blankUrls.includes(tabs[0].url)) { <del> chrome.tabs.update({ <del> loadReplace: true, <del> url: resterUrl <del> }); <add> try { <add> chrome.tabs.update({ <add> loadReplace: true, <add> url: resterUrl <add> }); <add> } catch (e) { <add> // Chrome does not support loadReplace and throws an exception, <add> // it is specified. Try again without loadReplace. <add> chrome.tabs.update({ <add> url: resterUrl <add> }); <add> } <ide> } else { <ide> chrome.tabs.create({ <ide> url: resterUrl
Java
apache-2.0
199e1aa4f58a2cc56bf9d63f4daec8cfb1865d83
0
arothian/artifactory-plugin,DimaNevelev/jenkins-artifactory-plugin,JFrogDev/jenkins-artifactory-plugin,christ66/jenkins-artifactory-plugin,recena/artifactory-plugin,arothian/artifactory-plugin,nilleb/artifactory-plugin,AlexeiVainshtein/jenkins-artifactory-plugin,recena/artifactory-plugin,stephenliang/artifactory-plugin,shikloshi/jenkins-artifactory-plugin,DimaNevelev/jenkins-artifactory-plugin,grossws/jenkins-artifactory-plugin,grossws/jenkins-artifactory-plugin,grossws/jenkins-artifactory-plugin,jglick/artifactory-plugin,JFrogDev/jenkins-artifactory-plugin,AlexeiVainshtein/jenkins-artifactory-plugin,arothian/artifactory-plugin,DimaNevelev/jenkins-artifactory-plugin,kuberkaul/artifactory-plugin,recena/artifactory-plugin,shikloshi/jenkins-artifactory-plugin,stephenliang/artifactory-plugin,nilleb/artifactory-plugin,JFrogDev/jenkins-artifactory-plugin,shikloshi/jenkins-artifactory-plugin,kuberkaul/artifactory-plugin,stephenliang/artifactory-plugin,jglick/artifactory-plugin,nilleb/artifactory-plugin,christ66/jenkins-artifactory-plugin,kuberkaul/artifactory-plugin,christ66/jenkins-artifactory-plugin,AlexeiVainshtein/jenkins-artifactory-plugin
/* * Copyright (C) 2010 JFrog Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jfrog.hudson.ivy; import com.google.common.collect.Maps; import com.thoughtworks.xstream.annotations.XStreamAlias; import hudson.Extension; import hudson.Launcher; import hudson.ivy.AntIvyBuildWrapper; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.BuildListener; import hudson.model.Cause; import hudson.model.Hudson; import hudson.model.Result; import hudson.remoting.Which; import hudson.tasks.BuildWrapperDescriptor; import hudson.util.Scrambler; import net.sf.json.JSONObject; import org.aspectj.weaver.loadtime.Agent; import org.jfrog.build.api.BuildInfoConfigProperties; import org.jfrog.build.api.BuildInfoProperties; import org.jfrog.build.client.ClientProperties; import org.jfrog.build.config.ArtifactoryIvySettingsConfigurator; import org.jfrog.hudson.ArtifactoryBuilder; import org.jfrog.hudson.ArtifactoryServer; import org.jfrog.hudson.ServerDetails; import org.jfrog.hudson.util.ActionableHelper; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.StaplerRequest; import java.io.File; import java.io.IOException; import java.util.List; import java.util.Map; /** * @author Tomer Cohen */ @XStreamAlias("artifactory-ivy-config") public class ArtifactoryIvyConfigurator extends AntIvyBuildWrapper { private ServerDetails details; private String username; private String password; private boolean deployArtifacts; private 
boolean deployBuildInfo; private boolean includeEnvVars; @DataBoundConstructor public ArtifactoryIvyConfigurator(ServerDetails details, String username, String password, boolean deployArtifacts, boolean deployBuildInfo, boolean includeEnvVars) { this.details = details; this.username = username; this.password = Scrambler.scramble(password); this.deployArtifacts = deployArtifacts; this.deployBuildInfo = deployBuildInfo; this.includeEnvVars = includeEnvVars; } public ServerDetails getDetails() { return details; } public String getPassword() { return Scrambler.descramble(password); } public String getUsername() { return username; } public boolean isDeployArtifacts() { return deployArtifacts; } public boolean isDeployBuildInfo() { return deployBuildInfo; } public boolean isIncludeEnvVars() { return includeEnvVars; } public String getArtifactoryName() { return details != null ? details.artifactoryName : null; } public String getRepositoryKey() { return details != null ? details.repositoryKey : null; } @Override public Environment setUp(final AbstractBuild build, Launcher launcher, BuildListener listener) throws IOException, InterruptedException { final ArtifactoryServer artifactoryServer = getArtifactoryServer(); build.setResult(Result.SUCCESS); return new AntIvyBuilderEnvironment() { @Override public void buildEnvVars(Map<String, String> env) { Map<String, String> envVars = Maps.newHashMap(); for (Map.Entry<String, String> entry : env.entrySet()) { envVars.put(BuildInfoProperties.BUILD_INFO_ENVIRONMENT_PREFIX + entry.getKey(), entry.getValue()); } env.putAll(envVars); env.put(ClientProperties.PROP_CONTEXT_URL, artifactoryServer.getUrl()); env.put(ClientProperties.PROP_PUBLISH_REPOKEY, getRepositoryKey()); env.put(ClientProperties.PROP_PUBLISH_USERNAME, getUsername()); env.put(ClientProperties.PROP_PUBLISH_PASSWORD, getPassword()); env.put(BuildInfoProperties.PROP_AGENT_NAME, "Hudson"); env.put(BuildInfoProperties.PROP_AGENT_VERSION, build.getHudsonVersion()); 
env.put(BuildInfoProperties.PROP_BUILD_NUMBER, build.getNumber() + ""); env.put(BuildInfoProperties.PROP_BUILD_NAME, build.getProject().getName()); env.put(BuildInfoProperties.PROP_PRINCIPAL, ActionableHelper.getHudsonPrincipal(build)); env.put(BuildInfoConfigProperties.PROP_INCLUDE_ENV_VARS, String.valueOf(isIncludeEnvVars())); env.put(ClientProperties.PROP_PUBLISH_BUILD_INFO, String.valueOf(isDeployBuildInfo())); env.put(ClientProperties.PROP_PUBLISH_ARTIFACT, String.valueOf(isDeployArtifacts())); if (Hudson.getInstance().getRootUrl() != null) { env.put(BuildInfoProperties.PROP_BUILD_URL, Hudson.getInstance().getRootUrl() + build.getUrl()); } Cause.UpstreamCause parent = ActionableHelper.getUpstreamCause(build); if (parent != null) { env.put(BuildInfoProperties.PROP_PARENT_BUILD_NAME, parent.getUpstreamProject()); env.put(BuildInfoProperties.PROP_PARENT_BUILD_NUMBER, parent.getUpstreamBuild() + ""); } } @Override public String getAdditionalOpts() { File agentLib; try { agentLib = Which.jarFile(Agent.class); } catch (IOException e) { throw new RuntimeException(e); } StringBuilder extraAntOpts = new StringBuilder(); String path = agentLib.getAbsolutePath(); path = path.replace('\\', '/'); path = "\"" + path + "\""; extraAntOpts.append("-javaagent:").append(path).append(" "); return extraAntOpts.toString(); } @Override public String getAdditionalArgs() { final File agentFile; try { agentFile = Which.jarFile(ArtifactoryIvySettingsConfigurator.class); } catch (IOException e) { throw new RuntimeException(e); } StringBuilder targets = new StringBuilder(); String path = agentFile.getParentFile().getAbsolutePath(); path = path.replace('\\', '/'); path = "\"" + path + "\""; targets.append("-lib ").append(path).append(" "); targets.append("-listener ").append("org.jfrog.build.extractor.listener.ArtifactoryBuildListener") .append(" "); return targets.toString(); } }; } public ArtifactoryServer getArtifactoryServer() { List<ArtifactoryServer> servers = 
getDescriptor().getArtifactoryServers(); for (ArtifactoryServer server : servers) { if (server.getName().equals(getArtifactoryName())) { return server; } } return null; } @Override public DescriptorImpl getDescriptor() { return (DescriptorImpl) super.getDescriptor(); } @Extension(optional = true) public static class DescriptorImpl extends BuildWrapperDescriptor { public DescriptorImpl() { super(ArtifactoryIvyConfigurator.class); load(); } @Override public boolean isApplicable(AbstractProject<?, ?> item) { return "hudson.ivy.IvyModuleSet".equals(item.getClass().getName()); } @Override public String getDisplayName() { return "Publish to Artifactory"; } @Override public String getHelpFile() { return "/plugin/artifactory/ivy/help-publish.html"; } @Override public boolean configure(StaplerRequest req, JSONObject json) throws FormException { req.bindParameters(this, "ivy"); save(); return true; } /** * Returns the list of {@link org.jfrog.hudson.ArtifactoryServer} configured. * * @return can be empty but never null. */ public List<ArtifactoryServer> getArtifactoryServers() { ArtifactoryBuilder.DescriptorImpl descriptor = (ArtifactoryBuilder.DescriptorImpl) Hudson.getInstance().getDescriptor(ArtifactoryBuilder.class); return descriptor.getArtifactoryServers(); } } }
src/main/java/org/jfrog/hudson/ivy/ArtifactoryIvyConfigurator.java
/* * Copyright (C) 2010 JFrog Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jfrog.hudson.ivy; import com.google.common.collect.Maps; import com.thoughtworks.xstream.annotations.XStreamAlias; import hudson.Extension; import hudson.Launcher; import hudson.ivy.AntIvyBuildWrapper; import hudson.model.AbstractBuild; import hudson.model.AbstractProject; import hudson.model.BuildListener; import hudson.model.Cause; import hudson.model.Hudson; import hudson.model.Result; import hudson.remoting.Which; import hudson.tasks.BuildWrapperDescriptor; import hudson.util.Scrambler; import net.sf.json.JSONObject; import org.aspectj.weaver.loadtime.Agent; import org.jfrog.build.api.BuildInfoConfigProperties; import org.jfrog.build.api.BuildInfoProperties; import org.jfrog.build.client.ClientProperties; import org.jfrog.build.config.ArtifactoryIvySettingsConfigurator; import org.jfrog.hudson.ArtifactoryBuilder; import org.jfrog.hudson.ArtifactoryServer; import org.jfrog.hudson.ServerDetails; import org.jfrog.hudson.util.ActionableHelper; import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.StaplerRequest; import java.io.File; import java.io.IOException; import java.util.List; import java.util.Map; /** * @author Tomer Cohen */ @XStreamAlias("artifactory-ivy-config") public class ArtifactoryIvyConfigurator extends AntIvyBuildWrapper { private ServerDetails details; private String username; private String password; private boolean deployArtifacts; private 
boolean deployBuildInfo; private boolean includeEnvVars; @DataBoundConstructor public ArtifactoryIvyConfigurator(ServerDetails details, String username, String password, boolean deployArtifacts, boolean deployBuildInfo, boolean includeEnvVars) { this.details = details; this.username = username; this.password = Scrambler.scramble(password); this.deployArtifacts = deployArtifacts; this.deployBuildInfo = deployBuildInfo; this.includeEnvVars = includeEnvVars; } public ServerDetails getDetails() { return details; } public String getPassword() { return Scrambler.descramble(password); } public String getUsername() { return username; } public boolean isDeployArtifacts() { return deployArtifacts; } public boolean isDeployBuildInfo() { return deployBuildInfo; } public boolean isIncludeEnvVars() { return includeEnvVars; } public String getArtifactoryName() { return details != null ? details.artifactoryName : null; } public String getRepositoryKey() { return details != null ? details.repositoryKey : null; } @Override public Environment setUp(final AbstractBuild build, Launcher launcher, BuildListener listener) throws IOException, InterruptedException { final ArtifactoryServer artifactoryServer = getArtifactoryServer(); build.setResult(Result.SUCCESS); return new AntIvyBuilderEnvironment() { @Override public void buildEnvVars(Map<String, String> env) { Map<String, String> envVars = Maps.newHashMap(); for (Map.Entry<String, String> entry : env.entrySet()) { envVars.put(BuildInfoProperties.BUILD_INFO_ENVIRONMENT_PREFIX + entry.getKey(), entry.getValue()); } env.putAll(envVars); env.put(ClientProperties.PROP_CONTEXT_URL, artifactoryServer.getUrl()); env.put(ClientProperties.PROP_PUBLISH_REPOKEY, getRepositoryKey()); env.put(ClientProperties.PROP_PUBLISH_USERNAME, getUsername()); env.put(ClientProperties.PROP_PUBLISH_PASSWORD, getPassword()); env.put(BuildInfoProperties.PROP_BUILD_AGENT_NAME, "Hudson"); env.put(BuildInfoProperties.PROP_BUILD_AGENT_VERSION, 
build.getHudsonVersion()); env.put(BuildInfoProperties.PROP_BUILD_NUMBER, build.getNumber() + ""); env.put(BuildInfoProperties.PROP_BUILD_NAME, build.getProject().getName()); env.put(BuildInfoProperties.PROP_PRINCIPAL, ActionableHelper.getHudsonPrincipal(build)); env.put(BuildInfoConfigProperties.PROP_INCLUDE_ENV_VARS, String.valueOf(isIncludeEnvVars())); env.put(ClientProperties.PROP_PUBLISH_BUILD_INFO, String.valueOf(isDeployBuildInfo())); env.put(ClientProperties.PROP_PUBLISH_ARTIFACT, String.valueOf(isDeployArtifacts())); Cause.UpstreamCause parent = ActionableHelper.getUpstreamCause(build); if (parent != null) { env.put(BuildInfoProperties.PROP_PARENT_BUILD_NAME, parent.getUpstreamProject()); env.put(BuildInfoProperties.PROP_PARENT_BUILD_NUMBER, parent.getUpstreamBuild() + ""); } } @Override public String getAdditionalOpts() { File agentLib; try { agentLib = Which.jarFile(Agent.class); } catch (IOException e) { throw new RuntimeException(e); } StringBuilder extraAntOpts = new StringBuilder(); String path = agentLib.getAbsolutePath(); path = path.replace('\\', '/'); path = "\"" + path + "\""; extraAntOpts.append("-javaagent:").append(path).append(" "); return extraAntOpts.toString(); } @Override public String getAdditionalArgs() { final File agentFile; try { agentFile = Which.jarFile(ArtifactoryIvySettingsConfigurator.class); } catch (IOException e) { throw new RuntimeException(e); } StringBuilder targets = new StringBuilder(); String path = agentFile.getParentFile().getAbsolutePath(); path = path.replace('\\', '/'); path = "\"" + path + "\""; targets.append("-lib ").append(path).append(" "); targets.append("-listener ").append("org.jfrog.build.extractor.listener.ArtifactoryBuildListener") .append(" "); return targets.toString(); } }; } public ArtifactoryServer getArtifactoryServer() { List<ArtifactoryServer> servers = getDescriptor().getArtifactoryServers(); for (ArtifactoryServer server : servers) { if (server.getName().equals(getArtifactoryName())) { return 
server; } } return null; } @Override public DescriptorImpl getDescriptor() { return (DescriptorImpl) super.getDescriptor(); } @Extension(optional = true) public static class DescriptorImpl extends BuildWrapperDescriptor { public DescriptorImpl() { super(ArtifactoryIvyConfigurator.class); load(); } @Override public boolean isApplicable(AbstractProject<?, ?> item) { return "hudson.ivy.IvyModuleSet".equals(item.getClass().getName()); } @Override public String getDisplayName() { return "Publish to Artifactory"; } @Override public String getHelpFile() { return "/plugin/artifactory/ivy/help-publish.html"; } @Override public boolean configure(StaplerRequest req, JSONObject json) throws FormException { req.bindParameters(this, "ivy"); save(); return true; } /** * Returns the list of {@link org.jfrog.hudson.ArtifactoryServer} configured. * * @return can be empty but never null. */ public List<ArtifactoryServer> getArtifactoryServers() { ArtifactoryBuilder.DescriptorImpl descriptor = (ArtifactoryBuilder.DescriptorImpl) Hudson.getInstance().getDescriptor(ArtifactoryBuilder.class); return descriptor.getArtifactoryServers(); } } }
use propety for build URL
src/main/java/org/jfrog/hudson/ivy/ArtifactoryIvyConfigurator.java
use propety for build URL
<ide><path>rc/main/java/org/jfrog/hudson/ivy/ArtifactoryIvyConfigurator.java <ide> env.put(ClientProperties.PROP_PUBLISH_REPOKEY, getRepositoryKey()); <ide> env.put(ClientProperties.PROP_PUBLISH_USERNAME, getUsername()); <ide> env.put(ClientProperties.PROP_PUBLISH_PASSWORD, getPassword()); <del> env.put(BuildInfoProperties.PROP_BUILD_AGENT_NAME, "Hudson"); <del> env.put(BuildInfoProperties.PROP_BUILD_AGENT_VERSION, build.getHudsonVersion()); <add> env.put(BuildInfoProperties.PROP_AGENT_NAME, "Hudson"); <add> env.put(BuildInfoProperties.PROP_AGENT_VERSION, build.getHudsonVersion()); <ide> env.put(BuildInfoProperties.PROP_BUILD_NUMBER, build.getNumber() + ""); <ide> env.put(BuildInfoProperties.PROP_BUILD_NAME, build.getProject().getName()); <ide> env.put(BuildInfoProperties.PROP_PRINCIPAL, ActionableHelper.getHudsonPrincipal(build)); <ide> env.put(BuildInfoConfigProperties.PROP_INCLUDE_ENV_VARS, String.valueOf(isIncludeEnvVars())); <ide> env.put(ClientProperties.PROP_PUBLISH_BUILD_INFO, String.valueOf(isDeployBuildInfo())); <ide> env.put(ClientProperties.PROP_PUBLISH_ARTIFACT, String.valueOf(isDeployArtifacts())); <add> if (Hudson.getInstance().getRootUrl() != null) { <add> env.put(BuildInfoProperties.PROP_BUILD_URL, Hudson.getInstance().getRootUrl() + build.getUrl()); <add> <add> } <ide> Cause.UpstreamCause parent = ActionableHelper.getUpstreamCause(build); <ide> if (parent != null) { <ide> env.put(BuildInfoProperties.PROP_PARENT_BUILD_NAME, parent.getUpstreamProject());
Java
bsd-3-clause
55ceb6dd2201bb7a2b477cad391a9dd515b41b86
0
pepyakin/threetenbp,naixx/threetenbp,jnehlmeier/threetenbp,ThreeTen/threetenbp,pepyakin/threetenbp,naixx/threetenbp,ThreeTen/threetenbp,jnehlmeier/threetenbp
/* * Copyright (c) 2007-2012, Stephen Colebourne & Michael Nascimento Santos * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of JSR-310 nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package javax.time; import static javax.time.calendrical.LocalDateTimeField.INSTANT_SECONDS; import static javax.time.calendrical.LocalDateTimeField.NANO_OF_SECOND; import static javax.time.calendrical.LocalPeriodUnit.DAYS; import java.io.Serializable; import java.math.BigDecimal; import java.math.BigInteger; import java.math.RoundingMode; import java.util.Objects; import javax.time.calendrical.DateTime; import javax.time.calendrical.DateTime.MinusAdjuster; import javax.time.calendrical.DateTime.PlusAdjuster; import javax.time.calendrical.DateTimeAccessor; import javax.time.calendrical.LocalPeriodUnit; import javax.time.calendrical.PeriodUnit; import javax.time.format.DateTimeParseException; /** * A duration between two instants on the time-line. * <p> * This class models a duration of time and is not tied to any instant. * The model is of a directed duration, meaning that the duration may be negative. * <p> * A physical duration could be of infinite length. * For practicality, the duration is stored with constraints similar to {@link Instant}. * The duration uses nanosecond resolution with a maximum value of the seconds that can * be held in a {@code long}. This is greater than the current estimated age of the universe. * <p> * The range of a duration requires the storage of a number larger than a {@code long}. * To achieve this, the class stores a {@code long} representing seconds and an {@code int} * representing nanosecond-of-second, which will always be between 0 and 999,999,999. * <p> * The duration is measured in "seconds", but these are not necessarily identical to * the scientific "SI second" definition based on atomic clocks. * This difference only impacts durations measured near a leap-second and should not affect * most applications. * See {@link Instant} for a discussion as to the meaning of the second and time-scales. * * <h4>Implementation notes</h4> * This class is immutable and thread-safe. 
*/ public final class Duration implements PlusAdjuster, MinusAdjuster, Comparable<Duration>, Serializable { /** * Constant for a duration of zero. */ public static final Duration ZERO = new Duration(0, 0); /** * Serialization version. */ private static final long serialVersionUID = 1L; /** * Constant for nanos per second. */ private static final int NANOS_PER_SECOND = 1000_000_000; /** * Constant for nanos per second. */ private static final BigInteger BI_NANOS_PER_SECOND = BigInteger.valueOf(NANOS_PER_SECOND); /** * The number of seconds in the duration. */ private final long seconds; /** * The number of nanoseconds in the duration, expressed as a fraction of the * number of seconds. This is always positive, and never exceeds 999,999,999. */ private final int nanos; //----------------------------------------------------------------------- /** * Obtains an instance of {@code Duration} from a number of seconds. * <p> * The nanosecond in second field is set to zero. * * @param seconds the number of seconds, positive or negative * @return a {@code Duration}, not null */ public static Duration ofSeconds(long seconds) { return create(seconds, 0); } /** * Obtains an instance of {@code Duration} from a number of seconds * and an adjustment in nanoseconds. * <p> * This method allows an arbitrary number of nanoseconds to be passed in. * The factory will alter the values of the second and nanosecond in order * to ensure that the stored nanosecond is in the range 0 to 999,999,999. 
* For example, the following will result in the exactly the same duration: * <pre> * Duration.ofSeconds(3, 1); * Duration.ofSeconds(4, -999_999_999); * Duration.ofSeconds(2, 1000_000_001); * </pre> * * @param seconds the number of seconds, positive or negative * @param nanoAdjustment the nanosecond adjustment to the number of seconds, positive or negative * @return a {@code Duration}, not null * @throws ArithmeticException if the adjustment causes the seconds to exceed the capacity of {@code Duration} */ public static Duration ofSeconds(long seconds, long nanoAdjustment) { long secs = DateTimes.safeAdd(seconds, DateTimes.floorDiv(nanoAdjustment, NANOS_PER_SECOND)); int nos = DateTimes.floorMod(nanoAdjustment, NANOS_PER_SECOND); return create(secs, nos); } //----------------------------------------------------------------------- /** * Obtains an instance of {@code Duration} from a number of milliseconds. * <p> * The seconds and nanoseconds are extracted from the specified milliseconds. * * @param millis the number of milliseconds, positive or negative * @return a {@code Duration}, not null */ public static Duration ofMillis(long millis) { long secs = millis / 1000; int mos = (int) (millis % 1000); if (mos < 0) { mos += 1000; secs--; } return create(secs, mos * 1000_000); } //----------------------------------------------------------------------- /** * Obtains an instance of {@code Duration} from a number of nanoseconds. * <p> * The seconds and nanoseconds are extracted from the specified nanoseconds. * * @param nanos the number of nanoseconds, positive or negative * @return a {@code Duration}, not null */ public static Duration ofNanos(long nanos) { long secs = nanos / NANOS_PER_SECOND; int nos = (int) (nanos % NANOS_PER_SECOND); if (nos < 0) { nos += NANOS_PER_SECOND; secs--; } return create(secs, nos); } //----------------------------------------------------------------------- /** * Obtains an instance of {@code Duration} from a number of standard length minutes. 
* <p> * The seconds are calculated based on the standard definition of a minute, * where each minute is 60 seconds. * The nanosecond in second field is set to zero. * * @param minutes the number of minutes, positive or negative * @return a {@code Duration}, not null * @throws ArithmeticException if the input minutes exceeds the capacity of {@code Duration} */ public static Duration ofMinutes(long minutes) { return create(DateTimes.safeMultiply(minutes, 60), 0); } /** * Obtains an instance of {@code Duration} from a number of standard length hours. * <p> * The seconds are calculated based on the standard definition of an hour, * where each hour is 3600 seconds. * The nanosecond in second field is set to zero. * * @param hours the number of hours, positive or negative * @return a {@code Duration}, not null * @throws ArithmeticException if the input hours exceeds the capacity of {@code Duration} */ public static Duration ofHours(long hours) { return create(DateTimes.safeMultiply(hours, 3600), 0); } /** * Obtains an instance of {@code Duration} from a number of standard 24 hour days. * <p> * The seconds are calculated based on the standard definition of a day, * where each day is 86400 seconds which implies a 24 hour day. * The nanosecond in second field is set to zero. * * @param days the number of days, positive or negative * @return a {@code Duration}, not null * @throws ArithmeticException if the input days exceeds the capacity of {@code Duration} */ public static Duration ofDays(long days) { return create(DateTimes.safeMultiply(days, 86400), 0); } //----------------------------------------------------------------------- /** * Obtains an instance of {@code Duration} from a duration in the specified unit. * <p> * The parameters represent the two parts of a phrase like '6 Hours'. For example: * <pre> * Duration.of(3, SECONDS); * Duration.of(465, HOURS); * </pre> * Only a subset of units are accepted by this method. 
* The unit must either have an {@link PeriodUnit#isDurationEstimated() exact duration} or * be {@link LocalPeriodUnit#DAYS} which is treated as 24 hours. Other units throw an exception. * * @param amount the amount of the duration, measured in terms of the unit, positive or negative * @param unit the unit that the duration is measured in, must have an exact duration, not null * @return a {@code Duration}, not null * @throws DateTimeException if the period unit has an estimated duration * @throws ArithmeticException if a numeric overflow occurs */ public static Duration of(long amount, PeriodUnit unit) { return ZERO.plus(amount, unit); } //----------------------------------------------------------------------- /** * Obtains an instance of {@code Duration} representing the duration between two instants. * <p> * A {@code Duration} represents a directed distance between two points on the time-line. * As such, this method will return a negative duration if the end is before the start. * To guarantee to obtain a positive duration call {@link #abs()} on the result of this factory. 
* * @param startInclusive the start instant, inclusive, not null * @param endExclusive the end instant, exclusive, not null * @return a {@code Duration}, not null * @throws ArithmeticException if the calculation exceeds the capacity of {@code Duration} */ public static Duration between(DateTimeAccessor startInclusive, DateTimeAccessor endExclusive) { long secs = DateTimes.safeSubtract(endExclusive.getLong(INSTANT_SECONDS), startInclusive.getLong(INSTANT_SECONDS)); long nanos = endExclusive.getLong(NANO_OF_SECOND) - startInclusive.getLong(NANO_OF_SECOND); secs = DateTimes.safeAdd(secs, DateTimes.floorDiv(nanos, DateTimes.NANOS_PER_SECOND)); nanos = DateTimes.floorMod(nanos, DateTimes.NANOS_PER_SECOND); return create(secs, (int) nanos); // safe from overflow } //----------------------------------------------------------------------- /** * Obtains an instance of {@code Duration} by parsing a text string. * <p> * This will parse the string produced by {@link #toString()} which is * the ISO-8601 format {@code PTnS} where {@code n} is * the number of seconds with optional decimal part. * The number must consist of ASCII numerals. * There must only be a negative sign at the start of the number and it can * only be present if the value is less than zero. * There must be at least one digit before any decimal point. * There must be between 1 and 9 inclusive digits after any decimal point. * The letters (P, T and S) will be accepted in upper or lower case. * The decimal point may be either a dot or a comma. 
* * @param text the text to parse, not null * @return a {@code Duration}, not null * @throws DateTimeParseException if the text cannot be parsed to a {@code Duration} */ public static Duration parse(final CharSequence text) { Objects.requireNonNull(text, "Text to parse"); int len = text.length(); if (len < 4 || (text.charAt(0) != 'P' && text.charAt(0) != 'p') || (text.charAt(1) != 'T' && text.charAt(1) != 't') || (text.charAt(len - 1) != 'S' && text.charAt(len - 1) != 's') || (len == 5 && text.charAt(2) == '-' && text.charAt(3) == '0')) { throw new DateTimeParseException("Duration could not be parsed: " + text, text, 0); } String numberText = text.subSequence(2, len - 1).toString().replace(',', '.'); if (numberText.charAt(0) == '+') { throw new DateTimeParseException("Duration could not be parsed: " + text, text, 2); } int dot = numberText.indexOf('.'); try { if (dot == -1) { // no decimal places if (numberText.startsWith("-0")) { throw new DateTimeParseException("Duration could not be parsed: " + text, text, 2); } return create(Long.parseLong(numberText), 0); } // decimal places boolean negative = false; if (numberText.charAt(0) == '-') { negative = true; } long secs = Long.parseLong(numberText.substring(0, dot)); numberText = numberText.substring(dot + 1); len = numberText.length(); if (len == 0 || len > 9 || numberText.charAt(0) == '-' || numberText.charAt(0) == '+') { throw new DateTimeParseException("Duration could not be parsed: " + text, text, 2); } int nanos = Integer.parseInt(numberText); switch (len) { case 1: nanos *= 100000000; break; case 2: nanos *= 10000000; break; case 3: nanos *= 1000000; break; case 4: nanos *= 100000; break; case 5: nanos *= 10000; break; case 6: nanos *= 1000; break; case 7: nanos *= 100; break; case 8: nanos *= 10; break; } return negative ? 
ofSeconds(secs, -nanos) : create(secs, nanos); } catch (ArithmeticException | NumberFormatException ex) { throw new DateTimeParseException("Duration could not be parsed: " + text, text, 2, ex); } } //----------------------------------------------------------------------- /** * Obtains an instance of {@code Duration} using seconds and nanoseconds. * * @param seconds the length of the duration in seconds, positive or negative * @param nanoAdjustment the nanosecond adjustment within the second, from 0 to 999,999,999 */ private static Duration create(long seconds, int nanoAdjustment) { if ((seconds | nanoAdjustment) == 0) { return ZERO; } return new Duration(seconds, nanoAdjustment); } /** * Constructs an instance of {@code Duration} using seconds and nanoseconds. * * @param seconds the length of the duration in seconds, positive or negative * @param nanos the nanoseconds within the second, from 0 to 999,999,999 */ private Duration(long seconds, int nanos) { super(); this.seconds = seconds; this.nanos = nanos; } /** * Resolves singletons. * * @return the resolved instance, not null */ private Object readResolve() { return (seconds | nanos) == 0 ? ZERO : this; } //----------------------------------------------------------------------- /** * Checks if this duration is zero length. * <p> * A {@code Duration} represents a directed distance between two points on * the time-line and can therefore be positive, zero or negative. * This method checks whether the length is zero. * * @return true if this duration has a total length equal to zero */ public boolean isZero() { return (seconds | nanos) == 0; } /** * Checks if this duration is positive, excluding zero. * <p> * A {@code Duration} represents a directed distance between two points on * the time-line and can therefore be positive, zero or negative. * This method checks whether the length is greater than zero. 
* * @return true if this duration has a total length greater than zero */ public boolean isPositive() { return seconds >= 0 && ((seconds | nanos) != 0); } /** * Checks if this duration is negative, excluding zero. * <p> * A {@code Duration} represents a directed distance between two points on * the time-line and can therefore be positive, zero or negative. * This method checks whether the length is less than zero. * * @return true if this duration has a total length less than zero */ public boolean isNegative() { return seconds < 0; } //----------------------------------------------------------------------- /** * Gets the number of seconds in this duration. * <p> * The length of the duration is stored using two fields - seconds and nanoseconds. * The nanoseconds part is a value from 0 to 999,999,999 that is an adjustment to * the length in seconds. * The total duration is defined by calling this method and {@link #getNano()}. * <p> * A {@code Duration} represents a directed distance between two points on the time-line. * A negative duration is expressed by the negative sign of the seconds part. * A duration of -1 nanosecond is stored as -1 seconds plus 999,999,999 nanoseconds. * * @return the whole seconds part of the length of the duration, positive or negative */ public long getSeconds() { return seconds; } /** * Gets the number of nanoseconds within the second in this duration. * <p> * The length of the duration is stored using two fields - seconds and nanoseconds. * The nanoseconds part is a value from 0 to 999,999,999 that is an adjustment to * the length in seconds. * The total duration is defined by calling this method and {@link #getSeconds()}. * <p> * A {@code Duration} represents a directed distance between two points on the time-line. * A negative duration is expressed by the negative sign of the seconds part. * A duration of -1 nanosecond is stored as -1 seconds plus 999,999,999 nanoseconds. 
* * @return the nanoseconds within the second part of the length of the duration, from 0 to 999,999,999 */ public int getNano() { return nanos; } //----------------------------------------------------------------------- /** * Returns a copy of this duration with the specified duration added. * <p> * This instance is immutable and unaffected by this method call. * * @param duration the duration to add, positive or negative, not null * @return a {@code Duration} based on this duration with the specified duration added, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration plus(Duration duration) { return plus(duration.getSeconds(), duration.getNano()); } /** * Returns a copy of this duration with the specified duration added. * <p> * The duration amount is measured in terms of the specified unit. * Only a subset of units are accepted by this method. * The unit must either have an {@link PeriodUnit#isDurationEstimated() exact duration} or * be {@link LocalPeriodUnit#DAYS} which is treated as 24 hours. Other units throw an exception. * <p> * This instance is immutable and unaffected by this method call. 
* * @param amountToAdd the amount of the period, measured in terms of the unit, positive or negative * @param unit the unit that the period is measured in, must have an exact duration, not null * @return a {@code Duration} based on this duration with the specified duration added, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration plus(long amountToAdd, PeriodUnit unit) { Objects.requireNonNull(unit, "PeriodUnit"); if (unit == DAYS) { return plus(DateTimes.safeMultiply(amountToAdd, DateTimes.SECONDS_PER_DAY), 0); } if (unit.isDurationEstimated()) { throw new DateTimeException("Unit must not have an estimated duration"); } if (amountToAdd == 0) { return this; } if (unit instanceof LocalPeriodUnit) { switch ((LocalPeriodUnit) unit) { case NANOS: return plusNanos(amountToAdd); case MICROS: return plusSeconds((amountToAdd / (1000_000L * 1000)) * 1000).plusNanos((amountToAdd % (1000_000L * 1000)) * 1000); case MILLIS: return plusMillis(amountToAdd); case SECONDS: return plusSeconds(amountToAdd); } return plusSeconds(DateTimes.safeMultiply(unit.getDuration().seconds, amountToAdd)); } Duration duration = unit.getDuration().multipliedBy(amountToAdd); return plusSeconds(duration.getSeconds()).plusNanos(duration.getNano()); } //----------------------------------------------------------------------- /** * Returns a copy of this duration with the specified duration in seconds added. * <p> * This instance is immutable and unaffected by this method call. * * @param secondsToAdd the seconds to add, positive or negative * @return a {@code Duration} based on this duration with the specified seconds added, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration plusSeconds(long secondsToAdd) { return plus(secondsToAdd, 0); } /** * Returns a copy of this duration with the specified duration in milliseconds added. * <p> * This instance is immutable and unaffected by this method call. 
* * @param millisToAdd the milliseconds to add, positive or negative * @return a {@code Duration} based on this duration with the specified milliseconds added, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration plusMillis(long millisToAdd) { return plus(millisToAdd / 1000, (millisToAdd % 1000) * 1000_000); } /** * Returns a copy of this duration with the specified duration in nanoseconds added. * <p> * This instance is immutable and unaffected by this method call. * * @param nanosToAdd the nanoseconds to add, positive or negative * @return a {@code Duration} based on this duration with the specified nanoseconds added, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration plusNanos(long nanosToAdd) { return plus(0, nanosToAdd); } /** * Returns a copy of this duration with the specified duration added. * <p> * This instance is immutable and unaffected by this method call. * * @param secondsToAdd the seconds to add, positive or negative * @param nanosToAdd the nanos to add, positive or negative * @return a {@code Duration} based on this duration with the specified seconds added, not null * @throws ArithmeticException if numeric overflow occurs */ private Duration plus(long secondsToAdd, long nanosToAdd) { if ((secondsToAdd | nanosToAdd) == 0) { return this; } long epochSec = DateTimes.safeAdd(seconds, secondsToAdd); epochSec = DateTimes.safeAdd(epochSec, nanosToAdd / NANOS_PER_SECOND); nanosToAdd = nanosToAdd % NANOS_PER_SECOND; long nanoAdjustment = nanos + nanosToAdd; // safe int+NANOS_PER_SECOND return ofSeconds(epochSec, nanoAdjustment); } //----------------------------------------------------------------------- /** * Returns a copy of this duration with the specified duration subtracted. * <p> * This instance is immutable and unaffected by this method call. 
* * @param duration the duration to subtract, positive or negative, not null * @return a {@code Duration} based on this duration with the specified duration subtracted, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration minus(Duration duration) { long secsToSubtract = duration.getSeconds(); int nanosToSubtract = duration.getNano(); if (secsToSubtract == Long.MIN_VALUE) { return plus(Long.MAX_VALUE, -nanosToSubtract).plus(1, 0); } return plus(-secsToSubtract, -nanosToSubtract); } /** * Returns a copy of this duration with the specified duration subtracted. * <p> * The duration amount is measured in terms of the specified unit. * Only a subset of units are accepted by this method. * The unit must either have an {@link PeriodUnit#isDurationEstimated() exact duration} or * be {@link LocalPeriodUnit#DAYS} which is treated as 24 hours. Other units throw an exception. * <p> * This instance is immutable and unaffected by this method call. * * @param amountToSubtract the amount of the period, measured in terms of the unit, positive or negative * @param unit the unit that the period is measured in, must have an exact duration, not null * @return a {@code Duration} based on this duration with the specified duration subtracted, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration minus(long amountToSubtract, PeriodUnit unit) { return (amountToSubtract == Long.MIN_VALUE ? plus(Long.MAX_VALUE, unit).plus(1, unit) : plus(-amountToSubtract, unit)); } //----------------------------------------------------------------------- /** * Returns a copy of this duration with the specified duration in seconds subtracted. * <p> * This instance is immutable and unaffected by this method call. 
* * @param secondsToSubtract the seconds to subtract, positive or negative * @return a {@code Duration} based on this duration with the specified seconds subtracted, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration minusSeconds(long secondsToSubtract) { return (secondsToSubtract == Long.MIN_VALUE ? plusSeconds(Long.MAX_VALUE).plusSeconds(1) : plusSeconds(-secondsToSubtract)); } /** * Returns a copy of this duration with the specified duration in milliseconds subtracted. * <p> * This instance is immutable and unaffected by this method call. * * @param millisToSubtract the milliseconds to subtract, positive or negative * @return a {@code Duration} based on this duration with the specified milliseconds subtracted, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration minusMillis(long millisToSubtract) { return (millisToSubtract == Long.MIN_VALUE ? plusMillis(Long.MAX_VALUE).plusMillis(1) : plusMillis(-millisToSubtract)); } /** * Returns a copy of this duration with the specified duration in nanoseconds subtracted. * <p> * This instance is immutable and unaffected by this method call. * * @param nanosToSubtract the nanoseconds to subtract, positive or negative * @return a {@code Duration} based on this duration with the specified nanoseconds subtracted, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration minusNanos(long nanosToSubtract) { return (nanosToSubtract == Long.MIN_VALUE ? plusNanos(Long.MAX_VALUE).plusNanos(1) : plusNanos(-nanosToSubtract)); } //----------------------------------------------------------------------- /** * Returns a copy of this duration multiplied by the scalar. * <p> * This instance is immutable and unaffected by this method call. 
* * @param multiplicand the value to multiply the duration by, positive or negative * @return a {@code Duration} based on this duration multiplied by the specified scalar, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration multipliedBy(long multiplicand) { if (multiplicand == 0) { return ZERO; } if (multiplicand == 1) { return this; } return create(toSeconds().multiply(BigDecimal.valueOf(multiplicand))); } /** * Returns a copy of this duration divided by the specified value. * <p> * This instance is immutable and unaffected by this method call. * * @param divisor the value to divide the duration by, positive or negative, not zero * @return a {@code Duration} based on this duration divided by the specified divisor, not null * @throws ArithmeticException if the divisor is zero * @throws ArithmeticException if numeric overflow occurs */ public Duration dividedBy(long divisor) { if (divisor == 0) { throw new ArithmeticException("Cannot divide by zero"); } if (divisor == 1) { return this; } return create(toSeconds().divide(BigDecimal.valueOf(divisor), RoundingMode.DOWN)); } /** * Converts this duration to the total length in seconds and * fractional nanoseconds expressed as a {@code BigDecimal}. * * @return the total length of the duration in seconds, with a scale of 9, not null */ private BigDecimal toSeconds() { return BigDecimal.valueOf(seconds).add(BigDecimal.valueOf(nanos, 9)); } /** * Creates an instance of {@code Duration} from a number of seconds. 
* * @param seconds the number of seconds, up to scale 9, positive or negative * @return a {@code Duration}, not null * @throws ArithmeticException if numeric overflow occurs */ private static Duration create(BigDecimal seconds) { BigInteger nanos = seconds.movePointRight(9).toBigIntegerExact(); BigInteger[] divRem = nanos.divideAndRemainder(BI_NANOS_PER_SECOND); if (divRem[0].bitLength() > 63) { throw new ArithmeticException("Exceeds capacity of Duration: " + nanos); } return ofSeconds(divRem[0].longValue(), divRem[1].intValue()); } //----------------------------------------------------------------------- /** * Returns a copy of this duration with the length negated. * <p> * This method swaps the sign of the total length of this duration. * For example, {@code PT1.3S} will be returned as {@code PT-1.3S}. * <p> * This instance is immutable and unaffected by this method call. * * @return a {@code Duration} based on this duration with the amount negated, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration negated() { return multipliedBy(-1); } /** * Returns a copy of this duration with a positive length. * <p> * This method returns a positive duration by effectively removing the sign from any negative total length. * For example, {@code PT-1.3S} will be returned as {@code PT1.3S}. * <p> * This instance is immutable and unaffected by this method call. * * @return a {@code Duration} based on this duration with an absolute length, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration abs() { return isNegative() ? negated() : this; } //------------------------------------------------------------------------- /** * Adds this duration to the specified date-time object. * <p> * This method is not intended to be called by application code directly. * Applications should use the {@code plus(PlusAdjuster)} method * on the date-time object passing this duration as the argument. 
     * @param dateTime  the date-time object to adjust, not null
     * @return an object of the same type with the adjustment made, not null
     * @throws DateTimeException if unable to add
     * @throws ArithmeticException if numeric overflow occurs
     */
    @Override
    public DateTime doAdd(DateTime dateTime) {
        long instantSecs = dateTime.getLong(INSTANT_SECONDS);
        long instantNanos = dateTime.getLong(NANO_OF_SECOND);
        instantSecs = DateTimes.safeAdd(instantSecs, seconds);
        instantNanos = DateTimes.safeAdd(instantNanos, nanos);
        // carry any overflowed nanoseconds into the seconds, normalizing nanos to 0..999,999,999
        instantSecs = DateTimes.safeAdd(instantSecs, DateTimes.floorDiv(instantNanos, DateTimes.NANOS_PER_SECOND));
        instantNanos = DateTimes.floorMod(instantNanos, DateTimes.NANOS_PER_SECOND);
        return dateTime.with(INSTANT_SECONDS, instantSecs).with(NANO_OF_SECOND, instantNanos);
    }

    /**
     * Subtracts this duration from the specified date-time object.
     * <p>
     * This method is not intended to be called by application code directly.
     * Applications should use the {@code minus(MinusAdjuster)} method
     * on the date-time object passing this duration as the argument.
     *
     * @param dateTime  the date-time object to adjust, not null
     * @return an object of the same type with the adjustment made, not null
     * @throws DateTimeException if unable to subtract
     * @throws ArithmeticException if numeric overflow occurs
     */
    @Override
    public DateTime doSubtract(DateTime dateTime) {
        long instantSecs = dateTime.getLong(INSTANT_SECONDS);
        long instantNanos = dateTime.getLong(NANO_OF_SECOND);
        instantSecs = DateTimes.safeSubtract(instantSecs, seconds);
        instantNanos = DateTimes.safeSubtract(instantNanos, nanos);
        // floorDiv/floorMod re-normalize so the nano-of-second stays in 0..999,999,999
        instantSecs = DateTimes.safeAdd(instantSecs, DateTimes.floorDiv(instantNanos, DateTimes.NANOS_PER_SECOND));
        instantNanos = DateTimes.floorMod(instantNanos, DateTimes.NANOS_PER_SECOND);
        return dateTime.with(INSTANT_SECONDS, instantSecs).with(NANO_OF_SECOND, instantNanos);
    }

    //-----------------------------------------------------------------------
    /**
     * Converts this duration to the total length in milliseconds.
     * <p>
     * If this duration is too large to fit in a {@code long} milliseconds, then an
     * exception is thrown.
     * <p>
     * If this duration has greater than millisecond precision, then the conversion
     * will drop any excess precision information as though the amount in nanoseconds
     * was subject to integer division by one million.
     *
     * @return the total length of the duration in milliseconds
     * @throws ArithmeticException if numeric overflow occurs
     */
    public long toMillis() {
        long millis = DateTimes.safeMultiply(seconds, 1000);
        // nanos is 0..999,999,999, so this addition cannot overflow after the safe multiply check
        millis = DateTimes.safeAdd(millis, nanos / 1000_000);
        return millis;
    }

    /**
     * Converts this duration to the total length in nanoseconds expressed as a {@code long}.
     * <p>
     * If this duration is too large to fit in a {@code long} nanoseconds, then an
     * exception is thrown.
* * @return the total length of the duration in nanoseconds * @throws ArithmeticException if numeric overflow occurs */ public long toNanos() { long millis = DateTimes.safeMultiply(seconds, 1000_000_000); millis = DateTimes.safeAdd(millis, nanos); return millis; } //----------------------------------------------------------------------- /** * Compares this duration to the specified {@code Duration}. * <p> * The comparison is based on the total length of the durations. * * @param otherDuration the other duration to compare to, not null * @return the comparator value, negative if less, positive if greater */ public int compareTo(Duration otherDuration) { int cmp = Long.compare(seconds, otherDuration.seconds); if (cmp != 0) { return cmp; } return nanos - otherDuration.nanos; } /** * Checks if this duration is greater than the specified {@code Duration}. * <p> * The comparison is based on the total length of the durations. * * @param otherDuration the other duration to compare to, not null * @return true if this duration is greater than the specified duration */ public boolean isGreaterThan(Duration otherDuration) { return compareTo(otherDuration) > 0; } /** * Checks if this duration is less than the specified {@code Duration}. * <p> * The comparison is based on the total length of the durations. * * @param otherDuration the other duration to compare to, not null * @return true if this duration is less than the specified duration */ public boolean isLessThan(Duration otherDuration) { return compareTo(otherDuration) < 0; } //----------------------------------------------------------------------- /** * Checks if this duration is equal to the specified {@code Duration}. * <p> * The comparison is based on the total length of the durations. 
* * @param otherDuration the other duration, null returns false * @return true if the other duration is equal to this one */ @Override public boolean equals(Object otherDuration) { if (this == otherDuration) { return true; } if (otherDuration instanceof Duration) { Duration other = (Duration) otherDuration; return this.seconds == other.seconds && this.nanos == other.nanos; } return false; } /** * A hash code for this duration. * * @return a suitable hash code */ @Override public int hashCode() { return ((int) (seconds ^ (seconds >>> 32))) + (51 * nanos); } //----------------------------------------------------------------------- /** * A string representation of this duration using ISO-8601 seconds * based representation, such as {@code PT12.345S}. * <p> * The format of the returned string will be {@code PTnS} where n is * the seconds and fractional seconds of the duration. * * @return an ISO-8601 representation of this duration, not null */ @Override public String toString() { StringBuilder buf = new StringBuilder(24); buf.append("PT"); if (seconds < 0 && nanos > 0) { if (seconds == -1) { buf.append("-0"); } else { buf.append(seconds + 1); } } else { buf.append(seconds); } if (nanos > 0) { int pos = buf.length(); if (seconds < 0) { buf.append(2 * NANOS_PER_SECOND - nanos); } else { buf.append(nanos + NANOS_PER_SECOND); } while (buf.charAt(buf.length() - 1) == '0') { buf.setLength(buf.length() - 1); } buf.setCharAt(pos, '.'); } buf.append('S'); return buf.toString(); } }
src/main/java/javax/time/Duration.java
/* * Copyright (c) 2007-2012, Stephen Colebourne & Michael Nascimento Santos * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * * Neither the name of JSR-310 nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package javax.time; import static javax.time.calendrical.LocalDateTimeField.INSTANT_SECONDS; import static javax.time.calendrical.LocalDateTimeField.NANO_OF_SECOND; import static javax.time.calendrical.LocalPeriodUnit.DAYS; import java.io.Serializable; import java.math.BigDecimal; import java.math.BigInteger; import java.math.RoundingMode; import java.util.Objects; import javax.time.calendrical.DateTime; import javax.time.calendrical.DateTime.MinusAdjuster; import javax.time.calendrical.DateTime.PlusAdjuster; import javax.time.calendrical.DateTimeAccessor; import javax.time.calendrical.LocalPeriodUnit; import javax.time.calendrical.PeriodUnit; import javax.time.format.DateTimeParseException; /** * A duration between two instants on the time-line. * <p> * This class models a duration of time and is not tied to any instant. * The model is of a directed duration, meaning that the duration may be negative. * <p> * A physical duration could be of infinite length. * For practicality, the duration is stored with constraints similar to {@link Instant}. * The duration uses nanosecond resolution with a maximum value of the seconds that can * be held in a {@code long}. This is greater than the current estimated age of the universe. * <p> * The range of a duration requires the storage of a number larger than a {@code long}. * To achieve this, the class stores a {@code long} representing seconds and an {@code int} * representing nanosecond-of-second, which will always be between 0 and 999,999,999. * <p> * The duration is measured in "seconds", but these are not necessarily identical to * the scientific "SI second" definition based on atomic clocks. * This difference only impacts durations measured near a leap-second and should not affect * most applications. * See {@link Instant} for a discussion as to the meaning of the second and time-scales. * * <h4>Implementation notes</h4> * This class is immutable and thread-safe. 
 */
public final class Duration implements PlusAdjuster, MinusAdjuster, Comparable<Duration>, Serializable {

    /**
     * Constant for a duration of zero.
     */
    public static final Duration ZERO = new Duration(0, 0);

    /**
     * Serialization version.
     */
    private static final long serialVersionUID = 1L;
    /**
     * Constant for nanos per second.
     */
    private static final int NANOS_PER_SECOND = 1000_000_000;
    /**
     * Constant for nanos per second, expressed as a {@code BigInteger}.
     */
    private static final BigInteger BI_NANOS_PER_SECOND = BigInteger.valueOf(NANOS_PER_SECOND);

    /**
     * The number of seconds in the duration.
     */
    private final long seconds;
    /**
     * The number of nanoseconds in the duration, expressed as a fraction of the
     * number of seconds. This is always positive, and never exceeds 999,999,999.
     */
    private final int nanos;

    //-----------------------------------------------------------------------
    /**
     * Obtains an instance of {@code Duration} from a number of seconds.
     * <p>
     * The nanosecond in second field is set to zero.
     *
     * @param seconds  the number of seconds, positive or negative
     * @return a {@code Duration}, not null
     */
    public static Duration ofSeconds(long seconds) {
        return create(seconds, 0);
    }

    /**
     * Obtains an instance of {@code Duration} from a number of seconds
     * and an adjustment in nanoseconds.
     * <p>
     * This method allows an arbitrary number of nanoseconds to be passed in.
     * The factory will alter the values of the second and nanosecond in order
     * to ensure that the stored nanosecond is in the range 0 to 999,999,999.
     * For example, the following will result in exactly the same duration:
     * <pre>
     *  Duration.ofSeconds(3, 1);
     *  Duration.ofSeconds(4, -999_999_999);
     *  Duration.ofSeconds(2, 1000_000_001);
     * </pre>
     *
     * @param seconds  the number of seconds, positive or negative
     * @param nanoAdjustment  the nanosecond adjustment to the number of seconds, positive or negative
     * @return a {@code Duration}, not null
     * @throws ArithmeticException if the adjustment causes the seconds to exceed the capacity of {@code Duration}
     */
    public static Duration ofSeconds(long seconds, long nanoAdjustment) {
        // floorDiv/floorMod normalize negative adjustments into the 0..999,999,999 range
        long secs = DateTimes.safeAdd(seconds, DateTimes.floorDiv(nanoAdjustment, NANOS_PER_SECOND));
        int nos = DateTimes.floorMod(nanoAdjustment, NANOS_PER_SECOND);
        return create(secs, nos);
    }

    //-----------------------------------------------------------------------
    /**
     * Obtains an instance of {@code Duration} from a number of milliseconds.
     * <p>
     * The seconds and nanoseconds are extracted from the specified milliseconds.
     *
     * @param millis  the number of milliseconds, positive or negative
     * @return a {@code Duration}, not null
     */
    public static Duration ofMillis(long millis) {
        long secs = millis / 1000;
        int mos = (int) (millis % 1000);
        // Java's % truncates toward zero, so shift a negative remainder up by one second
        if (mos < 0) {
            mos += 1000;
            secs--;
        }
        return create(secs, mos * 1000_000);
    }

    //-----------------------------------------------------------------------
    /**
     * Obtains an instance of {@code Duration} from a number of nanoseconds.
     * <p>
     * The seconds and nanoseconds are extracted from the specified nanoseconds.
     *
     * @param nanos  the number of nanoseconds, positive or negative
     * @return a {@code Duration}, not null
     */
    public static Duration ofNanos(long nanos) {
        long secs = nanos / NANOS_PER_SECOND;
        int nos = (int) (nanos % NANOS_PER_SECOND);
        // normalize a negative remainder so the stored nanos are always 0..999,999,999
        if (nos < 0) {
            nos += NANOS_PER_SECOND;
            secs--;
        }
        return create(secs, nos);
    }

    //-----------------------------------------------------------------------
    /**
     * Obtains an instance of {@code Duration} from a number of standard length minutes.
* <p> * The seconds are calculated based on the standard definition of a minute, * where each minute is 60 seconds. * The nanosecond in second field is set to zero. * * @param minutes the number of minutes, positive or negative * @return a {@code Duration}, not null * @throws ArithmeticException if the input minutes exceeds the capacity of {@code Duration} */ public static Duration ofMinutes(long minutes) { return create(DateTimes.safeMultiply(minutes, 60), 0); } /** * Obtains an instance of {@code Duration} from a number of standard length hours. * <p> * The seconds are calculated based on the standard definition of an hour, * where each hour is 3600 seconds. * The nanosecond in second field is set to zero. * * @param hours the number of hours, positive or negative * @return a {@code Duration}, not null * @throws ArithmeticException if the input hours exceeds the capacity of {@code Duration} */ public static Duration ofHours(long hours) { return create(DateTimes.safeMultiply(hours, 3600), 0); } /** * Obtains an instance of {@code Duration} from a number of standard 24 hour days. * <p> * The seconds are calculated based on the standard definition of a day, * where each day is 86400 seconds which implies a 24 hour day. * The nanosecond in second field is set to zero. * * @param days the number of days, positive or negative * @return a {@code Duration}, not null * @throws ArithmeticException if the input days exceeds the capacity of {@code Duration} */ public static Duration ofDays(long days) { return create(DateTimes.safeMultiply(days, 86400), 0); } //----------------------------------------------------------------------- /** * Obtains an instance of {@code Duration} from a duration in the specified unit. * <p> * The parameters represent the two parts of a phrase like '6 Hours'. For example: * <pre> * Duration.of(3, SECONDS); * Duration.of(465, HOURS); * </pre> * Only a subset of units are accepted by this method. 
* The unit must either have an {@link PeriodUnit#isDurationEstimated() exact duration} or * be {@link LocalPeriodUnit#DAYS} which is treated as 24 hours. Other units throw an exception. * * @param amount the amount of the duration, measured in terms of the unit, positive or negative * @param unit the unit that the duration is measured in, must have an exact duration, not null * @return a {@code Duration}, not null * @throws DateTimeException if the period unit has an estimated duration * @throws ArithmeticException if a numeric overflow occurs */ public static Duration of(long amount, PeriodUnit unit) { return ZERO.plus(amount, unit); } //----------------------------------------------------------------------- /** * Obtains an instance of {@code Duration} representing the duration between two instants. * <p> * A {@code Duration} represents a directed distance between two points on the time-line. * As such, this method will return a negative duration if the end is before the start. * To guarantee to obtain a positive duration call {@link #abs()} on the result of this factory. 
* * @param startInclusive the start instant, inclusive, not null * @param endExclusive the end instant, exclusive, not null * @return a {@code Duration}, not null * @throws ArithmeticException if the calculation exceeds the capacity of {@code Duration} */ public static Duration between(DateTimeAccessor startInclusive, DateTimeAccessor endExclusive) { long secs = DateTimes.safeSubtract(endExclusive.getLong(INSTANT_SECONDS), startInclusive.getLong(INSTANT_SECONDS)); long nanos = endExclusive.getLong(NANO_OF_SECOND) - startInclusive.getLong(NANO_OF_SECOND); secs = DateTimes.safeAdd(secs, DateTimes.floorDiv(nanos, DateTimes.NANOS_PER_SECOND)); nanos = DateTimes.floorMod(nanos, DateTimes.NANOS_PER_SECOND); return create(secs, (int) nanos); // safe from overflow } //----------------------------------------------------------------------- /** * Obtains an instance of {@code Duration} by parsing a text string. * <p> * This will parse the string produced by {@link #toString()} which is * the ISO-8601 format {@code PTnS} where {@code n} is * the number of seconds with optional decimal part. * The number must consist of ASCII numerals. * There must only be a negative sign at the start of the number and it can * only be present if the value is less than zero. * There must be at least one digit before any decimal point. * There must be between 1 and 9 inclusive digits after any decimal point. * The letters (P, T and S) will be accepted in upper or lower case. * The decimal point may be either a dot or a comma. 
     * @param text  the text to parse, not null
     * @return a {@code Duration}, not null
     * @throws DateTimeParseException if the text cannot be parsed to a {@code Duration}
     */
    public static Duration parse(final CharSequence text) {
        Objects.requireNonNull(text, "Text to parse");
        int len = text.length();
        // minimum valid input is "PTnS"; also reject the "-0" seconds form up front
        if (len < 4 ||
                (text.charAt(0) != 'P' && text.charAt(0) != 'p') ||
                (text.charAt(1) != 'T' && text.charAt(1) != 't') ||
                (text.charAt(len - 1) != 'S' && text.charAt(len - 1) != 's') ||
                (len == 5 && text.charAt(2) == '-' && text.charAt(3) == '0')) {
            throw new DateTimeParseException("Duration could not be parsed: " + text, text, 0);
        }
        // strip the "PT" prefix and "S" suffix; a comma decimal separator is accepted
        String numberText = text.subSequence(2, len - 1).toString().replace(',', '.');
        if (numberText.charAt(0) == '+') {
            throw new DateTimeParseException("Duration could not be parsed: " + text, text, 2);
        }
        int dot = numberText.indexOf('.');
        try {
            if (dot == -1) {
                // no decimal places
                if (numberText.startsWith("-0")) {
                    throw new DateTimeParseException("Duration could not be parsed: " + text, text, 2);
                }
                return create(Long.parseLong(numberText), 0);
            }
            // decimal places
            boolean negative = false;
            if (numberText.charAt(0) == '-') {
                negative = true;
            }
            long secs = Long.parseLong(numberText.substring(0, dot));
            numberText = numberText.substring(dot + 1);
            len = numberText.length();
            if (len == 0 || len > 9 || numberText.charAt(0) == '-' || numberText.charAt(0) == '+') {
                throw new DateTimeParseException("Duration could not be parsed: " + text, text, 2);
            }
            int nanos = Integer.parseInt(numberText);
            // scale the fraction up to nanoseconds based on the number of digits parsed
            switch (len) {
                case 1:
                    nanos *= 100000000;
                    break;
                case 2:
                    nanos *= 10000000;
                    break;
                case 3:
                    nanos *= 1000000;
                    break;
                case 4:
                    nanos *= 100000;
                    break;
                case 5:
                    nanos *= 10000;
                    break;
                case 6:
                    nanos *= 1000;
                    break;
                case 7:
                    nanos *= 100;
                    break;
                case 8:
                    nanos *= 10;
                    break;
            }
            // negative values route through ofSeconds so the nanos are re-normalized
            return negative ? ofSeconds(secs, -nanos) : create(secs, nanos);
        } catch (ArithmeticException | NumberFormatException ex) {
            throw new DateTimeParseException("Duration could not be parsed: " + text, text, 2, ex);
        }
    }

    //-----------------------------------------------------------------------
    /**
     * Obtains an instance of {@code Duration} using seconds and nanoseconds.
     *
     * @param seconds  the length of the duration in seconds, positive or negative
     * @param nanoAdjustment  the nanosecond adjustment within the second, from 0 to 999,999,999
     * @return a {@code Duration}, not null
     */
    private static Duration create(long seconds, int nanoAdjustment) {
        // reuse the shared ZERO singleton when both parts are zero
        if ((seconds | nanoAdjustment) == 0) {
            return ZERO;
        }
        return new Duration(seconds, nanoAdjustment);
    }

    /**
     * Constructs an instance of {@code Duration} using seconds and nanoseconds.
     *
     * @param seconds  the length of the duration in seconds, positive or negative
     * @param nanos  the nanoseconds within the second, from 0 to 999,999,999
     */
    private Duration(long seconds, int nanos) {
        super();
        this.seconds = seconds;
        this.nanos = nanos;
    }

    /**
     * Resolves singletons.
     *
     * @return the resolved instance, not null
     */
    private Object readResolve() {
        return (seconds | nanos) == 0 ? ZERO : this;
    }

    //-----------------------------------------------------------------------
    /**
     * Checks if this duration is zero length.
     * <p>
     * A {@code Duration} represents a directed distance between two points on
     * the time-line and can therefore be positive, zero or negative.
     * This method checks whether the length is zero.
     *
     * @return true if this duration has a total length equal to zero
     */
    public boolean isZero() {
        return (seconds | nanos) == 0;
    }

    /**
     * Checks if this duration is positive, excluding zero.
     * <p>
     * A {@code Duration} represents a directed distance between two points on
     * the time-line and can therefore be positive, zero or negative.
     * This method checks whether the length is greater than zero.
* * @return true if this duration has a total length greater than zero */ public boolean isPositive() { return seconds >= 0 && ((seconds | nanos) != 0); } /** * Checks if this duration is negative, excluding zero. * <p> * A {@code Duration} represents a directed distance between two points on * the time-line and can therefore be positive, zero or negative. * This method checks whether the length is less than zero. * * @return true if this duration has a total length less than zero */ public boolean isNegative() { return seconds < 0; } //----------------------------------------------------------------------- /** * Gets the number of seconds in this duration. * <p> * The length of the duration is stored using two fields - seconds and nanoseconds. * The nanoseconds part is a value from 0 to 999,999,999 that is an adjustment to * the length in seconds. * The total duration is defined by calling this method and {@link #getNano()}. * <p> * A {@code Duration} represents a directed distance between two points on the time-line. * A negative duration is expressed by the negative sign of the seconds part. * A duration of -1 nanosecond is stored as -1 seconds plus 999,999,999 nanoseconds. * * @return the whole seconds part of the length of the duration, positive or negative */ public long getSeconds() { return seconds; } /** * Gets the number of nanoseconds within the second in this duration. * <p> * The length of the duration is stored using two fields - seconds and nanoseconds. * The nanoseconds part is a value from 0 to 999,999,999 that is an adjustment to * the length in seconds. * The total duration is defined by calling this method and {@link #getSeconds()}. * <p> * A {@code Duration} represents a directed distance between two points on the time-line. * A negative duration is expressed by the negative sign of the seconds part. * A duration of -1 nanosecond is stored as -1 seconds plus 999,999,999 nanoseconds. 
* * @return the nanoseconds within the second part of the length of the duration, from 0 to 999,999,999 */ public int getNano() { return nanos; } //----------------------------------------------------------------------- /** * Returns a copy of this duration with the specified duration added. * <p> * This instance is immutable and unaffected by this method call. * * @param duration the duration to add, positive or negative, not null * @return a {@code Duration} based on this duration with the specified duration added, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration plus(Duration duration) { return plus(duration.getSeconds(), duration.getNano()); } /** * Returns a copy of this duration with the specified duration added. * <p> * The duration amount is measured in terms of the specified unit. * Only a subset of units are accepted by this method. * The unit must either have an {@link PeriodUnit#isDurationEstimated() exact duration} or * be {@link LocalPeriodUnit#DAYS} which is treated as 24 hours. Other units throw an exception. * <p> * This instance is immutable and unaffected by this method call. 
     * @param amountToAdd  the amount of the period, measured in terms of the unit, positive or negative
     * @param unit  the unit that the period is measured in, must have an exact duration, not null
     * @return a {@code Duration} based on this duration with the specified duration added, not null
     * @throws ArithmeticException if numeric overflow occurs
     */
    public Duration plus(long amountToAdd, PeriodUnit unit) {
        Objects.requireNonNull(unit, "PeriodUnit");
        // DAYS is the one estimated-duration unit that is accepted, treated as exactly 24 hours
        if (unit == DAYS) {
            return plus(DateTimes.safeMultiply(amountToAdd, DateTimes.SECONDS_PER_DAY), 0);
        }
        if (unit.isDurationEstimated()) {
            throw new DateTimeException("Unit must not have an estimated duration");
        }
        if (amountToAdd == 0) {
            return this;
        }
        if (unit instanceof LocalPeriodUnit) {
            switch ((LocalPeriodUnit) unit) {
                case NANOS: return plusNanos(amountToAdd);
                // split micros into whole-second and sub-second portions to avoid overflow
                case MICROS: return plusSeconds((amountToAdd / (1000_000L * 1000)) * 1000).plusNanos((amountToAdd % (1000_000L * 1000)) * 1000);
                case MILLIS: return plusMillis(amountToAdd);
                case SECONDS: return plusSeconds(amountToAdd);
            }
            // remaining LocalPeriodUnits (e.g. minutes, hours) have whole-second durations
            return plusSeconds(DateTimes.safeMultiply(unit.getDuration().seconds, amountToAdd));
        }
        // arbitrary units: scale the unit's duration, then add seconds and nanos separately
        Duration duration = unit.getDuration().multipliedBy(amountToAdd);
        return plusSeconds(duration.getSeconds()).plusNanos(duration.getNano());
    }

    //-----------------------------------------------------------------------
    /**
     * Returns a copy of this duration with the specified duration in seconds added.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param secondsToAdd  the seconds to add, positive or negative
     * @return a {@code Duration} based on this duration with the specified seconds added, not null
     * @throws ArithmeticException if numeric overflow occurs
     */
    public Duration plusSeconds(long secondsToAdd) {
        return plus(secondsToAdd, 0);
    }

    /**
     * Returns a copy of this duration with the specified duration in milliseconds added.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param millisToAdd  the milliseconds to add, positive or negative
     * @return a {@code Duration} based on this duration with the specified milliseconds added, not null
     * @throws ArithmeticException if numeric overflow occurs
     */
    public Duration plusMillis(long millisToAdd) {
        return plus(millisToAdd / 1000, (millisToAdd % 1000) * 1000_000);
    }

    /**
     * Returns a copy of this duration with the specified duration in nanoseconds added.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param nanosToAdd  the nanoseconds to add, positive or negative
     * @return a {@code Duration} based on this duration with the specified nanoseconds added, not null
     * @throws ArithmeticException if numeric overflow occurs
     */
    public Duration plusNanos(long nanosToAdd) {
        return plus(0, nanosToAdd);
    }

    /**
     * Returns a copy of this duration with the specified duration added.
     * <p>
     * This instance is immutable and unaffected by this method call.
     *
     * @param secondsToAdd  the seconds to add, positive or negative
     * @param nanosToAdd  the nanos to add, positive or negative
     * @return a {@code Duration} based on this duration with the specified seconds added, not null
     * @throws ArithmeticException if numeric overflow occurs
     */
    private Duration plus(long secondsToAdd, long nanosToAdd) {
        if ((secondsToAdd | nanosToAdd) == 0) {
            return this;
        }
        long epochSec = DateTimes.safeAdd(seconds, secondsToAdd);
        // fold whole seconds out of the nano amount before combining fractions
        epochSec = DateTimes.safeAdd(epochSec, nanosToAdd / NANOS_PER_SECOND);
        nanosToAdd = nanosToAdd % NANOS_PER_SECOND;
        long nanoAdjustment = nanos + nanosToAdd;  // safe int+NANOS_PER_SECOND
        return ofSeconds(epochSec, nanoAdjustment);
    }

    //-----------------------------------------------------------------------
    /**
     * Returns a copy of this duration with the specified duration subtracted.
     * <p>
     * This instance is immutable and unaffected by this method call.
* * @param duration the duration to subtract, positive or negative, not null * @return a {@code Duration} based on this duration with the specified duration subtracted, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration minus(Duration duration) { long secsToSubtract = duration.getSeconds(); int nanosToSubtract = duration.getNano(); if (secsToSubtract == Long.MIN_VALUE) { return plus(Long.MAX_VALUE, -nanosToSubtract).plus(1, 0); } return plus(-secsToSubtract, -nanosToSubtract); } /** * Returns a copy of this duration with the specified duration subtracted. * <p> * The duration amount is measured in terms of the specified unit. * Only a subset of units are accepted by this method. * The unit must either have an {@link PeriodUnit#isDurationEstimated() exact duration} or * be {@link LocalPeriodUnit#DAYS} which is treated as 24 hours. Other units throw an exception. * <p> * This instance is immutable and unaffected by this method call. * * @param amountToSubtract the amount of the period, measured in terms of the unit, positive or negative * @param unit the unit that the period is measured in, must have an exact duration, not null * @return a {@code Duration} based on this duration with the specified duration subtracted, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration minus(long amountToSubtract, PeriodUnit unit) { return (amountToSubtract == Long.MIN_VALUE ? plus(Long.MAX_VALUE, unit).plus(1, unit) : plus(-amountToSubtract, unit)); } //----------------------------------------------------------------------- /** * Returns a copy of this duration with the specified duration in seconds subtracted. * <p> * This instance is immutable and unaffected by this method call. 
* * @param secondsToSubtract the seconds to subtract, positive or negative * @return a {@code Duration} based on this duration with the specified seconds subtracted, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration minusSeconds(long secondsToSubtract) { return (secondsToSubtract == Long.MIN_VALUE ? plusSeconds(Long.MAX_VALUE).plusSeconds(1) : plusSeconds(-secondsToSubtract)); } /** * Returns a copy of this duration with the specified duration in milliseconds subtracted. * <p> * This instance is immutable and unaffected by this method call. * * @param millisToSubtract the milliseconds to subtract, positive or negative * @return a {@code Duration} based on this duration with the specified milliseconds subtracted, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration minusMillis(long millisToSubtract) { return (millisToSubtract == Long.MIN_VALUE ? plusMillis(Long.MAX_VALUE).plusMillis(1) : plusMillis(-millisToSubtract)); } /** * Returns a copy of this duration with the specified duration in nanoseconds subtracted. * <p> * This instance is immutable and unaffected by this method call. * * @param nanosToSubtract the nanoseconds to subtract, positive or negative * @return a {@code Duration} based on this duration with the specified nanoseconds subtracted, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration minusNanos(long nanosToSubtract) { return (nanosToSubtract == Long.MIN_VALUE ? plusNanos(Long.MAX_VALUE).plusNanos(1) : plusNanos(-nanosToSubtract)); } //----------------------------------------------------------------------- /** * Returns a copy of this duration multiplied by the scalar. * <p> * This instance is immutable and unaffected by this method call. 
* * @param multiplicand the value to multiply the duration by, positive or negative * @return a {@code Duration} based on this duration multiplied by the specified scalar, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration multipliedBy(long multiplicand) { if (multiplicand == 0) { return ZERO; } if (multiplicand == 1) { return this; } return create(toSeconds().multiply(BigDecimal.valueOf(multiplicand))); } /** * Returns a copy of this duration divided by the specified value. * <p> * This instance is immutable and unaffected by this method call. * * @param divisor the value to divide the duration by, positive or negative, not zero * @return a {@code Duration} based on this duration divided by the specified divisor, not null * @throws ArithmeticException if the divisor is zero * @throws ArithmeticException if numeric overflow occurs */ public Duration dividedBy(long divisor) { if (divisor == 0) { throw new ArithmeticException("Cannot divide by zero"); } if (divisor == 1) { return this; } return create(toSeconds().divide(BigDecimal.valueOf(divisor), RoundingMode.DOWN)); } /** * Converts this duration to the total length in seconds and * fractional nanoseconds expressed as a {@code BigDecimal}. * * @return the total length of the duration in seconds, with a scale of 9, not null */ private BigDecimal toSeconds() { return BigDecimal.valueOf(seconds).add(BigDecimal.valueOf(nanos, 9)); } /** * Creates an instance of {@code Duration} from a number of seconds. 
* * @param seconds the number of seconds, up to scale 9, positive or negative * @return a {@code Duration}, not null * @throws ArithmeticException if numeric overflow occurs */ private static Duration create(BigDecimal seconds) { BigInteger nanos = seconds.movePointRight(9).toBigIntegerExact(); BigInteger[] divRem = nanos.divideAndRemainder(BI_NANOS_PER_SECOND); if (divRem[0].bitLength() > 63) { throw new ArithmeticException("Exceeds capacity of Duration: " + nanos); } return ofSeconds(divRem[0].longValue(), divRem[1].intValue()); } //----------------------------------------------------------------------- /** * Returns a copy of this duration with the length negated. * <p> * This method swaps the sign of the total length of this duration. * For example, {@code PT1.3S} will be returned as {@code PT-1.3S}. * <p> * This instance is immutable and unaffected by this method call. * * @return a {@code Duration} based on this period with the amount negated, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration negated() { return multipliedBy(-1); } /** * Returns a copy of this duration with a positive length. * <p> * This method returns a positive duration by effectively removing the sign from any negative total length. * For example, {@code PT-1.3S} will be returned as {@code PT1.3S}. * <p> * This instance is immutable and unaffected by this method call. * * @return a {@code Duration} based on this period with an absolute length, not null * @throws ArithmeticException if numeric overflow occurs */ public Duration abs() { return isNegative() ? negated() : this; } //------------------------------------------------------------------------- /** * Adds this period to the specified date-time object. * <p> * This method is not intended to be called by application code directly. * Applications should use the {@code plus(PlusAdjuster)} method * on the date-time object passing this period as the argument. 
* * @param dateTime the date-time object to adjust, not null * @return an object of the same type with the adjustment made, not null * @throws DateTimeException if unable to add * @throws ArithmeticException if numeric overflow occurs */ @Override public DateTime doAdd(DateTime dateTime) { long instantSecs = dateTime.getLong(INSTANT_SECONDS); long instantNanos = dateTime.getLong(NANO_OF_SECOND); instantSecs = DateTimes.safeAdd(instantSecs, seconds); instantNanos = DateTimes.safeAdd(instantNanos, nanos); instantSecs = DateTimes.safeAdd(instantSecs, DateTimes.floorDiv(instantNanos, DateTimes.NANOS_PER_SECOND)); instantNanos = DateTimes.floorMod(instantNanos, DateTimes.NANOS_PER_SECOND); return dateTime.with(INSTANT_SECONDS, instantSecs).with(NANO_OF_SECOND, instantNanos); } /** * Subtracts this period from the specified date-time object. * <p> * This method is not intended to be called by application code directly. * Applications should use the {@code minus(MinusAdjuster)} method * on the date-time object passing this period as the argument. * * @param dateTime the date-time object to adjust, not null * @return an object of the same type with the adjustment made, not null * @throws DateTimeException if unable to subtract * @throws ArithmeticException if numeric overflow occurs */ @Override public DateTime doSubtract(DateTime dateTime) { long instantSecs = dateTime.getLong(INSTANT_SECONDS); long instantNanos = dateTime.getLong(NANO_OF_SECOND); instantSecs = DateTimes.safeSubtract(instantSecs, seconds); instantNanos = DateTimes.safeSubtract(instantNanos, nanos); instantSecs = DateTimes.safeAdd(instantSecs, DateTimes.floorDiv(instantNanos, DateTimes.NANOS_PER_SECOND)); instantNanos = DateTimes.floorMod(instantNanos, DateTimes.NANOS_PER_SECOND); return dateTime.with(INSTANT_SECONDS, instantSecs).with(NANO_OF_SECOND, instantNanos); } //----------------------------------------------------------------------- /** * Converts this duration to the total length in milliseconds. 
* <p> * If this duration is too large to fit in a {@code long} milliseconds, then an * exception is thrown. * <p> * If this duration has greater than millisecond precision, then the conversion * will drop any excess precision information as though the amount in nanoseconds * was subject to integer division by one million. * * @return the total length of the duration in milliseconds * @throws ArithmeticException if numeric overflow occurs */ public long toMillis() { long millis = DateTimes.safeMultiply(seconds, 1000); millis = DateTimes.safeAdd(millis, nanos / 1000_000); return millis; } /** * Converts this duration to the total length in nanoseconds expressed as a {@code long}. * <p> * If this duration is too large to fit in a {@code long} nanoseconds, then an * exception is thrown. * * @return the total length of the duration in nanoseconds * @throws ArithmeticException if numeric overflow occurs */ public long toNanos() { long millis = DateTimes.safeMultiply(seconds, 1000_000_000); millis = DateTimes.safeAdd(millis, nanos); return millis; } //----------------------------------------------------------------------- /** * Compares this duration to the specified {@code Duration}. * <p> * The comparison is based on the total length of the durations. * * @param otherDuration the other duration to compare to, not null * @return the comparator value, negative if less, positive if greater */ public int compareTo(Duration otherDuration) { int cmp = Long.compare(seconds, otherDuration.seconds); if (cmp != 0) { return cmp; } return nanos - otherDuration.nanos; } /** * Checks if this duration is greater than the specified {@code Duration}. * <p> * The comparison is based on the total length of the durations. 
* * @param otherDuration the other duration to compare to, not null * @return true if this duration is greater than the specified duration */ public boolean isGreaterThan(Duration otherDuration) { return compareTo(otherDuration) > 0; } /** * Checks if this duration is less than the specified {@code Duration}. * <p> * The comparison is based on the total length of the durations. * * @param otherDuration the other duration to compare to, not null * @return true if this duration is less than the specified duration */ public boolean isLessThan(Duration otherDuration) { return compareTo(otherDuration) < 0; } //----------------------------------------------------------------------- /** * Checks if this duration is equal to the specified {@code Duration}. * <p> * The comparison is based on the total length of the durations. * * @param otherDuration the other duration, null returns false * @return true if the other duration is equal to this one */ @Override public boolean equals(Object otherDuration) { if (this == otherDuration) { return true; } if (otherDuration instanceof Duration) { Duration other = (Duration) otherDuration; return this.seconds == other.seconds && this.nanos == other.nanos; } return false; } /** * A hash code for this duration. * * @return a suitable hash code */ @Override public int hashCode() { return ((int) (seconds ^ (seconds >>> 32))) + (51 * nanos); } //----------------------------------------------------------------------- /** * A string representation of this duration using ISO-8601 seconds * based representation, such as {@code PT12.345S}. * <p> * The format of the returned string will be {@code PTnS} where n is * the seconds and fractional seconds of the duration. 
* * @return an ISO-8601 representation of this duration, not null */ @Override public String toString() { StringBuilder buf = new StringBuilder(24); buf.append("PT"); if (seconds < 0 && nanos > 0) { if (seconds == -1) { buf.append("-0"); } else { buf.append(seconds + 1); } } else { buf.append(seconds); } if (nanos > 0) { int pos = buf.length(); if (seconds < 0) { buf.append(2 * NANOS_PER_SECOND - nanos); } else { buf.append(nanos + NANOS_PER_SECOND); } while (buf.charAt(buf.length() - 1) == '0') { buf.setLength(buf.length() - 1); } buf.setCharAt(pos, '.'); } buf.append('S'); return buf.toString(); } }
Javadoc
src/main/java/javax/time/Duration.java
Javadoc
<ide><path>rc/main/java/javax/time/Duration.java <ide> * <p> <ide> * This instance is immutable and unaffected by this method call. <ide> * <del> * @return a {@code Duration} based on this period with the amount negated, not null <add> * @return a {@code Duration} based on this duration with the amount negated, not null <ide> * @throws ArithmeticException if numeric overflow occurs <ide> */ <ide> public Duration negated() { <ide> * <p> <ide> * This instance is immutable and unaffected by this method call. <ide> * <del> * @return a {@code Duration} based on this period with an absolute length, not null <add> * @return a {@code Duration} based on this duration with an absolute length, not null <ide> * @throws ArithmeticException if numeric overflow occurs <ide> */ <ide> public Duration abs() { <ide> <ide> //------------------------------------------------------------------------- <ide> /** <del> * Adds this period to the specified date-time object. <add> * Adds this duration to the specified date-time object. <ide> * <p> <ide> * This method is not intended to be called by application code directly. <ide> * Applications should use the {@code plus(PlusAdjuster)} method <del> * on the date-time object passing this period as the argument. <add> * on the date-time object passing this duration as the argument. <ide> * <ide> * @param dateTime the date-time object to adjust, not null <ide> * @return an object of the same type with the adjustment made, not null <ide> } <ide> <ide> /** <del> * Subtracts this period from the specified date-time object. <add> * Subtracts this duration from the specified date-time object. <ide> * <p> <ide> * This method is not intended to be called by application code directly. <ide> * Applications should use the {@code minus(MinusAdjuster)} method <del> * on the date-time object passing this period as the argument. <add> * on the date-time object passing this duration as the argument. 
<ide> * <ide> * @param dateTime the date-time object to adjust, not null <ide> * @return an object of the same type with the adjustment made, not null
Java
mit
0477970b00ff062ce9dc46b9880086981d7920cb
0
ripxfrostbite/karren-sama
package org.frostbite.karren; import org.frostbite.karren.listencast.Song; import org.slf4j.Logger; import java.sql.*; import java.util.ArrayList; import java.util.Date; public class MySQLInterface { private String sqlhost; private String sqluser; private String sqldb; private String sqlpass; private int sqlport; private String query; private boolean search; private boolean pstNeeded; private String overrideDB; private Logger log; private boolean rwEnabled; private ArrayList<String> sqlPayload = new ArrayList<>(); /* CONSTRUCTORS */ public MySQLInterface(BotConfiguration botConf, Logger log){ sqldb = botConf.getSqldb(); sqlhost = botConf.getSqlhost(); sqlpass = botConf.getSqlpass(); sqlport = Integer.parseInt(botConf.getSqlport()); sqluser = botConf.getSqluser(); rwEnabled = Boolean.parseBoolean(botConf.getAllowSQLRW()); this.log = log; } /* UTILITY OPERATIONS */ private void resetSQL(){ sqlPayload.clear(); query = null; search = false; pstNeeded = false; overrideDB = null; } public boolean isNewUser(String nick){ ArrayList<String> savedUsers = new ArrayList<>(); boolean userNew = true; try{ query = "SELECT user FROM users"; search = true; pstNeeded = false; ArrayList<Object> usrTemp = executeQuery(); for (Object user : usrTemp) { savedUsers.add((String) user); } } catch(SQLException e) { e.printStackTrace(); log.error("Error in SQL Operation:", e); } if(savedUsers.size() > 0){ for(String curUser : savedUsers){ if(nick.equalsIgnoreCase(curUser)){ userNew = false; } } } return userNew; } public void makeUser(String nick){ resetSQL(); query = "INSERT INTO users (ircuserid, user, botpart, timepart, linkCode) VALUES (null, ?, false, 0, null)"; sqlPayload.add(nick); try { search = false; pstNeeded = true; executeQuery(); } catch (SQLException e) { e.printStackTrace(); } } /* USER OPERATIONS */ public ArrayList<Object> getUserData(String nick) throws SQLException { ArrayList<Object> result; if(isNewUser(nick)) makeUser(nick); resetSQL(); query = "SELECT * FROM users 
WHERE user= ?"; sqlPayload.add(nick); search = true; pstNeeded = true; result = executeQuery(); return result; } /* Expected arguments: 1: Nick of user */ public void userOperation(String mod, String[] args) throws SQLException { Date date = new Date(); ArrayList<Object> userData = getUserData(args[0]); resetSQL(); switch(mod.toLowerCase()){ case "return": if((Boolean)userData.get(1)){ query = "UPDATE users SET botpart=false WHERE user= ?"; sqlPayload.add(args[0]); search = false; pstNeeded = true; executeQuery(); } break; case "part": if(!((Boolean)userData.get(1))){ query = "UPDATE users SET botpart=true, timepart= ? WHERE user= ?"; sqlPayload.add(String.valueOf(date.getTime())); sqlPayload.add(args[0]); search = false; pstNeeded = true; executeQuery(); } break; } } public void userLink(String uid, String linkCode) throws SQLException { resetSQL(); query = "UPDATE Users SET linkCode=? WHERE ID=?"; sqlPayload.add(linkCode); sqlPayload.add(uid); search = false; pstNeeded = true; executeQuery(); } /* RADIO OPERATIONS */ public ArrayList<Object> getUserFaves(Song song) throws SQLException { ArrayList<Object> result; resetSQL(); query = "SELECT User FROM userfaves WHERE SongID=?"; sqlPayload.add(String.valueOf(song.getSongID())); search = true; pstNeeded = true; result = executeQuery(); return result; } public boolean addFave(String user, Song song) throws SQLException { resetSQL(); query = "SELECT * FROM userfaves WHERE User=? 
AND SongID=?"; sqlPayload.add(user); sqlPayload.add(String.valueOf(song.getSongID())); search = true; pstNeeded = true; ArrayList<Object> returned = executeQuery(); if(returned.size()==0){ resetSQL(); query = "INSERT INTO UserFaves(ID, User, SongID) VALUES (null, ?, ?)"; sqlPayload.add(user); sqlPayload.add(String.valueOf(song.getSongID())); search = false; pstNeeded = true; executeQuery(); resetSQL(); query = "UPDATE songdb SET FavCount=FavCount+1 WHERE id=?"; sqlPayload.add(String.valueOf(song.getSongID())); search = false; pstNeeded = true; executeQuery(); return true; } else { return false; } } public ArrayList<Song> getOldSongDataFromDB() throws SQLException { ArrayList<Song> songs = new ArrayList<>(); resetSQL(); query = "SELECT * FROM songdb_old"; search = true; pstNeeded = false; ArrayList<Object> result = executeQuery(); int songCount = result.size()/7; for(int i=0; i<songCount; i++){ songs.add(new Song((String)result.get(1+(7*(i))),(int)result.get(3+(7*(i))),(int)result.get(4+(7*(i))),(boolean)result.get(6+(7*(i))),(String)result.get(2+(7*(i))), (long)result.get(5+(7*(i))))); } return songs; } public void insertSongData(Song song) throws SQLException { resetSQL(); int durLock = 0; if(song.isDurationLocked()) durLock = 1; query = "INSERT INTO songdb (ID, SongTitle, LPTime, PlayCount, FavCount, SongDuration, DurationLock) VALUES (null, ?, ?, ?, ?, ?, ?)"; sqlPayload.add(song.getSongName()); sqlPayload.add(String.valueOf(song.getLastPlayedRaw())); sqlPayload.add(String.valueOf(song.getPlayCount())); sqlPayload.add(String.valueOf(song.getFavCount())); sqlPayload.add(String.valueOf(song.getLastSongDuration())); sqlPayload.add(String.valueOf(durLock)); pstNeeded = true; search = false; executeQuery(); } public void updateDJActivity(String curDJ, String streamName) throws SQLException { if(curDJ.length()==0){ //Setting no DJ to active(Stream offair) resetSQL(); query = "UPDATE radio_dj SET active=false"; search = false; pstNeeded = false; executeQuery(); } else 
{ resetSQL(); query = "INSERT INTO radio_dj(ID,displayName,connectName,streamName,djPicture,active) VALUES (null, ?, ?, ?, 'default', true) ON DUPLICATE KEY UPDATE active=true, streamName=?"; sqlPayload.add(curDJ); sqlPayload.add(curDJ); sqlPayload.add(streamName); sqlPayload.add(streamName); search = false; pstNeeded = true; executeQuery(); } } public void updateSongData(Song lastsong) throws SQLException{ if(!lastsong.isDurationLocked() || lastsong.getLastSongDuration() == 0) { resetSQL(); if (lastsong.getLastSongDuration() == lastsong.getSongDuration() && lastsong.getLastSongDuration() > 0) { query = "UPDATE songdb SET songduration=?, DurationLock=1 WHERE id=?"; log.debug("Setting \"" + lastsong.getSongName() + "\" duration lock to true"); } else { query = "UPDATE songdb SET songduration=? WHERE id=?"; } sqlPayload.add(String.valueOf(lastsong.getSongDuration())); sqlPayload.add(String.valueOf(lastsong.getSongID())); search = false; pstNeeded = true; executeQuery(); } } public void updateRadioDatabase(Song song) throws SQLException { if(rwEnabled) { resetSQL(); ArrayList<Object> returned; Long curTime; query = "SELECT ID FROM songdb WHERE SongTitle = ?"; pstNeeded = true; search = true; sqlPayload.add(song.getSongName()); returned = executeQuery(); resetSQL(); if (returned.size() > 0) { song.setSongID((int) returned.get(0)); returned.clear(); query = "SELECT * FROM songdb WHERE ID= ?"; sqlPayload.add(String.valueOf(song.getSongID())); search = true; pstNeeded = true; returned = executeQuery(); song.setFieldsFromSQL(returned); } else { returned.clear(); song.setSongID(0); returned.add(null); returned.add(null); returned.add((long) 0); returned.add(0); returned.add(0); returned.add(0); returned.add(false); song.setFieldsFromSQL(returned); } returned.clear(); Date date = new Date(); if (song.getSongID() == 0) { //Adding song to DB and getting new ID for song resetSQL(); query = "INSERT INTO songdb (ID, SongTitle, LPTime, PlayCount, FavCount, SongDuration, 
DurationLock) VALUES (null, ?, ?, 1, 0, 0, false)"; sqlPayload.add(song.getSongName()); curTime = date.getTime(); sqlPayload.add(curTime.toString()); search = false; pstNeeded = true; executeQuery(); resetSQL(); query = "SELECT ID FROM songdb WHERE SongTitle = ?"; sqlPayload.add(song.getSongName()); search = true; pstNeeded = true; returned = executeQuery(); if (returned.size() > 0) { song.setSongID((int) returned.get(0)); } resetSQL(); } else { resetSQL(); //Update info for song query = "UPDATE songdb SET LPTime= ?, PlayCount=PlayCount+1 WHERE ID=?"; curTime = date.getTime(); sqlPayload.add(curTime.toString()); sqlPayload.add(String.valueOf(song.getSongID())); search = false; pstNeeded = true; executeQuery(); } log.info("Now playing: " + song.getSongName() + ":" + song.getSongID() + ":" + song.getPlayCount()); } } /* SQL OPERATIONS */ public ArrayList<Object> executeQuery() throws SQLException { String targetDB = sqldb; ArrayList<Object> result = new ArrayList<>(); if(overrideDB != null) targetDB = overrideDB; Connection run = DriverManager.getConnection("jdbc:mysql://" + sqlhost + ":" + sqlport + "/" + targetDB + "?useUnicode=true&characterEncoding=UTF-8", sqluser, sqlpass); PreparedStatement pst; ResultSet rs; pst = run.prepareStatement(query); if(pstNeeded){ for(int i=0; i<sqlPayload.size(); i++){ pst.setString(i+1, sqlPayload.get(i)); } } if(search){ rs = pst.executeQuery(); ResultSetMetaData md = rs.getMetaData(); int cCount = md.getColumnCount(); while(rs.next()){ for(int i=1; i<=cCount; i++){ result.add(rs.getObject(i)); } } } if(!search) pst.execute(); run.close(); return result; } }
src/org/frostbite/karren/MySQLInterface.java
package org.frostbite.karren; import org.frostbite.karren.listencast.Song; import org.slf4j.Logger; import java.sql.*; import java.util.ArrayList; import java.util.Date; public class MySQLInterface { private String sqlhost; private String sqluser; private String sqldb; private String sqlpass; private int sqlport; private String query; private boolean search; private boolean pstNeeded; private String overrideDB; private Logger log; private boolean rwEnabled; private Date date = new Date(); private ArrayList<String> sqlPayload = new ArrayList<>(); /* CONSTRUCTORS */ public MySQLInterface(BotConfiguration botConf, Logger log){ sqldb = botConf.getSqldb(); sqlhost = botConf.getSqlhost(); sqlpass = botConf.getSqlpass(); sqlport = Integer.parseInt(botConf.getSqlport()); sqluser = botConf.getSqluser(); rwEnabled = Boolean.parseBoolean(botConf.getAllowSQLRW()); this.log = log; } /* UTILITY OPERATIONS */ private void resetSQL(){ sqlPayload.clear(); query = null; search = false; pstNeeded = false; overrideDB = null; } public boolean isNewUser(String nick){ ArrayList<String> savedUsers = new ArrayList<>(); boolean userNew = true; try{ query = "SELECT user FROM users"; search = true; pstNeeded = false; ArrayList<Object> usrTemp = executeQuery(); for (Object user : usrTemp) { savedUsers.add((String) user); } } catch(SQLException e) { e.printStackTrace(); log.error("Error in SQL Operation:", e); } if(savedUsers.size() > 0){ for(String curUser : savedUsers){ if(nick.equalsIgnoreCase(curUser)){ userNew = false; } } } return userNew; } public void makeUser(String nick){ resetSQL(); query = "INSERT INTO users (ircuserid, user, botpart, timepart, linkCode) VALUES (null, ?, false, 0, null)"; sqlPayload.add(nick); try { search = false; pstNeeded = true; executeQuery(); } catch (SQLException e) { e.printStackTrace(); } } /* USER OPERATIONS */ public ArrayList<Object> getUserData(String nick) throws SQLException { ArrayList<Object> result; if(isNewUser(nick)) makeUser(nick); resetSQL(); 
query = "SELECT * FROM users WHERE user= ?"; sqlPayload.add(nick); search = true; pstNeeded = true; result = executeQuery(); return result; } /* Expected arguments: 1: Nick of user */ public void userOperation(String mod, String[] args) throws SQLException { Date date = new Date(); ArrayList<Object> userData = getUserData(args[0]); resetSQL(); switch(mod.toLowerCase()){ case "return": if((Boolean)userData.get(1)){ query = "UPDATE users SET botpart=false WHERE user= ?"; sqlPayload.add(args[0]); search = false; pstNeeded = true; executeQuery(); } break; case "part": if(!((Boolean)userData.get(1))){ query = "UPDATE users SET botpart=true, timepart= ? WHERE user= ?"; sqlPayload.add(String.valueOf(date.getTime())); sqlPayload.add(args[0]); search = false; pstNeeded = true; executeQuery(); } break; } } public void userLink(String uid, String linkCode) throws SQLException { resetSQL(); query = "UPDATE Users SET linkCode=? WHERE ID=?"; sqlPayload.add(linkCode); sqlPayload.add(uid); search = false; pstNeeded = true; executeQuery(); } /* RADIO OPERATIONS */ public ArrayList<Object> getUserFaves(Song song) throws SQLException { ArrayList<Object> result; resetSQL(); query = "SELECT User FROM userfaves WHERE SongID=?"; sqlPayload.add(String.valueOf(song.getSongID())); search = true; pstNeeded = true; result = executeQuery(); return result; } public boolean addFave(String user, Song song) throws SQLException { resetSQL(); query = "SELECT * FROM userfaves WHERE User=? 
AND SongID=?"; sqlPayload.add(user); sqlPayload.add(String.valueOf(song.getSongID())); search = true; pstNeeded = true; ArrayList<Object> returned = executeQuery(); if(returned.size()==0){ resetSQL(); query = "INSERT INTO UserFaves(ID, User, SongID) VALUES (null, ?, ?)"; sqlPayload.add(user); sqlPayload.add(String.valueOf(song.getSongID())); search = false; pstNeeded = true; executeQuery(); resetSQL(); query = "UPDATE songdb SET FavCount=FavCount+1 WHERE id=?"; sqlPayload.add(String.valueOf(song.getSongID())); search = false; pstNeeded = true; executeQuery(); return true; } else { return false; } } public ArrayList<Song> getOldSongDataFromDB() throws SQLException { ArrayList<Song> songs = new ArrayList<>(); resetSQL(); query = "SELECT * FROM songdb_old"; search = true; pstNeeded = false; ArrayList<Object> result = executeQuery(); int songCount = result.size()/7; for(int i=0; i<songCount; i++){ songs.add(new Song((String)result.get(1+(7*(i))),(int)result.get(3+(7*(i))),(int)result.get(4+(7*(i))),(boolean)result.get(6+(7*(i))),(String)result.get(2+(7*(i))), (long)result.get(5+(7*(i))))); } return songs; } public void insertSongData(Song song) throws SQLException { resetSQL(); int durLock = 0; if(song.isDurationLocked()) durLock = 1; query = "INSERT INTO songdb (ID, SongTitle, LPTime, PlayCount, FavCount, SongDuration, DurationLock) VALUES (null, ?, ?, ?, ?, ?, ?)"; sqlPayload.add(song.getSongName()); sqlPayload.add(String.valueOf(song.getLastPlayedRaw())); sqlPayload.add(String.valueOf(song.getPlayCount())); sqlPayload.add(String.valueOf(song.getFavCount())); sqlPayload.add(String.valueOf(song.getLastSongDuration())); sqlPayload.add(String.valueOf(durLock)); pstNeeded = true; search = false; executeQuery(); } public void updateDJActivity(String curDJ, String streamName) throws SQLException { if(curDJ.length()==0){ //Setting no DJ to active(Stream offair) resetSQL(); query = "UPDATE radio_dj SET active=false"; search = false; pstNeeded = false; executeQuery(); } else 
{ resetSQL(); query = "INSERT INTO radio_dj(ID,displayName,connectName,streamName,djPicture,active) VALUES (null, ?, ?, ?, 'default', true) ON DUPLICATE KEY UPDATE active=true, streamName=?"; sqlPayload.add(curDJ); sqlPayload.add(curDJ); sqlPayload.add(streamName); sqlPayload.add(streamName); search = false; pstNeeded = true; executeQuery(); } } public void updateSongData(Song lastsong) throws SQLException{ if(!lastsong.isDurationLocked() || lastsong.getLastSongDuration() == 0) { resetSQL(); if (lastsong.getLastSongDuration() == lastsong.getSongDuration() && lastsong.getLastSongDuration() > 0) { query = "UPDATE songdb SET songduration=?, DurationLock=1 WHERE id=?"; log.debug("Setting \"" + lastsong.getSongName() + "\" duration lock to true"); } else { query = "UPDATE songdb SET songduration=? WHERE id=?"; } sqlPayload.add(String.valueOf(lastsong.getSongDuration())); sqlPayload.add(String.valueOf(lastsong.getSongID())); search = false; pstNeeded = true; executeQuery(); } } public void updateRadioDatabase(Song song) throws SQLException { if(rwEnabled) { resetSQL(); ArrayList<Object> returned; Long curTime; query = "SELECT ID FROM songdb WHERE SongTitle = ?"; pstNeeded = true; search = true; sqlPayload.add(song.getSongName()); returned = executeQuery(); resetSQL(); if (returned.size() > 0) { song.setSongID((int) returned.get(0)); returned.clear(); query = "SELECT * FROM songdb WHERE ID= ?"; sqlPayload.add(String.valueOf(song.getSongID())); search = true; pstNeeded = true; returned = executeQuery(); song.setFieldsFromSQL(returned); } else { returned.clear(); song.setSongID(0); returned.add(null); returned.add(null); returned.add((long) 0); returned.add(0); returned.add(0); returned.add(0); returned.add(false); song.setFieldsFromSQL(returned); } returned.clear(); if (song.getSongID() == 0) { //Adding song to DB and getting new ID for song resetSQL(); query = "INSERT INTO songdb (ID, SongTitle, LPTime, PlayCount, FavCount, SongDuration, DurationLock) VALUES (null, ?, ?, 
1, 0, 0, false)"; sqlPayload.add(song.getSongName()); curTime = date.getTime(); sqlPayload.add(curTime.toString()); search = false; pstNeeded = true; executeQuery(); resetSQL(); query = "SELECT ID FROM songdb WHERE SongTitle = ?"; sqlPayload.add(song.getSongName()); search = true; pstNeeded = true; returned = executeQuery(); if (returned.size() > 0) { song.setSongID((int) returned.get(0)); } resetSQL(); } else { resetSQL(); //Update info for song query = "UPDATE songdb SET LPTime= ?, PlayCount=PlayCount+1 WHERE ID=?"; curTime = date.getTime(); sqlPayload.add(curTime.toString()); sqlPayload.add(String.valueOf(song.getSongID())); search = false; pstNeeded = true; executeQuery(); } log.info("Now playing: " + song.getSongName() + ":" + song.getSongID() + ":" + song.getPlayCount()); } } /* SQL OPERATIONS */ public ArrayList<Object> executeQuery() throws SQLException { String targetDB = sqldb; ArrayList<Object> result = new ArrayList<>(); if(overrideDB != null) targetDB = overrideDB; Connection run = DriverManager.getConnection("jdbc:mysql://" + sqlhost + ":" + sqlport + "/" + targetDB + "?useUnicode=true&characterEncoding=UTF-8", sqluser, sqlpass); PreparedStatement pst; ResultSet rs; pst = run.prepareStatement(query); if(pstNeeded){ for(int i=0; i<sqlPayload.size(); i++){ pst.setString(i+1, sqlPayload.get(i)); } } if(search){ rs = pst.executeQuery(); ResultSetMetaData md = rs.getMetaData(); int cCount = md.getColumnCount(); while(rs.next()){ for(int i=1; i<=cCount; i++){ result.add(rs.getObject(i)); } } } if(!search) pst.execute(); run.close(); return result; } }
Forgot how date worked and made it so every song had the same start time.
src/org/frostbite/karren/MySQLInterface.java
Forgot how date worked and made it so every song had the same start time.
<ide><path>rc/org/frostbite/karren/MySQLInterface.java <ide> private String overrideDB; <ide> private Logger log; <ide> private boolean rwEnabled; <del> private Date date = new Date(); <ide> private ArrayList<String> sqlPayload = new ArrayList<>(); <ide> /* <ide> CONSTRUCTORS <ide> song.setFieldsFromSQL(returned); <ide> } <ide> returned.clear(); <add> Date date = new Date(); <ide> if (song.getSongID() == 0) { <ide> //Adding song to DB and getting new ID for song <ide> resetSQL();
Java
apache-2.0
011d23111240068b7c6a08ee896ed51a3efa3c57
0
tailanx/test,tailanx/test
package com.yidejia.app.mall.fragment; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.graphics.Bitmap; import android.os.Handler; import android.os.Looper; import android.os.Message; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; import com.nostra13.universalimageloader.core.DisplayImageOptions; import com.nostra13.universalimageloader.core.ImageLoader; import com.nostra13.universalimageloader.core.assist.ImageLoadingListener; import com.nostra13.universalimageloader.core.assist.SimpleImageLoadingListener; import com.nostra13.universalimageloader.core.display.FadeInBitmapDisplayer; import com.yidejia.app.mall.MyApplication; import com.yidejia.app.mall.R; import com.yidejia.app.mall.datamanage.VoucherDataManage; import com.yidejia.app.mall.model.Specials; import com.yidejia.app.mall.util.Consts; import com.yidejia.app.mall.view.CstmPayActivity; public class ExchangeAdapter extends BaseAdapter { private Activity activity; private ArrayList<Specials> mlist; private static HashMap<Integer, Boolean> isSelected;// =״̬ public static List<HashMap<String, Float>> mlist1; public static List<HashMap<String, Object>> mlist2; private LayoutInflater inflater; private VoucherDataManage voucherDataManage;// 积分的信息 private double userVoucher;// 用户积分 private MyApplication myApplication; private InnerReciver receiver; 
public ExchangeAdapter(ArrayList<Specials> mList, Activity context) { this.activity = context; this.mlist = mList; this.inflater = LayoutInflater.from(context); receiver = new InnerReciver(); IntentFilter filter = new IntentFilter(); filter.addAction(Consts.EXCHANG_FREE); context.registerReceiver(receiver, filter); voucherDataManage = new VoucherDataManage(context); myApplication = (MyApplication) activity.getApplication(); Log.e("voucher",CstmPayActivity.voucherString1+""); userVoucher = Double.parseDouble(CstmPayActivity.voucherString1);//用户的积分 // userVoucher = Double.parseDouble(voucherDataManage.getUserVoucher( // myApplication.getUserId(), myApplication.getToken())); options = new DisplayImageOptions.Builder() .showStubImage(R.drawable.image_bg) .showImageOnFail(R.drawable.image_bg) .showImageForEmptyUri(R.drawable.image_bg).cacheInMemory(true) .cacheOnDisc(true).build(); isSelected = new HashMap<Integer, Boolean>(); mlist1 = new ArrayList<HashMap<String, Float>>(); mlist2 = new ArrayList<HashMap<String, Object>>(); initData(); } public ExchangeAdapter() { // TODO Auto-generated constructor stub } public static HashMap<Integer, Boolean> getIsSelected() { return isSelected; } public static void setIsSelected(HashMap<Integer, Boolean> isSelected) { ExchangeAdapter.isSelected = isSelected; } /** * ��ʼ��checkbox��ѡ��״̬ */ private void initData() { for (int i = 0; i < mlist.size(); i++) { // Log.i("info", mlist.size() + "size"); getIsSelected().put(i, false); } } @Override public int getCount() { // TODO Auto-generated method stub return mlist.size(); } @Override public Specials getItem(int arg0) { // TODO Auto-generated method stub return mlist.get(arg0); } @Override public long getItemId(int arg0) { // TODO Auto-generated method stub return Long.parseLong(mlist.get(arg0).getUId()); } static final List<String> displayedImages = Collections .synchronizedList(new LinkedList<String>()); private static class AnimateFirstDisplayListener extends 
SimpleImageLoadingListener { @Override public void onLoadingComplete(String imageUri, View view, Bitmap loadedImage) { if (loadedImage != null) { ImageView imageView = (ImageView) view; boolean firstDisplay = !displayedImages.contains(imageUri); if (firstDisplay) { FadeInBitmapDisplayer.animate(imageView, 500); displayedImages.add(imageUri); } } } } /** * ������ͼ */ private ImageLoadingListener animateFirstListener = new AnimateFirstDisplayListener(); private DisplayImageOptions options; protected ImageLoader imageLoader = ImageLoader.getInstance();// ����ͼƬ int i = 0; private Handler handler; @Override public View getView(final int postion, View covertView, ViewGroup arg2) { // TODO Auto-generated method stub final HashMap<String, Float> map = new HashMap<String, Float>(); final HashMap<String, Object> map1 = new HashMap<String, Object>(); final ViewHolder holder; if (covertView == null) { covertView = inflater.inflate(R.layout.exchange_produce_item, null); holder = new ViewHolder(); holder.iv = (ImageView) covertView .findViewById(R.id.exchange_produce_item__imageview1); holder.tvContent = (TextView) covertView .findViewById(R.id.exchange_produce_item_text); holder.tvPrice = (TextView) covertView .findViewById(R.id.exchange_produce_item_money); holder.subtract = (ImageView) covertView .findViewById(R.id.exchange_produce_item_subtract); holder.add = (ImageView) covertView .findViewById(R.id.exchange_produce_item_add); holder.count = (TextView) covertView .findViewById(R.id.exchange_produce_item_edit_number); holder.cb = (CheckBox) covertView .findViewById(R.id.exchange_produce_item_checkbox); covertView.setTag(holder); } else { holder = (ViewHolder) covertView.getTag(); } handler = new Handler() { public void handleMessage(Message msg) { if (msg.what == 113) { map.put("count", Float.parseFloat(holder.count.getText().toString())); map1.put("count1", Float.parseFloat(holder.count.getText().toString())); } if(msg.what == 114){ Log.i("info", " sum1"); for(int 
j=0;j<mlist2.size();j++){ HashMap<String, Object> map = mlist2.get(j); map.put("isCheck1", 1); holder.cb.setChecked(false); } } }; }; Specials s = mlist.get(postion); imageLoader.displayImage(s.getImgUrl(), holder.iv, options, animateFirstListener); holder.tvContent.setText(s.getBrief()); holder.tvPrice.setText(s.getScores()); holder.cb.setChecked(getIsSelected().get(postion)); holder.count.setText(1 + ""); holder.cb.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { // TODO Auto-generated method stub if (isChecked) { isSelected.put(postion, true); map.put("isCheck", (float) 0); map1.put("isCheck1", (float) 0); // map.put("isCheck", (float) 0 ); } else { isSelected.put(postion, false); map.put("isCheck", (float) 1); map1.put("isCheck1", (float) 1); holder.cb.setChecked(false); map1.put("price", 0); } // Message ms = new Message(); // ms.what = 115; // handler.sendMessage(ms); } }); holder.add.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { int sum = Integer.parseInt(holder.count.getText().toString()); if (sum >= 9999) { Toast.makeText( activity, activity.getResources().getString( R.string.price_error), Toast.LENGTH_LONG) .show(); } else { sum++; holder.count.setText(sum + ""); } Message ms = new Message(); ms.what = 113; handler.sendMessage(ms); } }); holder.subtract.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { // TODO Auto-generated method stub int sum = Integer.parseInt(holder.count.getText().toString()); if (sum <= 1) { Toast.makeText(activity, activity.getResources().getString(R.string.mix), Toast.LENGTH_LONG).show(); } else { sum--; holder.count.setText(sum + ""); } Message ms = new Message(); ms.what = 113; handler.sendMessage(ms); } }); map1.put("isCheck1", (float) (holder.cb.isChecked() == true ? 
0 : 1)); map1.put("cart", s); map1.put("price1", Float.parseFloat(s.getScores())); map1.put("count1", Float.parseFloat(holder.count.getText().toString())); map.put("price", Float.parseFloat(s.getScores())); map.put("count", Float.parseFloat(holder.count.getText().toString())); map.put("isCheck", (float) (holder.cb.isChecked() == true ? 0 : 1)); i++; mlist1.add(map); mlist2.add(map1); return covertView; } static class ViewHolder { CheckBox cb; private ImageView iv; private TextView tvContent; private TextView tvPrice; private ImageView subtract; private ImageView add; private TextView count; } public class InnerReciver extends BroadcastReceiver{ @Override public void onReceive(Context context, Intent intent) { // TODO Auto-generated method stub String action = intent.getAction(); if(Consts.EXCHANG_FREE.equals(action)){ initData(); notifyDataSetChanged(); } } } }
src/com/yidejia/app/mall/fragment/ExchangeAdapter.java
package com.yidejia.app.mall.fragment; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import android.app.Activity; import android.content.BroadcastReceiver; import android.content.Context; import android.content.Intent; import android.content.IntentFilter; import android.graphics.Bitmap; import android.os.Handler; import android.os.Message; import android.util.Log; import android.view.LayoutInflater; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.Button; import android.widget.CheckBox; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.ImageView; import android.widget.TextView; import android.widget.Toast; import com.nostra13.universalimageloader.core.DisplayImageOptions; import com.nostra13.universalimageloader.core.ImageLoader; import com.nostra13.universalimageloader.core.assist.ImageLoadingListener; import com.nostra13.universalimageloader.core.assist.SimpleImageLoadingListener; import com.nostra13.universalimageloader.core.display.FadeInBitmapDisplayer; import com.yidejia.app.mall.MyApplication; import com.yidejia.app.mall.R; import com.yidejia.app.mall.datamanage.VoucherDataManage; import com.yidejia.app.mall.model.Specials; import com.yidejia.app.mall.util.Consts; import com.yidejia.app.mall.view.CstmPayActivity; public class ExchangeAdapter extends BaseAdapter { private Activity activity; private ArrayList<Specials> mlist; private static HashMap<Integer, Boolean> isSelected;// =״̬ public static List<HashMap<String, Float>> mlist1; public static List<HashMap<String, Object>> mlist2; private LayoutInflater inflater; private VoucherDataManage voucherDataManage;// 积分的信息 private double userVoucher;// 用户积分 private MyApplication myApplication; private InnerReciver receiver; public 
ExchangeAdapter(ArrayList<Specials> mList, Activity context) { this.activity = context; this.mlist = mList; this.inflater = LayoutInflater.from(context); receiver = new InnerReciver(); IntentFilter filter = new IntentFilter(); filter.addAction(Consts.EXCHANG_FREE); context.registerReceiver(receiver, filter); voucherDataManage = new VoucherDataManage(context); myApplication = (MyApplication) activity.getApplication(); Log.e("voucher",CstmPayActivity.voucherString1+""); userVoucher = Double.parseDouble(CstmPayActivity.voucherString1);//用户的积分 // userVoucher = Double.parseDouble(voucherDataManage.getUserVoucher( // myApplication.getUserId(), myApplication.getToken())); options = new DisplayImageOptions.Builder() .showStubImage(R.drawable.image_bg) .showImageOnFail(R.drawable.image_bg) .showImageForEmptyUri(R.drawable.image_bg).cacheInMemory(true) .cacheOnDisc(true).build(); isSelected = new HashMap<Integer, Boolean>(); mlist1 = new ArrayList<HashMap<String, Float>>(); mlist2 = new ArrayList<HashMap<String, Object>>(); initData(); } public ExchangeAdapter() { // TODO Auto-generated constructor stub } public static HashMap<Integer, Boolean> getIsSelected() { return isSelected; } public static void setIsSelected(HashMap<Integer, Boolean> isSelected) { ExchangeAdapter.isSelected = isSelected; } /** * ��ʼ��checkbox��ѡ��״̬ */ private void initData() { for (int i = 0; i < mlist.size(); i++) { // Log.i("info", mlist.size() + "size"); getIsSelected().put(i, false); } } @Override public int getCount() { // TODO Auto-generated method stub return mlist.size(); } @Override public Specials getItem(int arg0) { // TODO Auto-generated method stub return mlist.get(arg0); } @Override public long getItemId(int arg0) { // TODO Auto-generated method stub return Long.parseLong(mlist.get(arg0).getUId()); } static final List<String> displayedImages = Collections .synchronizedList(new LinkedList<String>()); private static class AnimateFirstDisplayListener extends SimpleImageLoadingListener { 
@Override public void onLoadingComplete(String imageUri, View view, Bitmap loadedImage) { if (loadedImage != null) { ImageView imageView = (ImageView) view; boolean firstDisplay = !displayedImages.contains(imageUri); if (firstDisplay) { FadeInBitmapDisplayer.animate(imageView, 500); displayedImages.add(imageUri); } } } } /** * ������ͼ */ private ImageLoadingListener animateFirstListener = new AnimateFirstDisplayListener(); private DisplayImageOptions options; protected ImageLoader imageLoader = ImageLoader.getInstance();// ����ͼƬ int i = 0; private Handler handler; @Override public View getView(final int postion, View covertView, ViewGroup arg2) { // TODO Auto-generated method stub final HashMap<String, Float> map = new HashMap<String, Float>(); final HashMap<String, Object> map1 = new HashMap<String, Object>(); final ViewHolder holder; if (covertView == null) { covertView = inflater.inflate(R.layout.exchange_produce_item, null); holder = new ViewHolder(); holder.iv = (ImageView) covertView .findViewById(R.id.exchange_produce_item__imageview1); holder.tvContent = (TextView) covertView .findViewById(R.id.exchange_produce_item_text); holder.tvPrice = (TextView) covertView .findViewById(R.id.exchange_produce_item_money); holder.subtract = (ImageView) covertView .findViewById(R.id.exchange_produce_item_subtract); holder.add = (ImageView) covertView .findViewById(R.id.exchange_produce_item_add); holder.count = (TextView) covertView .findViewById(R.id.exchange_produce_item_edit_number); holder.cb = (CheckBox) covertView .findViewById(R.id.exchange_produce_item_checkbox); covertView.setTag(holder); } else { holder = (ViewHolder) covertView.getTag(); } final Handler handler = new Handler() { public void handleMessage(Message msg) { if (msg.what == 113) { map.put("count", Float.parseFloat(holder.count.getText().toString())); map1.put("count1", Float.parseFloat(holder.count.getText().toString())); } if(msg.what == 114){ Log.i("info", " sum1"); holder.cb.setChecked(false); } 
}; }; Specials s = mlist.get(postion); imageLoader.displayImage(s.getImgUrl(), holder.iv, options, animateFirstListener); holder.tvContent.setText(s.getBrief()); holder.tvPrice.setText(s.getScores()); holder.cb.setChecked(getIsSelected().get(postion)); holder.count.setText(1 + ""); holder.cb.setOnCheckedChangeListener(new OnCheckedChangeListener() { @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { // TODO Auto-generated method stub if (isChecked) { isSelected.put(postion, true); map.put("isCheck", (float) 0); map1.put("isCheck1", (float) 0); // map.put("isCheck", (float) 0 ); } else { isSelected.put(postion, false); map.put("isCheck", (float) 1); map1.put("isCheck1", (float) 1); holder.cb.setChecked(false); map1.put("price", 0); } // Message ms = new Message(); // ms.what = 115; // handler.sendMessage(ms); } }); holder.add.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { int sum = Integer.parseInt(holder.count.getText().toString()); if (sum >= 9999) { Toast.makeText( activity, activity.getResources().getString( R.string.price_error), Toast.LENGTH_LONG) .show(); } else { sum++; holder.count.setText(sum + ""); } Message ms = new Message(); ms.what = 113; handler.sendMessage(ms); } }); holder.subtract.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { // TODO Auto-generated method stub int sum = Integer.parseInt(holder.count.getText().toString()); if (sum <= 1) { Toast.makeText(activity, activity.getResources().getString(R.string.mix), Toast.LENGTH_LONG).show(); } else { sum--; holder.count.setText(sum + ""); } Message ms = new Message(); ms.what = 113; handler.sendMessage(ms); } }); map1.put("isCheck1", (float) (holder.cb.isChecked() == true ? 
0 : 1)); map1.put("cart", s); map1.put("price1", Float.parseFloat(s.getScores())); map1.put("count1", Float.parseFloat(holder.count.getText().toString())); map.put("price", Float.parseFloat(s.getScores())); map.put("count", Float.parseFloat(holder.count.getText().toString())); map.put("isCheck", (float) (holder.cb.isChecked() == true ? 0 : 1)); i++; mlist1.add(map); mlist2.add(map1); return covertView; } static class ViewHolder { CheckBox cb; private ImageView iv; private TextView tvContent; private TextView tvPrice; private ImageView subtract; private ImageView add; private TextView count; } public class InnerReciver extends BroadcastReceiver{ @Override public void onReceive(Context context, Intent intent) { // TODO Auto-generated method stub String action = intent.getAction(); if(Consts.EXCHANG_FREE.equals(action)){ // Log.i("info", action+" sum1"); Message ms = new Message(); ms.what = 114; handler.sendMessage(ms); } } } }
修改积分不够时,界面的选择情况
src/com/yidejia/app/mall/fragment/ExchangeAdapter.java
修改积分不够时,界面的选择情况
<ide><path>rc/com/yidejia/app/mall/fragment/ExchangeAdapter.java <ide> import android.content.IntentFilter; <ide> import android.graphics.Bitmap; <ide> import android.os.Handler; <add>import android.os.Looper; <ide> import android.os.Message; <ide> import android.util.Log; <ide> import android.view.LayoutInflater; <ide> // TODO Auto-generated method stub <ide> final HashMap<String, Float> map = new HashMap<String, Float>(); <ide> final HashMap<String, Object> map1 = new HashMap<String, Object>(); <del> final ViewHolder holder; <add> final ViewHolder holder; <ide> if (covertView == null) { <ide> covertView = inflater.inflate(R.layout.exchange_produce_item, null); <ide> holder = new ViewHolder(); <ide> } else { <ide> holder = (ViewHolder) covertView.getTag(); <ide> } <del> final Handler handler = new Handler() { <add> handler = new Handler() { <ide> public void handleMessage(Message msg) { <ide> if (msg.what == 113) { <ide> map.put("count", <ide> } <ide> if(msg.what == 114){ <ide> Log.i("info", " sum1"); <del> holder.cb.setChecked(false); <add> for(int j=0;j<mlist2.size();j++){ <add> HashMap<String, Object> map = mlist2.get(j); <add> map.put("isCheck1", 1); <add> holder.cb.setChecked(false); <add> } <ide> } <ide> <ide> }; <ide> // TODO Auto-generated method stub <ide> String action = intent.getAction(); <ide> if(Consts.EXCHANG_FREE.equals(action)){ <del>// Log.i("info", action+" sum1"); <del> Message ms = new Message(); <del> ms.what = 114; <del> handler.sendMessage(ms); <add> initData(); <add> notifyDataSetChanged(); <ide> } <ide> } <ide>
Java
agpl-3.0
4f8845b5a2fcb64b5b14abc7ab9f93bc63c1ee60
0
AlienQueen/HatchetHarry,AlienQueen/HatchetHarry,AlienQueen/HatchetHarry
package org.alienlabs.hatchetharry.view.component.card; import java.io.IOException; import java.util.HashMap; import org.alienlabs.hatchetharry.model.MagicCard; import org.alienlabs.hatchetharry.view.page.HomePage; import org.apache.wicket.Component; import org.apache.wicket.behavior.Behavior; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.JavaScriptHeaderItem; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.Model; import org.apache.wicket.util.template.PackageTextTemplate; import org.apache.wicket.util.template.TextTemplate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @edu.umd.cs.findbugs.annotations.SuppressFBWarnings(value = "SE_INNER_CLASS", justification = "In Wicket, serializable inner classes are common. And as the parent Page is serialized as well, this is no concern. This is no bad practice in Wicket.") public class CardInBattlefieldContextMenu extends Panel { private static final long serialVersionUID = 1L; static final Logger LOGGER = LoggerFactory.getLogger(CardInBattlefieldContextMenu.class); public CardInBattlefieldContextMenu(final String id, final Model<MagicCard> mc) { super(id, mc); final String uuidAsString = mc.getObject().getUuidObject().toString().replaceAll("-", "_"); final WebMarkupContainer cardInBattlefieldContextMenu = new WebMarkupContainer( "cardInBattlefieldContextMenu"); cardInBattlefieldContextMenu.setOutputMarkupId(true).setMarkupId( "cardInBattlefieldContextMenu" + uuidAsString); this.add(cardInBattlefieldContextMenu); final WebMarkupContainer putToHand = new WebMarkupContainer("putToHand"); putToHand.setOutputMarkupId(true).setMarkupId("putToHand" + uuidAsString); final WebMarkupContainer putToGraveyard = new WebMarkupContainer("putToGraveyard"); putToGraveyard.setOutputMarkupId(true).setMarkupId("putToGraveyard" + uuidAsString); final WebMarkupContainer putToExile = new 
WebMarkupContainer("putToExile"); putToExile.setOutputMarkupId(true).setMarkupId("putToExile" + uuidAsString); final WebMarkupContainer destroyToken = new WebMarkupContainer("destroyToken"); destroyToken.setOutputMarkupId(true).setMarkupId("destroyToken" + uuidAsString); cardInBattlefieldContextMenu.add(putToHand, putToGraveyard, putToExile, destroyToken); if (mc.getObject().getToken() != null) { putToHand.setVisible(false); putToGraveyard.setVisible(false); putToExile.setVisible(false); } else { destroyToken.setVisible(false); } this.add(new CardInBattlefieldContextMenuHeaderBehavior(uuidAsString)); } static class CardInBattlefieldContextMenuHeaderBehavior extends Behavior { private static final long serialVersionUID = 1L; private final String uuidAsString; public CardInBattlefieldContextMenuHeaderBehavior(String _uuidAsString) { this.uuidAsString = _uuidAsString; } @Override public void renderHead(final Component component, final IHeaderResponse response) { super.renderHead(component, response); final HashMap<String, Object> variables = new HashMap<String, Object>(); variables.put("uuidValidForJs", this.uuidAsString); final TextTemplate template = new PackageTextTemplate(HomePage.class, "script/contextmenu/cardInBattlefieldContextMenu.js"); template.interpolate(variables); response.render(JavaScriptHeaderItem.forScript(template.asString(), null)); try { template.close(); } catch (final IOException e) { CardInBattlefieldContextMenu.LOGGER .error("unable to close template in CardInBattlefieldContextMenu.CardInBattlefieldContextMenuHeaderBehavior#renderHead()!", e); } } } }
src/main/java/org/alienlabs/hatchetharry/view/component/card/CardInBattlefieldContextMenu.java
package org.alienlabs.hatchetharry.view.component.card; import java.io.IOException; import java.util.HashMap; import org.alienlabs.hatchetharry.model.MagicCard; import org.alienlabs.hatchetharry.view.page.HomePage; import org.apache.wicket.Component; import org.apache.wicket.behavior.Behavior; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.JavaScriptHeaderItem; import org.apache.wicket.markup.html.WebMarkupContainer; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.Model; import org.apache.wicket.util.template.PackageTextTemplate; import org.apache.wicket.util.template.TextTemplate; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @edu.umd.cs.findbugs.annotations.SuppressFBWarnings(value = { "SE_INNER_CLASS", " SIC_INNER_SHOULD_BE_STATIC_ANON"}, justification = "1) In Wicket, serializable inner classes are common. And as the parent Page is serialized as well, this is no concern. This is no bad practice in Wicket. 
2) Such inner class is common Wicket idiom.") public class CardInBattlefieldContextMenu extends Panel { private static final long serialVersionUID = 1L; static final Logger LOGGER = LoggerFactory.getLogger(CardInBattlefieldContextMenu.class); public CardInBattlefieldContextMenu(final String id, final Model<MagicCard> mc) { super(id, mc); final String uuidAsString = mc.getObject().getUuidObject().toString().replaceAll("-", "_"); final WebMarkupContainer cardInBattlefieldContextMenu = new WebMarkupContainer( "cardInBattlefieldContextMenu"); cardInBattlefieldContextMenu.setOutputMarkupId(true).setMarkupId( "cardInBattlefieldContextMenu" + uuidAsString); this.add(cardInBattlefieldContextMenu); final WebMarkupContainer putToHand = new WebMarkupContainer("putToHand"); putToHand.setOutputMarkupId(true).setMarkupId("putToHand" + uuidAsString); final WebMarkupContainer putToGraveyard = new WebMarkupContainer("putToGraveyard"); putToGraveyard.setOutputMarkupId(true).setMarkupId("putToGraveyard" + uuidAsString); final WebMarkupContainer putToExile = new WebMarkupContainer("putToExile"); putToExile.setOutputMarkupId(true).setMarkupId("putToExile" + uuidAsString); final WebMarkupContainer destroyToken = new WebMarkupContainer("destroyToken"); destroyToken.setOutputMarkupId(true).setMarkupId("destroyToken" + uuidAsString); cardInBattlefieldContextMenu.add(putToHand, putToGraveyard, putToExile, destroyToken); if (mc.getObject().getToken() != null) { putToHand.setVisible(false); putToGraveyard.setVisible(false); putToExile.setVisible(false); } else { destroyToken.setVisible(false); } this.add(new Behavior() { private static final long serialVersionUID = 1L; @Override public void renderHead(final Component component, final IHeaderResponse response) { super.renderHead(component, response); final HashMap<String, Object> variables = new HashMap<String, Object>(); variables.put("uuidValidForJs", uuidAsString); final TextTemplate template = new PackageTextTemplate(HomePage.class, 
"script/contextmenu/cardInBattlefieldContextMenu.js"); template.interpolate(variables); response.render(JavaScriptHeaderItem.forScript(template.asString(), null)); try { template.close(); } catch (final IOException e) { CardInBattlefieldContextMenu.LOGGER .error("unable to close template in CardInBattlefieldContextMenu#renderHead()!", e); } } }); } }
FindBugs
src/main/java/org/alienlabs/hatchetharry/view/component/card/CardInBattlefieldContextMenu.java
FindBugs
<ide><path>rc/main/java/org/alienlabs/hatchetharry/view/component/card/CardInBattlefieldContextMenu.java <ide> import org.slf4j.Logger; <ide> import org.slf4j.LoggerFactory; <ide> <del>@edu.umd.cs.findbugs.annotations.SuppressFBWarnings(value = { "SE_INNER_CLASS", " SIC_INNER_SHOULD_BE_STATIC_ANON"}, justification = "1) In Wicket, serializable inner classes are common. And as the parent Page is serialized as well, this is no concern. This is no bad practice in Wicket. 2) Such inner class is common Wicket idiom.") <del>public class CardInBattlefieldContextMenu extends Panel <del>{ <add>@edu.umd.cs.findbugs.annotations.SuppressFBWarnings(value = "SE_INNER_CLASS", justification = "In Wicket, serializable inner classes are common. And as the parent Page is serialized as well, this is no concern. This is no bad practice in Wicket.") <add>public class CardInBattlefieldContextMenu extends Panel { <ide> private static final long serialVersionUID = 1L; <ide> static final Logger LOGGER = LoggerFactory.getLogger(CardInBattlefieldContextMenu.class); <ide> <del> public CardInBattlefieldContextMenu(final String id, final Model<MagicCard> mc) <del> { <add> public CardInBattlefieldContextMenu(final String id, final Model<MagicCard> mc) { <ide> super(id, mc); <ide> final String uuidAsString = mc.getObject().getUuidObject().toString().replaceAll("-", "_"); <ide> <ide> <ide> cardInBattlefieldContextMenu.add(putToHand, putToGraveyard, putToExile, destroyToken); <ide> <del> if (mc.getObject().getToken() != null) <del> { <add> if (mc.getObject().getToken() != null) { <ide> putToHand.setVisible(false); <ide> putToGraveyard.setVisible(false); <ide> putToExile.setVisible(false); <del> } <del> else <del> { <add> } else { <ide> destroyToken.setVisible(false); <ide> } <ide> <del> this.add(new Behavior() <add> this.add(new CardInBattlefieldContextMenuHeaderBehavior(uuidAsString)); <add> } <add> <add> static class CardInBattlefieldContextMenuHeaderBehavior extends Behavior { <add> private 
static final long serialVersionUID = 1L; <add> private final String uuidAsString; <add> <add> public CardInBattlefieldContextMenuHeaderBehavior(String _uuidAsString) <ide> { <del> private static final long serialVersionUID = 1L; <add> this.uuidAsString = _uuidAsString; <add> } <ide> <del> @Override <del> public void renderHead(final Component component, final IHeaderResponse response) <add> @Override <add> public void renderHead(final Component component, final IHeaderResponse response) <add> { <add> super.renderHead(component, response); <add> <add> final HashMap<String, Object> variables = new HashMap<String, Object>(); <add> variables.put("uuidValidForJs", this.uuidAsString); <add> <add> final TextTemplate template = new PackageTextTemplate(HomePage.class, <add> "script/contextmenu/cardInBattlefieldContextMenu.js"); <add> template.interpolate(variables); <add> <add> response.render(JavaScriptHeaderItem.forScript(template.asString(), null)); <add> try <ide> { <del> super.renderHead(component, response); <del> <del> final HashMap<String, Object> variables = new HashMap<String, Object>(); <del> variables.put("uuidValidForJs", uuidAsString); <del> <del> final TextTemplate template = new PackageTextTemplate(HomePage.class, <del> "script/contextmenu/cardInBattlefieldContextMenu.js"); <del> template.interpolate(variables); <del> <del> response.render(JavaScriptHeaderItem.forScript(template.asString(), null)); <del> try <del> { <del> template.close(); <del> } <del> catch (final IOException e) <del> { <del> CardInBattlefieldContextMenu.LOGGER <del> .error("unable to close template in CardInBattlefieldContextMenu#renderHead()!", <del> e); <del> } <add> template.close(); <add> } catch (final IOException e) <add> { <add> CardInBattlefieldContextMenu.LOGGER <add> .error("unable to close template in CardInBattlefieldContextMenu.CardInBattlefieldContextMenuHeaderBehavior#renderHead()!", <add> e); <ide> } <del> }); <add> } <ide> } <ide> <ide> }
JavaScript
lgpl-2.1
290c71bb7294380340415f6f61086c86745319bf
0
FernCreek/tinymce,tinymce/tinymce,tinymce/tinymce,tinymce/tinymce,TeamupCom/tinymce,FernCreek/tinymce,FernCreek/tinymce,TeamupCom/tinymce
/*eslint-env node */ let zipUtils = require('./tools/modules/zip-helper'); let gruntUtils = require('./tools/modules/grunt-utils'); let gruntWebPack = require('./tools/modules/grunt-webpack'); let swag = require('@ephox/swag'); let path = require('path'); let plugins = [ 'advlist', 'anchor', 'autolink', 'autoresize', 'autosave', 'bbcode', 'charmap', 'code', 'codesample', 'colorpicker', 'contextmenu', 'directionality', 'emoticons', 'help', 'fullpage', 'fullscreen', 'hr', 'image', 'imagetools', 'importcss', 'insertdatetime', 'legacyoutput', 'link', 'lists', 'media', 'nonbreaking', 'noneditable', 'pagebreak', 'paste', 'preview', 'print', 'save', 'searchreplace', 'spellchecker', 'tabfocus', 'table', 'template', 'textcolor', 'textpattern', 'toc', 'visualblocks', 'visualchars', 'wordcount', 'quickbars', ]; let themes = [ 'mobile', // 'modern', 'mobile', 'inlite', 'silver' 'silver' ]; module.exports = function (grunt) { var packageData = grunt.file.readJSON('package.json'); var changelogLine = grunt.file.read('changelog.txt').toString().split('\n')[0]; var BUILD_VERSION = packageData.version + (process.env.BUILD_NUMBER ? 
'-' + process.env.BUILD_NUMBER : ''); packageData.date = /^Version [^\(]+\(([^\)]+)\)/.exec(changelogLine)[1]; grunt.initConfig({ pkg: packageData, shell: { tsc: { command: 'node ./node_modules/typescript/bin/tsc' } }, tslint: { options: { configuration: 'tslint.json' }, files: { src: [ 'src/**/*.ts' ] } }, globals: { options: { configFile: 'src/core/main/json/globals.json', outputDir: 'lib/globals', templateFile: 'src/core/main/js/GlobalsTemplate.js' } }, rollup: Object.assign( { core: { options: { treeshake: true, name: 'tinymce', format: 'iife', banner: '(function () {', footer: '})();', onwarn: swag.onwarn, plugins: [ swag.nodeResolve({ basedir: __dirname, prefixes: { 'tinymce/core': 'lib/core/main/ts' } }), swag.remapImports() ] }, files:[ { src: 'lib/core/main/ts/api/Main.js', dest: 'js/tinymce/tinymce.js' } ] } }, gruntUtils.generate(plugins, 'plugin', (name) => { return { options: { treeshake: true, name: name, format: 'iife', banner: '(function () {', footer: '})();', onwarn: swag.onwarn, plugins: [ swag.nodeResolve({ basedir: __dirname, prefixes: gruntUtils.prefixes({ 'tinymce/core': 'lib/globals/tinymce/core' }, [ [`tinymce/plugins/${name}`, `lib/plugins/${name}/main/ts`] ]), mappers: [ swag.mappers.replaceDir('./lib/core/main/ts/api', './lib/globals/tinymce/core/api'), swag.mappers.invalidDir('./lib/core/main/ts') ] }), swag.remapImports() ] }, files:[ { src: `lib/plugins/${name}/main/ts/Plugin.js`, dest: `js/tinymce/plugins/${name}/plugin.js` } ] }; }), gruntUtils.generate(themes, 'theme', (name) => { return { options: { treeshake: true, name: name, format: 'iife', banner: '(function () {', footer: '})();', onwarn: swag.onwarn, plugins: [ swag.nodeResolve({ basedir: __dirname, prefixes: gruntUtils.prefixes({ 'tinymce/core': 'lib/globals/tinymce/core', 'tinymce/ui': 'lib/ui/main/ts' }, [ [`tinymce/themes/${name}`, `lib/themes/${name}/main/ts`] ]), mappers: [ swag.mappers.replaceDir('./lib/core/main/ts/api', './lib/globals/tinymce/core/api'), 
swag.mappers.invalidDir('./lib/core/main/ts') ] }), swag.remapImports() ] }, files:[ { src: `lib/themes/${name}/main/ts/Theme.js`, dest: `js/tinymce/themes/${name}/theme.js` } ] }; }) ), unicode: { 'emoticons-plugin': { files: [ { src: 'src/plugins/emoticons/main/js/*.js', dest: 'js/tinymce/plugins/emoticons/js/emojis.js' } ] } }, uglify: Object.assign( { options: { output: { ascii_only: true }, ie8: true }, core: { files: [ { src: 'js/tinymce/tinymce.js', dest: 'js/tinymce/tinymce.min.js' }, { src: 'src/core/main/js/JqueryIntegration.js', dest: 'js/tinymce/jquery.tinymce.min.js' } ] } }, gruntUtils.generate(plugins, 'plugin', (name) => { var pluginExtras = { emoticons: [ { src: 'js/tinymce/plugins/emoticons/js/emojis.js', dest: 'js/tinymce/plugins/emoticons/js/emojis.min.js' } ] }; return { files: [ { src: `js/tinymce/plugins/${name}/plugin.js`, dest: `js/tinymce/plugins/${name}/plugin.min.js` } ].concat(pluginExtras.hasOwnProperty(name) ? pluginExtras[name] : []) }; }), gruntUtils.generate(themes, 'theme', (name) => { return { files: [ { src: `js/tinymce/themes/${name}/theme.js`, dest: `js/tinymce/themes/${name}/theme.min.js` } ] }; }) ), webpack: Object.assign( {core: () => gruntWebPack.create('src/core/demo/ts/demo/Demos.ts', 'tsconfig.json', 'scratch/demos/core', 'demo.js')}, {plugins: () => gruntWebPack.allPluginDemos(plugins)}, {themes: () => { gruntWebPack.allThemeDemos(themes); gruntWebPack.allComponentDemos(themes); }}, gruntUtils.generate(plugins, 'plugin', (name) => () => gruntWebPack.createPlugin(name) ), gruntUtils.generate(themes, 'theme', (name) => () => gruntWebPack.createTheme(name) ) ), 'webpack-dev-server': { options: { webpack: gruntWebPack.all(plugins, themes), publicPath: '/', inline: false, port: grunt.option('webpack-port') !== undefined ? 
grunt.option('webpack-port') : 3000, host: '0.0.0.0', disableHostCheck: true, before: app => gruntWebPack.generateDemoIndex(grunt, app, plugins, themes) }, start: { } }, less: { mobile: { options: { plugins : [ new (require('less-plugin-autoprefix'))({ browsers : [ 'last 2 versions', /* for phantom */'safari >= 4' ] }) ], compress: true, yuicompress: true, sourceMap: true, sourceMapRootpath: '.', optimization: 2 }, files: { 'js/tinymce/skins/ui/oxide/skin.mobile.min.css': 'src/skins/oxide/main/less/mobile/app/mobile-less.less' } }, 'content-mobile': { options: { cleancss: true, strictImports: true, compress: true }, files: { 'js/tinymce/skins/ui/oxide/content.mobile.min.css': 'src/skins/oxide/main/less/mobile/content.less' } } }, copy: { core: { options: { process: function (content) { return content. replace('@@majorVersion@@', packageData.version.split('.')[0]). replace('@@minorVersion@@', packageData.version.split('.').slice(1).join('.')). replace('@@releaseDate@@', packageData.date); } }, files: [ { src: 'js/tinymce/tinymce.js', dest: 'js/tinymce/tinymce.js' }, { src: 'js/tinymce/tinymce.min.js', dest: 'js/tinymce/tinymce.min.js' }, { src: 'src/core/main/text/readme_lang.md', dest: 'js/tinymce/langs/readme.md' }, { src: 'LICENSE.TXT', dest: 'js/tinymce/license.txt' } ] }, 'ui-skins': { files: [ { expand: true, flatten: true, cwd: 'src/themes/mobile/main/fonts', src: [ '**', '!*.json', '!*.md' ], dest: 'js/tinymce/skins/ui/oxide/fonts' }, { expand: true, flatten: true, cwd: 'src/skins/oxide/main/img', src: '**', dest: 'js/tinymce/skins/ui/oxide/img' }, { expand: true, flatten: true, cwd: 'node_modules/@ephox/oxide/build/skins/oxide-default', src: [ '*.min.css', '*.min.css.map' ], dest: 'js/tinymce/skins/ui/oxide' }, { expand: true, flatten: true, cwd: 'src/skins/oxide/main/img', src: '**', dest: 'js/tinymce/skins/ui/oxide-dark/img' }, { expand: true, cwd: 'node_modules/@ephox/oxide/build/skins/oxide-dark', src: [ '*.min.css', '*.min.css.map' ], dest: 
'js/tinymce/skins/ui/oxide-dark' } ] }, 'content-skins': { files: [ { expand: true, cwd: 'node_modules/@ephox/oxide/build/skins/content', src: '**', dest: 'js/tinymce/skins/content' }, ] }, 'visualblocks-plugin': { files: [ { src: 'src/plugins/visualblocks/main/css/visualblocks.css', dest: 'js/tinymce/plugins/visualblocks/css/visualblocks.css' } ] } }, moxiezip: { production: { options: { baseDir: 'tinymce', excludes: [ 'js/**/plugin.js', 'js/**/theme.js', 'js/**/*.map', 'js/tinymce/tinymce.full.min.js', 'js/tinymce/plugins/moxiemanager', 'js/tinymce/plugins/visualblocks/img', 'js/tinymce/skins/*/fonts/*.json', 'js/tinymce/skins/*/fonts/readme.md', 'readme.md' ], to: 'tmp/tinymce_<%= pkg.version %>.zip' }, src: [ 'js/tinymce/langs', 'js/tinymce/plugins', 'js/tinymce/skins', 'js/tinymce/themes', 'js/tinymce/tinymce.min.js', 'js/tinymce/jquery.tinymce.min.js', 'js/tinymce/license.txt', 'changelog.txt', 'LICENSE.TXT', 'readme.md' ] }, development: { options: { baseDir: 'tinymce', excludes: [ 'src/**/dist', 'src/**/scratch', 'src/**/lib', 'src/**/dependency', 'js/tinymce/tinymce.full.min.js', 'js/tests/.jshintrc' ], to: 'tmp/tinymce_<%= pkg.version %>_dev.zip' }, src: [ 'config', 'src', 'js', 'tests', 'tools', 'changelog.txt', 'LICENSE.TXT', 'Gruntfile.js', 'readme.md', 'package.json', '.eslintrc', '.jscsrc', '.jshintrc' ] }, cdn: { options: { onBeforeSave: function (zip) { zip.addData('dist/version.txt', packageData.version); }, pathFilter: function (zipFilePath) { return zipFilePath.replace('js/tinymce/', 'dist/'); }, excludes: [ 'js/**/config', 'js/**/scratch', 'js/**/classes', 'js/**/lib', 'js/**/dependency', 'js/**/src', 'js/**/*.less', 'js/**/*.dev.js', 'js/**/*.dev.svg', 'js/**/*.map', 'js/tinymce/tinymce.full.min.js', 'js/tinymce/plugins/moxiemanager', 'js/tinymce/plugins/visualblocks/img', 'js/tinymce/skins/*/fonts/*.json', 'js/tinymce/skins/*/fonts/*.dev.svg', 'js/tinymce/skins/*/fonts/readme.md', 'readme.md', 'js/tests/.jshintrc' ], concat: [ { src: [ 
'js/tinymce/tinymce.min.js', 'js/tinymce/themes/*/theme.min.js', 'js/tinymce/plugins/*/plugin.min.js', '!js/tinymce/plugins/example/plugin.min.js', '!js/tinymce/plugins/example_dependency/plugin.min.js' ], dest: [ 'js/tinymce/tinymce.min.js' ] } ], to: 'tmp/tinymce_<%= pkg.version %>_cdn.zip' }, src: [ 'js/tinymce/jquery.tinymce.min.js', 'js/tinymce/tinymce.js', 'js/tinymce/langs', 'js/tinymce/plugins', 'js/tinymce/skins', 'js/tinymce/themes', 'js/tinymce/license.txt' ] }, component: { options: { excludes: [ 'js/**/config', 'js/**/scratch', 'js/**/classes', 'js/**/lib', 'js/**/dependency', 'js/**/src', 'js/**/*.less', 'js/**/*.dev.svg', 'js/**/*.dev.js', 'js/**/*.map', 'js/tinymce/tinymce.full.min.js', 'js/tinymce/plugins/moxiemanager', 'js/tinymce/plugins/example', 'js/tinymce/plugins/example_dependency', 'js/tinymce/plugins/visualblocks/img', 'js/tinymce/skins/*/fonts/*.json', 'js/tinymce/skins/*/fonts/readme.md' ], pathFilter: function (zipFilePath) { if (zipFilePath.indexOf('js/tinymce/') === 0) { return zipFilePath.substr('js/tinymce/'.length); } return zipFilePath; }, onBeforeSave: function (zip) { function jsonToBuffer(json) { return new Buffer(JSON.stringify(json, null, '\t')); } zip.addData('bower.json', jsonToBuffer({ 'name': 'tinymce', 'description': 'Web based JavaScript HTML WYSIWYG editor control.', 'license': 'LGPL-2.1', 'keywords': ['editor', 'wysiwyg', 'tinymce', 'richtext', 'javascript', 'html'], 'homepage': 'http://www.tinymce.com', 'ignore': ['readme.md', 'composer.json', 'package.json', '.npmignore', 'changelog.txt'] })); zip.addData('package.json', jsonToBuffer({ 'name': 'tinymce', 'version': packageData.version, 'repository': { 'type': 'git', 'url': 'https://github.com/tinymce/tinymce-dist.git' }, 'description': 'Web based JavaScript HTML WYSIWYG editor control.', 'author': 'Ephox Corporation', 'main': 'tinymce.js', 'license': 'LGPL-2.1', 'keywords': ['editor', 'wysiwyg', 'tinymce', 'richtext', 'javascript', 'html'], 'bugs': { 'url': 
'https://github.com/tinymce/tinymce/issues' } })); zip.addData('composer.json', jsonToBuffer({ 'name': 'tinymce/tinymce', 'version': packageData.version, 'description': 'Web based JavaScript HTML WYSIWYG editor control.', 'license': ['LGPL-2.1-only'], 'keywords': ['editor', 'wysiwyg', 'tinymce', 'richtext', 'javascript', 'html'], 'homepage': 'http://www.tinymce.com', 'type': 'component', 'extra': { 'component': { 'scripts': [ 'tinymce.js', 'plugins/*/plugin.js', 'themes/*/theme.js' ], 'files': [ 'tinymce.min.js', 'plugins/*/plugin.min.js', 'themes/*/theme.min.js', 'skins/**' ] } }, 'archive': { 'exclude': ['readme.md', 'bower.js', 'package.json', '.npmignore', 'changelog.txt'] } })); zip.addFile( 'jquery.tinymce.js', 'js/tinymce/jquery.tinymce.min.js' ); var getDirs = zipUtils.getDirectories(grunt, this.excludes); zipUtils.addIndexFiles( zip, getDirs('js/tinymce/plugins'), zipUtils.generateIndex('plugins', 'plugin') ); zipUtils.addIndexFiles( zip, getDirs('js/tinymce/themes'), zipUtils.generateIndex('themes', 'theme') ); }, to: 'tmp/tinymce_<%= pkg.version %>_component.zip' }, src: [ 'js/tinymce/skins', 'js/tinymce/plugins', 'js/tinymce/themes', 'js/tinymce/tinymce.js', 'js/tinymce/tinymce.min.js', 'js/tinymce/jquery.tinymce.min.js', 'js/tinymce/license.txt', 'changelog.txt', 'readme.md' ] } }, nugetpack: { main: { options: { id: 'TinyMCE', version: packageData.version, authors: 'Ephox Corp', owners: 'Ephox Corp', description: 'The best WYSIWYG editor! TinyMCE is a platform independent web based Javascript HTML WYSIWYG editor ' + 'control released as Open Source under LGPL by Ephox Corp. TinyMCE has the ability to convert HTML ' + 'TEXTAREA fields or other HTML elements to editor instances. 
TinyMCE is very easy to integrate ' + 'into other Content Management Systems.', releaseNotes: 'Release notes for my package.', summary: 'TinyMCE is a platform independent web based Javascript HTML WYSIWYG editor ' + 'control released as Open Source under LGPL by Ephox Corp.', projectUrl: 'http://www.tinymce.com/', iconUrl: 'http://www.tinymce.com/favicon.ico', licenseUrl: 'http://www.tinymce.com/license', requireLicenseAcceptance: true, tags: 'Editor TinyMCE HTML HTMLEditor', excludes: [ 'js/**/config', 'js/**/scratch', 'js/**/classes', 'js/**/lib', 'js/**/dependency', 'js/**/src', 'js/**/*.less', 'js/**/*.dev.svg', 'js/**/*.dev.js', 'js/**/*.map', 'js/tinymce/tinymce.full.min.js' ], outputDir: 'tmp' }, files: [ { src: 'js/tinymce/langs', dest: '/content/scripts/tinymce/langs' }, { src: 'js/tinymce/plugins', dest: '/content/scripts/tinymce/plugins' }, { src: 'js/tinymce/themes', dest: '/content/scripts/tinymce/themes' }, { src: 'js/tinymce/skins', dest: '/content/scripts/tinymce/skins' }, { src: 'js/tinymce/tinymce.js', dest: '/content/scripts/tinymce/tinymce.js' }, { src: 'js/tinymce/tinymce.min.js', dest: '/content/scripts/tinymce/tinymce.min.js' }, { src: 'js/tinymce/jquery.tinymce.min.js', dest: '/content/scripts/tinymce/jquery.tinymce.min.js' }, { src: 'js/tinymce/license.txt', dest: '/content/scripts/tinymce/license.txt' } ] }, jquery: { options: { id: 'TinyMCE.jQuery', title: 'TinyMCE.jQuery [Deprecated]', version: packageData.version, authors: 'Ephox Corp', owners: 'Ephox Corp', description: 'This package has been deprecated use https://www.nuget.org/packages/TinyMCE/', releaseNotes: 'This package has been deprecated use https://www.nuget.org/packages/TinyMCE/', summary: 'This package has been deprecated use https://www.nuget.org/packages/TinyMCE/', projectUrl: 'http://www.tinymce.com/', iconUrl: 'http://www.tinymce.com/favicon.ico', licenseUrl: 'http://www.tinymce.com/license', requireLicenseAcceptance: true, tags: 'Editor TinyMCE HTML HTMLEditor', 
excludes: [ 'js/**/config', 'js/**/scratch', 'js/**/classes', 'js/**/lib', 'js/**/dependency', 'js/**/src', 'js/**/*.less', 'js/**/*.dev.svg', 'js/**/*.dev.js', 'js/**/*.map', 'js/tinymce/tinymce.full.min.js' ], outputDir: 'tmp' }, files: [ { src: 'js/tinymce/langs', dest: '/content/scripts/tinymce/langs' }, { src: 'js/tinymce/plugins', dest: '/content/scripts/tinymce/plugins' }, { src: 'js/tinymce/themes', dest: '/content/scripts/tinymce/themes' }, { src: 'js/tinymce/skins', dest: '/content/scripts/tinymce/skins' }, { src: 'js/tinymce/tinymce.js', dest: '/content/scripts/tinymce/tinymce.js' }, { src: 'js/tinymce/tinymce.min.js', dest: '/content/scripts/tinymce/tinymce.min.js' }, { src: 'js/tinymce/jquery.tinymce.min.js', dest: '/content/scripts/tinymce/jquery.tinymce.min.js' }, { src: 'js/tinymce/license.txt', dest: '/content/scripts/tinymce/license.txt' } ] } }, bundle: { minified: { options: { themesDir: 'js/tinymce/themes', pluginsDir: 'js/tinymce/plugins', pluginFileName: 'plugin.min.js', themeFileName: 'theme.min.js', outputPath: 'js/tinymce/tinymce.full.min.js' }, src: [ 'js/tinymce/tinymce.min.js' ] }, source: { options: { themesDir: 'js/tinymce/themes', pluginsDir: 'js/tinymce/plugins', pluginFileName: 'plugin.js', themeFileName: 'theme.js', outputPath: 'js/tinymce/tinymce.full.js' }, src: [ 'js/tinymce/tinymce.js' ] } }, clean: { dist: ['js'], lib: ['lib'], scratch: ['scratch'], release: ['tmp'] }, 'bedrock-manual': { core: { config: 'tsconfig.json', projectdir: '.', stopOnFailure: true, testfiles: [ 'src/**/test/ts/atomic/**/*Test.ts', 'src/**/test/ts/browser/**/*Test.ts', 'src/**/test/ts/phantom/**/*Test.ts' ], customRoutes: 'src/core/test/json/routes.json' }, atomic: { config: 'tsconfig.json', projectdir: '.', stopOnFailure: true, testfiles: [ 'src/**/test/ts/atomic/**/*Test.ts', ], customRoutes: 'src/core/test/json/routes.json' }, apollo: { config: 'tsconfig.json', testfiles: ['src/themes/silver/test/ts/phantom/**/*Test.ts', 
'src/themes/silver/test/ts/browser/**/*Test.ts', 'src/plugins/*/test/ts/browser/**/AG_*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'apollo-tests' }, silver: { config: 'tsconfig.json', testfiles: ['src/themes/silver/test/ts/phantom/**/*Test.ts', 'src/themes/silver/test/ts/browser/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'silver-tests' } }, 'bedrock-auto': { standard: { browser: grunt.option('bedrock-browser') !== undefined ? grunt.option('bedrock-browser') : 'chrome-headless', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], overallTimeout: 900000, singleTimeout: 30000, retries: 3, customRoutes: 'src/core/test/json/routes.json', name: grunt.option('bedrock-browser') !== undefined ? grunt.option('bedrock-browser') : 'chrome-headless' }, 'chrome-headless': { browser: 'chrome-headless', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'chrome-headless' }, 'firefox-headless': { browser: 'firefox-headless', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'firefox-headless' }, chrome: { browser: 'chrome', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'chrome' }, firefox: { browser: 'firefox', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'firefox' }, MicrosoftEdge: { browser: 'MicrosoftEdge', config: 
'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'MicrosoftEdge' }, ie: { browser: 'ie', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'ie' }, silver: { browser: 'phantomjs', config: 'tsconfig.json', testfiles: ['src/themes/silver/test/ts/phantom/**/*Test.ts', 'src/themes/silver/test/ts/browser/**/*Test.ts', 'src/themes/silver/test/ts/webdriver/*/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'silver-tests' } }, watch: { skins: { files: ['src/skins/oxide/main/less/**/*'], tasks: ['less', 'copy:skins'], options: { spawn: false } }, } }); grunt.registerTask('version', 'Creates a version file', function () { grunt.file.write('tmp/version.txt', BUILD_VERSION); }); grunt.registerTask('build-headers', 'Appends build headers to js files', function () { var header = '// ' + packageData.version + ' (' + packageData.date + ')\n'; grunt.file.write('js/tinymce/tinymce.js', header + grunt.file.read('js/tinymce/tinymce.js')); grunt.file.write('js/tinymce/tinymce.min.js', header + grunt.file.read('js/tinymce/tinymce.min.js')); }); require('load-grunt-tasks')(grunt); grunt.loadTasks('tools/tasks'); grunt.loadNpmTasks('@ephox/bedrock'); grunt.loadNpmTasks('@ephox/swag'); grunt.loadNpmTasks('grunt-tslint'); grunt.registerTask('prod', [ // 'validateVersion', 'shell:tsc', 'tslint', 'globals', 'rollup', 'unicode', 'uglify', 'less', 'copy', 'build-headers', 'clean:release', 'moxiezip', 'nugetpack', 'version' ]); grunt.registerTask('dev', [ 'globals', 'shell:tsc', 'rollup', 'unicode', 'less', 'copy' ]); grunt.registerTask('start', ['webpack-dev-server']); grunt.registerTask('default', ['clean', 'prod']); grunt.registerTask('test', 
['bedrock-auto:phantomjs']); };
Gruntfile.js
/*eslint-env node */ let zipUtils = require('./tools/modules/zip-helper'); let gruntUtils = require('./tools/modules/grunt-utils'); let gruntWebPack = require('./tools/modules/grunt-webpack'); let swag = require('@ephox/swag'); let path = require('path'); let plugins = [ 'advlist', 'anchor', 'autolink', 'autoresize', 'autosave', 'bbcode', 'charmap', 'code', 'codesample', 'colorpicker', 'contextmenu', 'directionality', 'emoticons', 'help', 'fullpage', 'fullscreen', 'hr', 'image', 'imagetools', 'importcss', 'insertdatetime', 'legacyoutput', 'link', 'lists', 'media', 'nonbreaking', 'noneditable', 'pagebreak', 'paste', 'preview', 'print', 'save', 'searchreplace', 'spellchecker', 'tabfocus', 'table', 'template', 'textcolor', 'textpattern', 'toc', 'visualblocks', 'visualchars', 'wordcount', 'quickbars', ]; let themes = [ 'mobile', // 'modern', 'mobile', 'inlite', 'silver' 'silver' ]; module.exports = function (grunt) { var packageData = grunt.file.readJSON('package.json'); var changelogLine = grunt.file.read('changelog.txt').toString().split('\n')[0]; var BUILD_VERSION = packageData.version + (process.env.BUILD_NUMBER ? 
'-' + process.env.BUILD_NUMBER : ''); packageData.date = /^Version [^\(]+\(([^\)]+)\)/.exec(changelogLine)[1]; grunt.initConfig({ pkg: packageData, shell: { tsc: { command: 'node ./node_modules/typescript/bin/tsc' } }, tslint: { options: { configuration: 'tslint.json' }, files: { src: [ 'src/**/*.ts' ] } }, globals: { options: { configFile: 'src/core/main/json/globals.json', outputDir: 'lib/globals', templateFile: 'src/core/main/js/GlobalsTemplate.js' } }, rollup: Object.assign( { core: { options: { treeshake: true, name: 'tinymce', format: 'iife', banner: '(function () {', footer: '})();', onwarn: swag.onwarn, plugins: [ swag.nodeResolve({ basedir: __dirname, prefixes: { 'tinymce/core': 'lib/core/main/ts' } }), swag.remapImports() ] }, files:[ { src: 'lib/core/main/ts/api/Main.js', dest: 'js/tinymce/tinymce.js' } ] } }, gruntUtils.generate(plugins, 'plugin', (name) => { return { options: { treeshake: true, name: name, format: 'iife', banner: '(function () {', footer: '})();', onwarn: swag.onwarn, plugins: [ swag.nodeResolve({ basedir: __dirname, prefixes: gruntUtils.prefixes({ 'tinymce/core': 'lib/globals/tinymce/core' }, [ [`tinymce/plugins/${name}`, `lib/plugins/${name}/main/ts`] ]), mappers: [ swag.mappers.replaceDir('./lib/core/main/ts/api', './lib/globals/tinymce/core/api'), swag.mappers.invalidDir('./lib/core/main/ts') ] }), swag.remapImports() ] }, files:[ { src: `lib/plugins/${name}/main/ts/Plugin.js`, dest: `js/tinymce/plugins/${name}/plugin.js` } ] }; }), gruntUtils.generate(themes, 'theme', (name) => { return { options: { treeshake: true, name: name, format: 'iife', banner: '(function () {', footer: '})();', onwarn: swag.onwarn, plugins: [ swag.nodeResolve({ basedir: __dirname, prefixes: gruntUtils.prefixes({ 'tinymce/core': 'lib/globals/tinymce/core', 'tinymce/ui': 'lib/ui/main/ts' }, [ [`tinymce/themes/${name}`, `lib/themes/${name}/main/ts`] ]), mappers: [ swag.mappers.replaceDir('./lib/core/main/ts/api', './lib/globals/tinymce/core/api'), 
swag.mappers.invalidDir('./lib/core/main/ts') ] }), swag.remapImports() ] }, files:[ { src: `lib/themes/${name}/main/ts/Theme.js`, dest: `js/tinymce/themes/${name}/theme.js` } ] }; }) ), unicode: { 'emoticons-plugin': { files: [ { src: 'src/plugins/emoticons/main/js/*.js', dest: 'js/tinymce/plugins/emoticons/js/emojis.js' } ] } }, uglify: Object.assign( { options: { output: { ascii_only: true }, ie8: true }, core: { files: [ { src: 'js/tinymce/tinymce.js', dest: 'js/tinymce/tinymce.min.js' }, { src: 'src/core/main/js/JqueryIntegration.js', dest: 'js/tinymce/jquery.tinymce.min.js' } ] } }, gruntUtils.generate(plugins, 'plugin', (name) => { var pluginExtras = { emoticons: [ { src: 'js/tinymce/plugins/emoticons/js/emojis.js', dest: 'js/tinymce/plugins/emoticons/js/emojis.min.js' } ] }; return { files: [ { src: `js/tinymce/plugins/${name}/plugin.js`, dest: `js/tinymce/plugins/${name}/plugin.min.js` } ].concat(pluginExtras.hasOwnProperty(name) ? pluginExtras[name] : []) }; }), gruntUtils.generate(themes, 'theme', (name) => { return { files: [ { src: `js/tinymce/themes/${name}/theme.js`, dest: `js/tinymce/themes/${name}/theme.min.js` } ] }; }) ), webpack: Object.assign( {core: () => gruntWebPack.create('src/core/demo/ts/demo/Demos.ts', 'tsconfig.json', 'scratch/demos/core', 'demo.js')}, {plugins: () => gruntWebPack.allPluginDemos(plugins)}, {themes: () => { gruntWebPack.allThemeDemos(themes); gruntWebPack.allComponentDemos(themes); }}, gruntUtils.generate(plugins, 'plugin', (name) => () => gruntWebPack.createPlugin(name) ), gruntUtils.generate(themes, 'theme', (name) => () => gruntWebPack.createTheme(name) ) ), 'webpack-dev-server': { options: { webpack: gruntWebPack.all(plugins, themes), publicPath: '/', inline: false, port: grunt.option('webpack-port') !== undefined ? 
grunt.option('webpack-port') : 3000, host: '0.0.0.0', disableHostCheck: true, before: app => gruntWebPack.generateDemoIndex(grunt, app, plugins, themes) }, start: { } }, less: { mobile: { options: { plugins : [ new (require('less-plugin-autoprefix'))({ browsers : [ 'last 2 versions', /* for phantom */'safari >= 4' ] }) ], compress: true, yuicompress: true, sourceMap: true, sourceMapRootpath: '.', optimization: 2 }, files: { 'js/tinymce/skins/ui/oxide/skin.mobile.min.css': 'src/skins/oxide/main/less/mobile/app/mobile-less.less' } }, 'content-mobile': { options: { cleancss: true, strictImports: true, compress: true }, files: { 'js/tinymce/skins/ui/oxide/content.mobile.min.css': 'src/skins/oxide/main/less/mobile/content.less' } } }, copy: { core: { options: { process: function (content) { return content. replace('@@majorVersion@@', packageData.version.split('.')[0]). replace('@@minorVersion@@', packageData.version.split('.').slice(1).join('.')). replace('@@releaseDate@@', packageData.date); } }, files: [ { src: 'js/tinymce/tinymce.js', dest: 'js/tinymce/tinymce.js' }, { src: 'js/tinymce/tinymce.min.js', dest: 'js/tinymce/tinymce.min.js' }, { src: 'src/core/main/text/readme_lang.md', dest: 'js/tinymce/langs/readme.md' }, { src: 'LICENSE.TXT', dest: 'js/tinymce/license.txt' } ] }, 'ui-skins': { files: [ { expand: true, flatten: true, cwd: 'src/themes/mobile/main/fonts', src: [ '**', '!*.json', '!*.md' ], dest: 'js/tinymce/skins/ui/oxide/fonts' }, { expand: true, flatten: true, cwd: 'src/skins/oxide/main/img', src: '**', dest: 'js/tinymce/skins/ui/oxide/img' }, { expand: true, flatten: true, cwd: 'node_modules/@ephox/oxide/build/skins/oxide-default', src: [ '*.min.css', '*.min.css.map' ], dest: 'js/tinymce/skins/ui/oxide' }, { expand: true, flatten: true, cwd: 'src/skins/oxide/main/img', src: '**', dest: 'js/tinymce/skins/ui/oxide-dark/img' }, { expand: true, cwd: 'node_modules/@ephox/oxide/build/skins/oxide-dark', src: [ '*.min.css', '*.min.css.map' ], dest: 
'js/tinymce/skins/ui/oxide-dark' } ] }, 'content-skins': { files: [ { expand: true, cwd: 'node_modules/@ephox/oxide/build/skins/content', src: '**', dest: 'js/tinymce/skins/content' }, ] }, 'visualblocks-plugin': { files: [ { src: 'src/plugins/visualblocks/main/css/visualblocks.css', dest: 'js/tinymce/plugins/visualblocks/css/visualblocks.css' } ] } }, moxiezip: { production: { options: { baseDir: 'tinymce', excludes: [ 'js/**/plugin.js', 'js/**/theme.js', 'js/**/*.map', 'js/tinymce/tinymce.full.min.js', 'js/tinymce/plugins/moxiemanager', 'js/tinymce/plugins/visualblocks/img', 'js/tinymce/skins/*/fonts/*.json', 'js/tinymce/skins/*/fonts/readme.md', 'readme.md' ], to: 'tmp/tinymce_<%= pkg.version %>.zip' }, src: [ 'js/tinymce/langs', 'js/tinymce/plugins', 'js/tinymce/skins', 'js/tinymce/themes', 'js/tinymce/tinymce.min.js', 'js/tinymce/jquery.tinymce.min.js', 'js/tinymce/license.txt', 'changelog.txt', 'LICENSE.TXT', 'readme.md' ] }, development: { options: { baseDir: 'tinymce', excludes: [ 'src/**/dist', 'src/**/scratch', 'src/**/lib', 'src/**/dependency', 'js/tinymce/tinymce.full.min.js', 'js/tests/.jshintrc' ], to: 'tmp/tinymce_<%= pkg.version %>_dev.zip' }, src: [ 'config', 'src', 'js', 'tests', 'tools', 'changelog.txt', 'LICENSE.TXT', 'Gruntfile.js', 'readme.md', 'package.json', '.eslintrc', '.jscsrc', '.jshintrc' ] }, cdn: { options: { onBeforeSave: function (zip) { zip.addData('dist/version.txt', packageData.version); }, pathFilter: function (zipFilePath) { return zipFilePath.replace('js/tinymce/', 'dist/'); }, excludes: [ 'js/**/config', 'js/**/scratch', 'js/**/classes', 'js/**/lib', 'js/**/dependency', 'js/**/src', 'js/**/*.less', 'js/**/*.dev.js', 'js/**/*.dev.svg', 'js/**/*.map', 'js/tinymce/tinymce.full.min.js', 'js/tinymce/plugins/moxiemanager', 'js/tinymce/plugins/visualblocks/img', 'js/tinymce/skins/*/fonts/*.json', 'js/tinymce/skins/*/fonts/*.dev.svg', 'js/tinymce/skins/*/fonts/readme.md', 'readme.md', 'js/tests/.jshintrc' ], concat: [ { src: [ 
'js/tinymce/tinymce.min.js', 'js/tinymce/themes/*/theme.min.js', 'js/tinymce/plugins/*/plugin.min.js', '!js/tinymce/plugins/example/plugin.min.js', '!js/tinymce/plugins/example_dependency/plugin.min.js' ], dest: [ 'js/tinymce/tinymce.min.js' ] } ], to: 'tmp/tinymce_<%= pkg.version %>_cdn.zip' }, src: [ 'js/tinymce/jquery.tinymce.min.js', 'js/tinymce/tinymce.js', 'js/tinymce/langs', 'js/tinymce/plugins', 'js/tinymce/skins', 'js/tinymce/themes', 'js/tinymce/license.txt' ] }, component: { options: { excludes: [ 'js/**/config', 'js/**/scratch', 'js/**/classes', 'js/**/lib', 'js/**/dependency', 'js/**/src', 'js/**/*.less', 'js/**/*.dev.svg', 'js/**/*.dev.js', 'js/**/*.map', 'js/tinymce/tinymce.full.min.js', 'js/tinymce/plugins/moxiemanager', 'js/tinymce/plugins/example', 'js/tinymce/plugins/example_dependency', 'js/tinymce/plugins/visualblocks/img', 'js/tinymce/skins/*/fonts/*.json', 'js/tinymce/skins/*/fonts/readme.md' ], pathFilter: function (zipFilePath) { if (zipFilePath.indexOf('js/tinymce/') === 0) { return zipFilePath.substr('js/tinymce/'.length); } return zipFilePath; }, onBeforeSave: function (zip) { function jsonToBuffer(json) { return new Buffer(JSON.stringify(json, null, '\t')); } zip.addData('bower.json', jsonToBuffer({ 'name': 'tinymce', 'description': 'Web based JavaScript HTML WYSIWYG editor control.', 'license': 'LGPL-2.1', 'keywords': ['editor', 'wysiwyg', 'tinymce', 'richtext', 'javascript', 'html'], 'homepage': 'http://www.tinymce.com', 'ignore': ['readme.md', 'composer.json', 'package.json', '.npmignore', 'changelog.txt'] })); zip.addData('package.json', jsonToBuffer({ 'name': 'tinymce', 'version': packageData.version, 'repository': { 'type': 'git', 'url': 'https://github.com/tinymce/tinymce-dist.git' }, 'description': 'Web based JavaScript HTML WYSIWYG editor control.', 'author': 'Ephox Corporation', 'main': 'tinymce.js', 'license': 'LGPL-2.1', 'keywords': ['editor', 'wysiwyg', 'tinymce', 'richtext', 'javascript', 'html'], 'bugs': { 'url': 
'https://github.com/tinymce/tinymce/issues' } })); zip.addData('composer.json', jsonToBuffer({ 'name': 'tinymce/tinymce', 'version': packageData.version, 'description': 'Web based JavaScript HTML WYSIWYG editor control.', 'license': ['LGPL-2.1-only'], 'keywords': ['editor', 'wysiwyg', 'tinymce', 'richtext', 'javascript', 'html'], 'homepage': 'http://www.tinymce.com', 'type': 'component', 'extra': { 'component': { 'scripts': [ 'tinymce.js', 'plugins/*/plugin.js', 'themes/*/theme.js' ], 'files': [ 'tinymce.min.js', 'plugins/*/plugin.min.js', 'themes/*/theme.min.js', 'skins/**' ] } }, 'archive': { 'exclude': ['readme.md', 'bower.js', 'package.json', '.npmignore', 'changelog.txt'] } })); zip.addFile( 'jquery.tinymce.js', 'js/tinymce/jquery.tinymce.min.js' ); var getDirs = zipUtils.getDirectories(grunt, this.excludes); zipUtils.addIndexFiles( zip, getDirs('js/tinymce/plugins'), zipUtils.generateIndex('plugins', 'plugin') ); zipUtils.addIndexFiles( zip, getDirs('js/tinymce/themes'), zipUtils.generateIndex('themes', 'theme') ); }, to: 'tmp/tinymce_<%= pkg.version %>_component.zip' }, src: [ 'js/tinymce/skins', 'js/tinymce/plugins', 'js/tinymce/themes', 'js/tinymce/tinymce.js', 'js/tinymce/tinymce.min.js', 'js/tinymce/jquery.tinymce.min.js', 'js/tinymce/license.txt', 'changelog.txt', 'readme.md' ] } }, nugetpack: { main: { options: { id: 'TinyMCE', version: packageData.version, authors: 'Ephox Corp', owners: 'Ephox Corp', description: 'The best WYSIWYG editor! TinyMCE is a platform independent web based Javascript HTML WYSIWYG editor ' + 'control released as Open Source under LGPL by Ephox Corp. TinyMCE has the ability to convert HTML ' + 'TEXTAREA fields or other HTML elements to editor instances. 
TinyMCE is very easy to integrate ' + 'into other Content Management Systems.', releaseNotes: 'Release notes for my package.', summary: 'TinyMCE is a platform independent web based Javascript HTML WYSIWYG editor ' + 'control released as Open Source under LGPL by Ephox Corp.', projectUrl: 'http://www.tinymce.com/', iconUrl: 'http://www.tinymce.com/favicon.ico', licenseUrl: 'http://www.tinymce.com/license', requireLicenseAcceptance: true, tags: 'Editor TinyMCE HTML HTMLEditor', excludes: [ 'js/**/config', 'js/**/scratch', 'js/**/classes', 'js/**/lib', 'js/**/dependency', 'js/**/src', 'js/**/*.less', 'js/**/*.dev.svg', 'js/**/*.dev.js', 'js/**/*.map', 'js/tinymce/tinymce.full.min.js' ], outputDir: 'tmp' }, files: [ { src: 'js/tinymce/langs', dest: '/content/scripts/tinymce/langs' }, { src: 'js/tinymce/plugins', dest: '/content/scripts/tinymce/plugins' }, { src: 'js/tinymce/themes', dest: '/content/scripts/tinymce/themes' }, { src: 'js/tinymce/skins', dest: '/content/scripts/tinymce/skins' }, { src: 'js/tinymce/tinymce.js', dest: '/content/scripts/tinymce/tinymce.js' }, { src: 'js/tinymce/tinymce.min.js', dest: '/content/scripts/tinymce/tinymce.min.js' }, { src: 'js/tinymce/jquery.tinymce.min.js', dest: '/content/scripts/tinymce/jquery.tinymce.min.js' }, { src: 'js/tinymce/license.txt', dest: '/content/scripts/tinymce/license.txt' } ] }, jquery: { options: { id: 'TinyMCE.jQuery', title: 'TinyMCE.jQuery [Deprecated]', version: packageData.version, authors: 'Ephox Corp', owners: 'Ephox Corp', description: 'This package has been deprecated use https://www.nuget.org/packages/TinyMCE/', releaseNotes: 'This package has been deprecated use https://www.nuget.org/packages/TinyMCE/', summary: 'This package has been deprecated use https://www.nuget.org/packages/TinyMCE/', projectUrl: 'http://www.tinymce.com/', iconUrl: 'http://www.tinymce.com/favicon.ico', licenseUrl: 'http://www.tinymce.com/license', requireLicenseAcceptance: true, tags: 'Editor TinyMCE HTML HTMLEditor', 
excludes: [ 'js/**/config', 'js/**/scratch', 'js/**/classes', 'js/**/lib', 'js/**/dependency', 'js/**/src', 'js/**/*.less', 'js/**/*.dev.svg', 'js/**/*.dev.js', 'js/**/*.map', 'js/tinymce/tinymce.full.min.js' ], outputDir: 'tmp' }, files: [ { src: 'js/tinymce/langs', dest: '/content/scripts/tinymce/langs' }, { src: 'js/tinymce/plugins', dest: '/content/scripts/tinymce/plugins' }, { src: 'js/tinymce/themes', dest: '/content/scripts/tinymce/themes' }, { src: 'js/tinymce/skins', dest: '/content/scripts/tinymce/skins' }, { src: 'js/tinymce/tinymce.js', dest: '/content/scripts/tinymce/tinymce.js' }, { src: 'js/tinymce/tinymce.min.js', dest: '/content/scripts/tinymce/tinymce.min.js' }, { src: 'js/tinymce/jquery.tinymce.min.js', dest: '/content/scripts/tinymce/jquery.tinymce.min.js' }, { src: 'js/tinymce/license.txt', dest: '/content/scripts/tinymce/license.txt' } ] } }, bundle: { minified: { options: { themesDir: 'js/tinymce/themes', pluginsDir: 'js/tinymce/plugins', pluginFileName: 'plugin.min.js', themeFileName: 'theme.min.js', outputPath: 'js/tinymce/tinymce.full.min.js' }, src: [ 'js/tinymce/tinymce.min.js' ] }, source: { options: { themesDir: 'js/tinymce/themes', pluginsDir: 'js/tinymce/plugins', pluginFileName: 'plugin.js', themeFileName: 'theme.js', outputPath: 'js/tinymce/tinymce.full.js' }, src: [ 'js/tinymce/tinymce.js' ] } }, clean: { dist: ['js'], lib: ['lib'], scratch: ['scratch'], release: ['tmp'] }, 'bedrock-manual': { core: { config: 'tsconfig.json', projectdir: '.', stopOnFailure: true, testfiles: [ 'src/**/test/ts/atomic/**/*Test.ts', 'src/**/test/ts/browser/**/*Test.ts', 'src/**/test/ts/phantom/**/*Test.ts' ], customRoutes: 'src/core/test/json/routes.json' }, atomic: { config: 'tsconfig.json', projectdir: '.', stopOnFailure: true, testfiles: [ 'src/**/test/ts/atomic/**/*Test.ts', ], customRoutes: 'src/core/test/json/routes.json' }, apollo: { config: 'tsconfig.json', testfiles: ['src/themes/silver/test/ts/phantom/**/*Test.ts', 
'src/themes/silver/test/ts/browser/**/*Test.ts', 'src/plugins/*/test/ts/browser/**/AG_*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'apollo-tests' }, silver: { config: 'tsconfig.json', testfiles: ['src/themes/silver/test/ts/phantom/**/*Test.ts', 'src/themes/silver/test/ts/browser/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'silver-tests' } }, 'bedrock-auto': { standard: { browser: grunt.option('bedrock-browser') !== undefined ? grunt.option('bedrock-browser') : 'chrome-headless', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], overallTimeout: 900000, singleTimeout: 30000, retries: 3, customRoutes: 'src/core/test/json/routes.json', name: grunt.option('bedrock-browser') !== undefined ? grunt.option('bedrock-browser') : 'chrome-headless' }, 'chrome-headless': { browser: 'chrome-headless', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'chrome-headless' }, 'firefox-headless': { browser: 'firefox-headless', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'firefox-headless' }, chrome: { browser: 'chrome', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'chrome' }, firefox: { browser: 'firefox', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'firefox' }, MicrosoftEdge: { browser: 'MicrosoftEdge', config: 
'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'MicrosoftEdge' }, ie: { browser: 'ie', config: 'tsconfig.json', testfiles: ['src/**/test/ts/**/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'ie' }, silver: { browser: 'phantomjs', config: 'tsconfig.json', testfiles: ['src/themes/silver/test/ts/phantom/**/*Test.ts', 'src/themes/silver/test/ts/browser/**/*Test.ts', 'src/themes/silver/test/ts/webdriver/*/*Test.ts'], stopOnFailure: true, overallTimeout: 600000, singleTimeout: 300000, customRoutes: 'src/core/test/json/routes.json', name: 'silver-tests' } }, watch: { skins: { files: ['src/skins/oxide/main/less/**/*'], tasks: ['less', 'copy:skins'], options: { spawn: false } }, } }); grunt.registerTask('version', 'Creates a version file', function () { grunt.file.write('tmp/version.txt', BUILD_VERSION); }); grunt.registerTask('build-headers', 'Appends build headers to js files', function () { var header = '// ' + packageData.version + ' (' + packageData.date + ')\n'; grunt.file.write('js/tinymce/tinymce.js', header + grunt.file.read('js/tinymce/tinymce.js')); grunt.file.write('js/tinymce/tinymce.min.js', header + grunt.file.read('js/tinymce/tinymce.min.js')); }); require('load-grunt-tasks')(grunt); grunt.loadTasks('tools/tasks'); grunt.loadNpmTasks('@ephox/bedrock'); grunt.loadNpmTasks('@ephox/swag'); grunt.loadNpmTasks('grunt-tslint'); grunt.registerTask('prod', [ // 'validateVersion', 'shell:tsc', 'tslint', 'globals', 'rollup', 'unicode', 'uglify', 'less', 'copy', 'build-headers', 'clean:release', 'moxiezip', 'nugetpack', 'version' ]); grunt.registerTask('dev', [ 'globals', 'shell:tsc', 'rollup', 'unicode', 'less', 'copy' ]); grunt.registerTask('start', ['webpack-dev-server']); grunt.registerTask('default', ['prod']); grunt.registerTask('test', 
['bedrock-auto:phantomjs']); };
TINY-3095: Make the default grunt command do a clean first, before doing a prod build
Gruntfile.js
TINY-3095: Make the default grunt command do a clean first, before doing a prod build
<ide><path>runtfile.js <ide> <ide> grunt.registerTask('start', ['webpack-dev-server']); <ide> <del> grunt.registerTask('default', ['prod']); <add> grunt.registerTask('default', ['clean', 'prod']); <ide> grunt.registerTask('test', ['bedrock-auto:phantomjs']); <ide> };
Java
apache-2.0
43fb7ed16f551bfa78a0b7214faf66f60f439639
0
Cosium/AxonFramework,phaas/AxonFramework,bojanv55/AxonFramework,soulrebel/AxonFramework,fpape/AxonFramework,AxonFramework/AxonFramework,adinath/AxonFramework,BrentDouglas/AxonFramework,christiaandejong/AxonFramework,oiavorskyi/AxonFramework,krosenvold/AxonFramework
/* * Copyright (c) 2010-2011. Axon Framework * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.axonframework.eventstore.jpa; import org.axonframework.domain.AggregateIdentifier; import org.axonframework.domain.DomainEvent; import org.axonframework.domain.DomainEventStream; import org.axonframework.eventstore.EventSerializer; import org.axonframework.eventstore.EventStoreManagement; import org.axonframework.eventstore.EventStreamNotFoundException; import org.axonframework.eventstore.EventVisitor; import org.axonframework.eventstore.SnapshotEventStore; import org.axonframework.eventstore.XStreamEventSerializer; import org.axonframework.repository.ConcurrencyException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.sql.DataSource; /** * An EventStore implementation that uses JPA to store DomainEvents in a database. The actual DomainEvent is stored as * a * serialized blob of bytes. Other columns are used to store meta-data that allow quick finding of DomainEvents for a * specific aggregate in the correct order. * <p/> * This EventStore supports snapshots pruning, which can enabled by configuring a {@link #setMaxSnapshotsArchived(int) * maximum number of snapshots to archive}. 
By default snapshot pruning is configured to archive only {@value * #DEFAULT_MAX_SNAPSHOTS_ARCHIVED} snapshot per aggregate. * <p/> * The serializer used to serialize the events is configurable. By default, the {@link XStreamEventSerializer} is used. * * @author Allard Buijze * @since 0.5 */ public class JpaEventStore implements SnapshotEventStore, EventStoreManagement { private static final Logger logger = LoggerFactory.getLogger(JpaEventStore.class); private EntityManager entityManager; private final EventSerializer eventSerializer; private static final int DEFAULT_BATCH_SIZE = 100; private int batchSize = DEFAULT_BATCH_SIZE; private static final int DEFAULT_MAX_SNAPSHOTS_ARCHIVED = 1; private int maxSnapshotsArchived = DEFAULT_MAX_SNAPSHOTS_ARCHIVED; private PersistenceExceptionResolver persistenceExceptionResolver; /** * Initialize a JpaEventStore using an {@link org.axonframework.eventstore.XStreamEventSerializer}, which * serializes * events as XML. */ public JpaEventStore() { this(new XStreamEventSerializer()); } /** * Initialize a JpaEventStore which serializes events using the given {@link org.axonframework.eventstore.EventSerializer}. * * @param eventSerializer The serializer to (de)serialize domain events with. 
*/ public JpaEventStore(EventSerializer eventSerializer) { this.eventSerializer = eventSerializer; } /** * {@inheritDoc} */ @Override public void appendEvents(String type, DomainEventStream events) { DomainEvent event = null; try { while (events.hasNext()) { event = events.next(); DomainEventEntry entry = new DomainEventEntry(type, event, eventSerializer); entityManager.persist(entry); } } catch (RuntimeException exception) { if (persistenceExceptionResolver != null && persistenceExceptionResolver.isDuplicateKeyViolation(exception)) { throw new ConcurrencyException( String.format("Concurrent modification detected for Aggregate identifier [%s], sequence: [%s]", event.getAggregateIdentifier(), event.getSequenceNumber().toString()), exception); } throw exception; } } /** * {@inheritDoc} */ @Override public DomainEventStream readEvents(String type, AggregateIdentifier identifier) { long snapshotSequenceNumber = -1; SnapshotEventEntry lastSnapshotEvent = loadLastSnapshotEvent(type, identifier); DomainEvent snapshotEvent = null; if (lastSnapshotEvent != null) { try { snapshotEvent = lastSnapshotEvent.getDomainEvent(eventSerializer); snapshotSequenceNumber = lastSnapshotEvent.getSequenceNumber(); } catch (RuntimeException ex) { logger.warn("Error while reading snapshot event entry. " + "Reconstructing aggregate on entire event stream. 
Caused by: {} {}", ex.getClass().getName(), ex.getMessage()); } } List<DomainEvent> events = fetchBatch(type, identifier, snapshotSequenceNumber + 1); if (snapshotEvent != null) { events.add(0, snapshotEvent); } if (events.isEmpty()) { throw new EventStreamNotFoundException(type, identifier); } return new BatchingDomainEventStream(events, identifier, type); } @SuppressWarnings({"unchecked"}) private List<DomainEvent> fetchBatch(String type, AggregateIdentifier identifier, long firstSequenceNumber) { List<byte[]> entries = (List<byte[]>) entityManager.createQuery( "SELECT e.serializedEvent " + "FROM DomainEventEntry e " + "WHERE e.aggregateIdentifier = :id AND e.type = :type AND e.sequenceNumber >= :seq " + "ORDER BY e.sequenceNumber ASC") .setParameter("id", identifier.asString()) .setParameter("type", type) .setParameter("seq", firstSequenceNumber) .setMaxResults(batchSize) .getResultList(); List<DomainEvent> events = new ArrayList<DomainEvent>(entries.size()); for (byte[] entry : entries) { events.add(eventSerializer.deserialize(entry)); } return events; } @SuppressWarnings({"unchecked"}) private SnapshotEventEntry loadLastSnapshotEvent(String type, AggregateIdentifier identifier) { List<SnapshotEventEntry> entries = entityManager.createQuery( "SELECT e FROM SnapshotEventEntry e " + "WHERE e.aggregateIdentifier = :id AND e.type = :type " + "ORDER BY e.sequenceNumber DESC") .setParameter("id", identifier.asString()) .setParameter("type", type) .setMaxResults(1) .setFirstResult(0) .getResultList(); if (entries.size() < 1) { return null; } return entries.get(0); } /** * {@inheritDoc} * <p/> * Upon appending a snapshot, this particular EventStore implementation also prunes snapshots which * are considered redundant because they fall outside of the range of maximum snapshots to archive. 
*/ @Override public void appendSnapshotEvent(String type, DomainEvent snapshotEvent) { // Persist snapshot before pruning redundant archived ones, in order to prevent snapshot misses when reloading // an aggregate, which may occur when a READ_UNCOMMITTED transaction isolation level is used. entityManager.persist(new SnapshotEventEntry(type, snapshotEvent, eventSerializer)); if (maxSnapshotsArchived > 0) { pruneSnapshots(type, snapshotEvent); } } /** * Prunes snapshots which are considered redundant because they fall outside of the range of maximum snapshots to * archive. * * @param type the type of the aggregate for which to prune snapshots * @param mostRecentSnapshotEvent the last appended snapshot event */ private void pruneSnapshots(String type, DomainEvent mostRecentSnapshotEvent) { Iterator<Long> redundantSnapshots = findRedundantSnapshots(type, mostRecentSnapshotEvent); if (redundantSnapshots.hasNext()) { Long sequenceOfFirstSnapshotToPrune = redundantSnapshots.next(); entityManager.createQuery("DELETE FROM SnapshotEventEntry e " + "WHERE e.type = :type " + "AND e.aggregateIdentifier = :aggregateIdentifier " + "AND e.sequenceNumber <= :sequenceOfFirstSnapshotToPrune") .setParameter("type", type) .setParameter("aggregateIdentifier", mostRecentSnapshotEvent.getAggregateIdentifier().asString()) .setParameter("sequenceOfFirstSnapshotToPrune", sequenceOfFirstSnapshotToPrune) .executeUpdate(); } } /** * Finds the first of redundant snapshots, returned as an iterator for convenience purposes. 
* * @param type the type of the aggregate for which to find redundant snapshots * @param snapshotEvent the last appended snapshot event * @return an iterator over the snapshots found */ @SuppressWarnings({"unchecked"}) private Iterator<Long> findRedundantSnapshots(String type, DomainEvent snapshotEvent) { return entityManager.createQuery( "SELECT e.sequenceNumber FROM SnapshotEventEntry e " + "WHERE e.type = :type AND e.aggregateIdentifier = :aggregateIdentifier " + "ORDER BY e.sequenceNumber DESC") .setParameter("type", type) .setParameter("aggregateIdentifier", snapshotEvent.getAggregateIdentifier().asString()) .setFirstResult(maxSnapshotsArchived) .setMaxResults(1) .getResultList().iterator(); } @Override public void visitEvents(EventVisitor visitor) { int first = 0; List<byte[]> batch; boolean shouldContinue = true; while (shouldContinue) { batch = fetchBatch(first); for (byte[] entry : batch) { visitor.doWithEvent(eventSerializer.deserialize(entry)); } shouldContinue = (batch.size() >= batchSize); first += batchSize; } } @SuppressWarnings({"unchecked"}) private List<byte[]> fetchBatch(int startPosition) { return entityManager.createQuery( "SELECT e.serializedEvent FROM DomainEventEntry e ORDER BY e.timeStamp ASC, e.sequenceNumber ASC") .setFirstResult(startPosition) .setMaxResults(batchSize) .getResultList(); } /** * Sets the EntityManager for this EventStore to use. This EntityManager must be assigned to a persistence context * that contains the {@link DomainEventEntry} as one of the managed entity types. * * @param entityManager the EntityManager to use. */ @PersistenceContext public void setEntityManager(EntityManager entityManager) { this.entityManager = entityManager; } /** * Registers the data source that allows the EventStore to detect the database type and define the error codes that * represent concurrent access failures. 
* <p/> * Should not be used in combination with {@link #setPersistenceExceptionResolver(PersistenceExceptionResolver)}, * but rather as a shorthand alternative for most common database types. * * @param dataSource A data source providing access to the backing database * @throws SQLException If an error occurs while accessing the dataSource */ public void setDataSource(DataSource dataSource) throws SQLException { if (persistenceExceptionResolver == null) { persistenceExceptionResolver = new SQLErrorCodesResolver(dataSource); } } /** * Sets the persistenceExceptionResolver that will help detect concurrency exceptions from the backing database. * * @param persistenceExceptionResolver the persistenceExceptionResolver that will help detect concurrency * exceptions */ public void setPersistenceExceptionResolver(PersistenceExceptionResolver persistenceExceptionResolver) { this.persistenceExceptionResolver = persistenceExceptionResolver; } /** * Sets the number of events that should be read at each database access. When more than this number of events must * be read to rebuild an aggregate's state, the events are read in batches of this size. Defaults to 100. * <p/> * Tip: if you use a snapshotter, make sure to choose snapshot trigger and batch size such that a single batch will * generally retrieve all events required to rebuild an aggregate's state. * * @param batchSize the number of events to read on each database access. Default to 100. */ public void setBatchSize(int batchSize) { this.batchSize = batchSize; } /** * Sets the maximum number of snapshots to archive for an aggregate. The EventStore will keep at most this number * of snapshots per aggregate. * <p/> * Defaults to {@value #DEFAULT_MAX_SNAPSHOTS_ARCHIVED}. * * @param maxSnapshotsArchived The maximum number of snapshots to archive for an aggregate. A value less than 1 * disables pruning of snapshots. 
*/ public void setMaxSnapshotsArchived(int maxSnapshotsArchived) { this.maxSnapshotsArchived = maxSnapshotsArchived; } private final class BatchingDomainEventStream implements DomainEventStream { private int currentBatchSize; private Iterator<DomainEvent> currentBatch; private DomainEvent next; private final AggregateIdentifier id; private final String typeId; private BatchingDomainEventStream(List<DomainEvent> firstBatch, AggregateIdentifier id, String typeId) { this.id = id; this.typeId = typeId; this.currentBatchSize = firstBatch.size(); this.currentBatch = firstBatch.iterator(); if (currentBatch.hasNext()) { next = currentBatch.next(); } } @Override public boolean hasNext() { return next != null; } @Override public DomainEvent next() { DomainEvent nextEvent = next; if (!currentBatch.hasNext() && currentBatchSize >= batchSize) { logger.debug("Fetching new batch for Aggregate [{}]", id.asString()); currentBatch = fetchBatch(typeId, id, next.getSequenceNumber() + 1).iterator(); } next = currentBatch.hasNext() ? currentBatch.next() : null; return nextEvent; } @Override public DomainEvent peek() { return next; } } }
core/src/main/java/org/axonframework/eventstore/jpa/JpaEventStore.java
/* * Copyright (c) 2010-2011. Axon Framework * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.axonframework.eventstore.jpa; import org.axonframework.domain.AggregateIdentifier; import org.axonframework.domain.DomainEvent; import org.axonframework.domain.DomainEventStream; import org.axonframework.eventstore.EventSerializer; import org.axonframework.eventstore.EventStoreManagement; import org.axonframework.eventstore.EventStreamNotFoundException; import org.axonframework.eventstore.EventVisitor; import org.axonframework.eventstore.SnapshotEventStore; import org.axonframework.eventstore.XStreamEventSerializer; import org.axonframework.repository.ConcurrencyException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.sql.DataSource; /** * An EventStore implementation that uses JPA to store DomainEvents in a database. The actual DomainEvent is stored as * a * serialized blob of bytes. Other columns are used to store meta-data that allow quick finding of DomainEvents for a * specific aggregate in the correct order. * <p/> * This EventStore supports snapshots pruning, which can enabled by configuring a {@link #setMaxSnapshotsArchived(int) * maximum number of snapshots to archive}. 
By default snapshot pruning is configured to archive only {@value * #DEFAULT_MAX_SNAPSHOTS_ARCHIVED} snapshot per aggregate. * <p/> * The serializer used to serialize the events is configurable. By default, the {@link XStreamEventSerializer} is used. * * @author Allard Buijze * @since 0.5 */ public class JpaEventStore implements SnapshotEventStore, EventStoreManagement { private static final Logger logger = LoggerFactory.getLogger(JpaEventStore.class); private EntityManager entityManager; private final EventSerializer eventSerializer; private static final int DEFAULT_BATCH_SIZE = 100; private int batchSize = DEFAULT_BATCH_SIZE; private static final int DEFAULT_MAX_SNAPSHOTS_ARCHIVED = 1; private int maxSnapshotsArchived = DEFAULT_MAX_SNAPSHOTS_ARCHIVED; private PersistenceExceptionResolver persistenceExceptionResolver; /** * Initialize a JpaEventStore using an {@link org.axonframework.eventstore.XStreamEventSerializer}, which * serializes * events as XML. */ public JpaEventStore() { this(new XStreamEventSerializer()); } /** * Initialize a JpaEventStore which serializes events using the given {@link org.axonframework.eventstore.EventSerializer}. * * @param eventSerializer The serializer to (de)serialize domain events with. 
*/ public JpaEventStore(EventSerializer eventSerializer) { this.eventSerializer = eventSerializer; } /** * {@inheritDoc} */ @Override public void appendEvents(String type, DomainEventStream events) { DomainEvent event = null; try { while (events.hasNext()) { event = events.next(); DomainEventEntry entry = new DomainEventEntry(type, event, eventSerializer); entityManager.persist(entry); } } catch (RuntimeException exception) { if (persistenceExceptionResolver != null && persistenceExceptionResolver.isDuplicateKeyViolation(exception)) { throw new ConcurrencyException( String.format("Concurrent modification detected for Aggregate identifier [%s], sequence: [%s]", event.getAggregateIdentifier(), event.getSequenceNumber().toString()), exception); } throw exception; } } /** * {@inheritDoc} */ @Override public DomainEventStream readEvents(String type, AggregateIdentifier identifier) { long snapshotSequenceNumber = -1; SnapshotEventEntry lastSnapshotEvent = loadLastSnapshotEvent(type, identifier); DomainEvent snapshotEvent = null; if (lastSnapshotEvent != null) { try { snapshotEvent = lastSnapshotEvent.getDomainEvent(eventSerializer); snapshotSequenceNumber = lastSnapshotEvent.getSequenceNumber(); } catch (RuntimeException ex) { logger.warn("Error while reading snapshot event entry. " + "Reconstructing aggregate on entire event stream. 
Caused by: {} {}", ex.getClass().getName(), ex.getMessage()); } } List<DomainEvent> events = fetchBatch(type, identifier, snapshotSequenceNumber + 1); if (snapshotEvent != null) { events.add(0, snapshotEvent); } if (events.isEmpty()) { throw new EventStreamNotFoundException(type, identifier); } return new BatchingDomainEventStream(events, identifier, type); } @SuppressWarnings({"unchecked"}) private List<DomainEvent> fetchBatch(String type, AggregateIdentifier identifier, long firstSequenceNumber) { List<byte[]> entries = (List<byte[]>) entityManager.createQuery( "SELECT e.serializedEvent " + "FROM DomainEventEntry e " + "WHERE e.aggregateIdentifier = :id AND e.type = :type AND e.sequenceNumber >= :seq " + "ORDER BY e.sequenceNumber ASC") .setParameter("id", identifier.asString()) .setParameter("type", type) .setParameter("seq", firstSequenceNumber) .setMaxResults(batchSize) .getResultList(); List<DomainEvent> events = new ArrayList<DomainEvent>(entries.size()); for (byte[] entry : entries) { events.add(eventSerializer.deserialize(entry)); } return events; } @SuppressWarnings({"unchecked"}) private SnapshotEventEntry loadLastSnapshotEvent(String type, AggregateIdentifier identifier) { List<SnapshotEventEntry> entries = entityManager.createQuery( "SELECT e FROM SnapshotEventEntry e " + "WHERE e.aggregateIdentifier = :id AND e.type = :type " + "ORDER BY e.sequenceNumber DESC") .setParameter("id", identifier.asString()) .setParameter("type", type) .setMaxResults(1) .setFirstResult(0) .getResultList(); if (entries.size() < 1) { return null; } return entries.get(0); } /** * {@inheritDoc} * <p/> * Upon appending a snapshot, this particular EventStore implementation also prunes snapshots which * are considered redundant because they fall outside of the range of maximum snapshots to archive. 
*/ @Override public void appendSnapshotEvent(String type, DomainEvent snapshotEvent) { // Persist snapshot before pruning redundant archived ones, in order to prevent snapshot misses when reloading // an aggregate, which may occur when a READ_UNCOMMITTED transaction isolation level is used. entityManager.persist(new SnapshotEventEntry(type, snapshotEvent, eventSerializer)); if (maxSnapshotsArchived > 0) { pruneSnapshots(type, snapshotEvent); } } /** * Prunes snapshots which are considered redundant because they fall outside of the range of maximum snapshots to * archive. * * @param type the type of the aggregate for which to prune snapshots * @param mostRecentSnapshotEvent the last appended snapshot event */ private void pruneSnapshots(String type, DomainEvent mostRecentSnapshotEvent) { Iterator<Long> redundantSnapshots = findRedundantSnapshots(type, mostRecentSnapshotEvent); if (redundantSnapshots.hasNext()) { Long sequenceOfFirstSnapshotToPrune = redundantSnapshots.next(); entityManager.createQuery("DELETE FROM SnapshotEventEntry e " + "WHERE e.type = :type " + "AND e.aggregateIdentifier = :aggregateIdentifier " + "AND e.sequenceNumber <= :sequenceOfFirstSnapshotToPrune") .setParameter("type", type) .setParameter("aggregateIdentifier", mostRecentSnapshotEvent.getAggregateIdentifier().asString()) .setParameter("sequenceOfFirstSnapshotToPrune", sequenceOfFirstSnapshotToPrune) .executeUpdate(); } } /** * Finds the first of redundant snapshots, returned as an iterator for convenience purposes. 
* * @param type the type of the aggregate for which to find redundant snapshots * @param snapshotEvent the last appended snapshot event * @return an iterator over the snapshots found */ @SuppressWarnings({"unchecked"}) private Iterator<Long> findRedundantSnapshots(String type, DomainEvent snapshotEvent) { return entityManager.createQuery( "SELECT e.sequenceNumber FROM SnapshotEventEntry e " + "WHERE e.type = :type AND e.aggregateIdentifier = :aggregateIdentifier " + "ORDER BY e.sequenceNumber DESC") .setParameter("type", type) .setParameter("aggregateIdentifier", snapshotEvent.getAggregateIdentifier().asString()) .setFirstResult(maxSnapshotsArchived) .setMaxResults(1) .getResultList().iterator(); } @Override public void visitEvents(EventVisitor visitor) { int first = 0; List<DomainEventEntry> batch; boolean shouldContinue = true; while (shouldContinue) { batch = fetchBatch(first); for (DomainEventEntry entry : batch) { visitor.doWithEvent(entry.getDomainEvent(eventSerializer)); } shouldContinue = (batch.size() >= batchSize); first += batchSize; } } @SuppressWarnings({"unchecked"}) private List<DomainEventEntry> fetchBatch(int startPosition) { List resultList = entityManager.createQuery( "SELECT e FROM DomainEventEntry e ORDER BY e.timeStamp ASC, e.sequenceNumber ASC") .setFirstResult(startPosition) .setMaxResults(batchSize) .getResultList(); entityManager.flush(); entityManager.clear(); return resultList; } /** * Sets the EntityManager for this EventStore to use. This EntityManager must be assigned to a persistence context * that contains the {@link DomainEventEntry} as one of the managed entity types. * * @param entityManager the EntityManager to use. */ @PersistenceContext public void setEntityManager(EntityManager entityManager) { this.entityManager = entityManager; } /** * Registers the data source that allows the EventStore to detect the database type and define the error codes that * represent concurrent access failures. 
* <p/> * Should not be used in combination with {@link #setPersistenceExceptionResolver(PersistenceExceptionResolver)}, * but rather as a shorthand alternative for most common database types. * * @param dataSource A data source providing access to the backing database * @throws SQLException If an error occurs while accessing the dataSource */ public void setDataSource(DataSource dataSource) throws SQLException { if (persistenceExceptionResolver == null) { persistenceExceptionResolver = new SQLErrorCodesResolver(dataSource); } } /** * Sets the persistenceExceptionResolver that will help detect concurrency exceptions from the backing database. * * @param persistenceExceptionResolver the persistenceExceptionResolver that will help detect concurrency * exceptions */ public void setPersistenceExceptionResolver(PersistenceExceptionResolver persistenceExceptionResolver) { this.persistenceExceptionResolver = persistenceExceptionResolver; } /** * Sets the number of events that should be read at each database access. When more than this number of events must * be read to rebuild an aggregate's state, the events are read in batches of this size. Defaults to 100. * <p/> * Tip: if you use a snapshotter, make sure to choose snapshot trigger and batch size such that a single batch will * generally retrieve all events required to rebuild an aggregate's state. * * @param batchSize the number of events to read on each database access. Default to 100. */ public void setBatchSize(int batchSize) { this.batchSize = batchSize; } /** * Sets the maximum number of snapshots to archive for an aggregate. The EventStore will keep at most this number * of snapshots per aggregate. * <p/> * Defaults to {@value #DEFAULT_MAX_SNAPSHOTS_ARCHIVED}. * * @param maxSnapshotsArchived The maximum number of snapshots to archive for an aggregate. A value less than 1 * disables pruning of snapshots. 
*/ public void setMaxSnapshotsArchived(int maxSnapshotsArchived) { this.maxSnapshotsArchived = maxSnapshotsArchived; } private final class BatchingDomainEventStream implements DomainEventStream { private int currentBatchSize; private Iterator<DomainEvent> currentBatch; private DomainEvent next; private final AggregateIdentifier id; private final String typeId; private BatchingDomainEventStream(List<DomainEvent> firstBatch, AggregateIdentifier id, String typeId) { this.id = id; this.typeId = typeId; this.currentBatchSize = firstBatch.size(); this.currentBatch = firstBatch.iterator(); if (currentBatch.hasNext()) { next = currentBatch.next(); } } @Override public boolean hasNext() { return next != null; } @Override public DomainEvent next() { DomainEvent nextEvent = next; if (!currentBatch.hasNext() && currentBatchSize >= batchSize) { logger.debug("Fetching new batch for Aggregate [{}]", id.asString()); currentBatch = fetchBatch(typeId, id, next.getSequenceNumber() + 1).iterator(); } next = currentBatch.hasNext() ? currentBatch.next() : null; return nextEvent; } @Override public DomainEvent peek() { return next; } } }
Update issue #191 Fix in 1.2 development branch Visit events now uses the same optimization mechanism
core/src/main/java/org/axonframework/eventstore/jpa/JpaEventStore.java
Update issue #191 Fix in 1.2 development branch Visit events now uses the same optimization mechanism
<ide><path>ore/src/main/java/org/axonframework/eventstore/jpa/JpaEventStore.java <ide> @Override <ide> public void visitEvents(EventVisitor visitor) { <ide> int first = 0; <del> List<DomainEventEntry> batch; <add> List<byte[]> batch; <ide> boolean shouldContinue = true; <ide> while (shouldContinue) { <ide> batch = fetchBatch(first); <del> for (DomainEventEntry entry : batch) { <del> visitor.doWithEvent(entry.getDomainEvent(eventSerializer)); <add> for (byte[] entry : batch) { <add> visitor.doWithEvent(eventSerializer.deserialize(entry)); <ide> } <ide> shouldContinue = (batch.size() >= batchSize); <ide> first += batchSize; <ide> } <ide> <ide> @SuppressWarnings({"unchecked"}) <del> private List<DomainEventEntry> fetchBatch(int startPosition) { <del> List resultList = entityManager.createQuery( <del> "SELECT e FROM DomainEventEntry e ORDER BY e.timeStamp ASC, e.sequenceNumber ASC") <add> private List<byte[]> fetchBatch(int startPosition) { <add> return entityManager.createQuery( <add> "SELECT e.serializedEvent FROM DomainEventEntry e ORDER BY e.timeStamp ASC, e.sequenceNumber ASC") <ide> .setFirstResult(startPosition) <ide> .setMaxResults(batchSize) <ide> .getResultList(); <del> entityManager.flush(); <del> entityManager.clear(); <del> return resultList; <ide> } <ide> <ide> /**
Java
apache-2.0
9070bd634d5ef298b89181c566086d97a428bd89
0
atlasapi/atlas-deer,atlasapi/atlas-deer
package org.atlasapi.content.v2.model; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; /** This combines the two volatile update time fields with the rest of the fields * from {@link org.atlasapi.entity.Identified} to make the full interface. Most things want to use * this, unless they need the update times split off for reasons of key stability. */ @JsonIgnoreProperties(ignoreUnknown = true) public interface Identified extends WithUpdateTimes, IdentifiedWithoutUpdateTimes { }
atlas-cassandra/src/main/java/org/atlasapi/content/v2/model/Identified.java
package org.atlasapi.content.v2.model; /** This combines the two volatile update time fields with the rest of the fields * from {@link org.atlasapi.entity.Identified} to make the full interface. Most things want to use * this, unless they need the update times split off for reasons of key stability. */ public interface Identified extends WithUpdateTimes, IdentifiedWithoutUpdateTimes { }
ignore unknown properties for cassandra deserialization
atlas-cassandra/src/main/java/org/atlasapi/content/v2/model/Identified.java
ignore unknown properties for cassandra deserialization
<ide><path>tlas-cassandra/src/main/java/org/atlasapi/content/v2/model/Identified.java <ide> package org.atlasapi.content.v2.model; <add> <add>import com.fasterxml.jackson.annotation.JsonIgnoreProperties; <ide> <ide> /** This combines the two volatile update time fields with the rest of the fields <ide> * from {@link org.atlasapi.entity.Identified} to make the full interface. Most things want to use <ide> * this, unless they need the update times split off for reasons of key stability. <ide> */ <add>@JsonIgnoreProperties(ignoreUnknown = true) <ide> public interface Identified extends WithUpdateTimes, IdentifiedWithoutUpdateTimes { <ide> <ide> }
Java
apache-2.0
b4aecce5456368b13fd1df5a37ad69d2c40968ab
0
nssales/OG-Platform,jeorme/OG-Platform,ChinaQuants/OG-Platform,codeaudit/OG-Platform,jeorme/OG-Platform,codeaudit/OG-Platform,jerome79/OG-Platform,McLeodMoores/starling,DevStreet/FinanceAnalytics,jerome79/OG-Platform,DevStreet/FinanceAnalytics,codeaudit/OG-Platform,DevStreet/FinanceAnalytics,ChinaQuants/OG-Platform,jeorme/OG-Platform,McLeodMoores/starling,jeorme/OG-Platform,nssales/OG-Platform,jerome79/OG-Platform,nssales/OG-Platform,DevStreet/FinanceAnalytics,McLeodMoores/starling,codeaudit/OG-Platform,nssales/OG-Platform,ChinaQuants/OG-Platform,McLeodMoores/starling,jerome79/OG-Platform,ChinaQuants/OG-Platform
/** * Copyright (C) 2009 - 2010 by OpenGamma Inc. * * Please see distribution for license. */ package com.opengamma.livedata.server; import java.util.HashSet; import java.util.Set; import java.util.Timer; import java.util.TimerTask; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.Lifecycle; import com.opengamma.id.DomainSpecificIdentifier; import com.opengamma.livedata.LiveDataSpecificationImpl; import com.opengamma.util.ArgumentChecker; /** * Stores persistent subscriptions in persistent storage so they're not lost if * the server crashes. * <p> * If you modify the list of persistent subscriptions in persistent storage by * editing the persistent storage (DB/file/whatever) using external tools while * the server is down, these changes will be reflected on the server the next * time it starts. * <p> * This beans depends-on the Live Data Server, and any Spring configuration must reflect * this. See {@link http://jira.springframework.org/browse/SPR-2325}. 
* * @author pietari */ abstract public class AbstractPersistentSubscriptionManager implements Lifecycle { private static final Logger s_logger = LoggerFactory .getLogger(AbstractPersistentSubscriptionManager.class); public static final long DEFAULT_SAVE_PERIOD = 60000; private final AbstractLiveDataServer _server; private final Timer _timer; private final long _savePeriod; private final SaveTask _saveTask = new SaveTask(); private Set<PersistentSubscription> _previousSavedState = null; private Set<PersistentSubscription> _persistentSubscriptions = new HashSet<PersistentSubscription>(); private volatile boolean _isRunning = false; public AbstractPersistentSubscriptionManager(AbstractLiveDataServer server) { this(server, new Timer("PersistentSubscriptionManager Timer"), DEFAULT_SAVE_PERIOD); } public AbstractPersistentSubscriptionManager(AbstractLiveDataServer server, Timer timer, long savePeriod) { ArgumentChecker.checkNotNull(server, "Live Data Server"); ArgumentChecker.checkNotNull(timer, "Timer"); if (savePeriod <= 0) { throw new IllegalArgumentException("Please give positive save period"); } _server = server; _timer = timer; _savePeriod = savePeriod; } private class SaveTask extends TimerTask { @Override public void run() { try { save(); } catch (RuntimeException e) { s_logger.error("Saving persistent subscriptions to storage failed", e); } } } @Override public boolean isRunning() { return _isRunning; } @Override public void start() { refresh(); _timer.schedule(_saveTask, _savePeriod, _savePeriod); _isRunning = true; } @Override public void stop() { _saveTask.cancel(); _isRunning = false; } public synchronized void refresh() { s_logger.debug("Refreshing persistent subscriptions from storage"); clear(); readFromStorage(); readFromServer(); updateServer(); s_logger.info("Refreshed persistent subscriptions from storage. 
There are currently " + _persistentSubscriptions.size() + " persistent subscriptions."); } /** * Creates a persistent subscription on the server for any persistent * subscriptions which are not yet there. */ private void updateServer() { for (PersistentSubscription sub : _persistentSubscriptions) { LiveDataSpecificationImpl spec = new LiveDataSpecificationImpl( new DomainSpecificIdentifier(_server.getUniqueIdDomain(), sub.getId())); Subscription existingSub = _server.getSubscription(spec); if (existingSub == null || !existingSub.isPersistent()) { s_logger.info("Creating a persistent subscription on server for " + spec); try { _server.subscribe(spec, true); } catch (Exception e) { s_logger.error("Creating a persistent subscription failed for " + spec, e); } } } } public synchronized void save() { s_logger.debug("Dumping persistent subscriptions to storage"); clear(); readFromServer(); // Only save if changed if (_previousSavedState == null || !_previousSavedState.equals(_persistentSubscriptions)) { s_logger.info("A change to persistent subscriptions detected, saving " + _persistentSubscriptions.size() + " subscriptions to storage."); saveToStorage(_persistentSubscriptions); _previousSavedState = new HashSet<PersistentSubscription>(_persistentSubscriptions); } else { s_logger.debug("No changes to persistent subscriptions detected."); } s_logger.debug("Dumped persistent subscriptions to storage"); } public synchronized Set<String> getPersistentSubscriptions() { clear(); readFromServer(); HashSet<String> returnValue = new HashSet<String>(); for (PersistentSubscription ps : _persistentSubscriptions) { returnValue.add(ps.getId()); } return returnValue; } public synchronized void addPersistentSubscription(String securityUniqueId) { addPersistentSubscription(new PersistentSubscription(securityUniqueId)); updateServer(); } public synchronized boolean removePersistentSubscription( String securityUniqueId) { PersistentSubscription ps = new 
PersistentSubscription(securityUniqueId); boolean removed = _persistentSubscriptions.remove(ps); Subscription sub = _server.getSubscription(securityUniqueId); if (sub != null && sub.isPersistent()) { _server.changePersistent(sub, false); } return removed; } private void clear() { _persistentSubscriptions.clear(); } protected void addPersistentSubscription(PersistentSubscription sub) { _persistentSubscriptions.add(sub); } /** * Refreshes persistent subscriptions from the latest status on the server. */ private void readFromServer() { for (Subscription sub : _server.getSubscriptions()) { if (sub.isPersistent()) { addPersistentSubscription(new PersistentSubscription(sub .getSecurityUniqueId())); } } } /** * Reads entries from persistent storage (DB, flat file, ...) and calls * {@link addPersistentSubscription(PersistentSubscription sub)} for each one. */ protected abstract void readFromStorage(); /** * Saves entries to persistent storage (DB, flat file, ...) */ public abstract void saveToStorage(Set<PersistentSubscription> newState); }
src/com/opengamma/livedata/server/AbstractPersistentSubscriptionManager.java
/** * Copyright (C) 2009 - 2010 by OpenGamma Inc. * * Please see distribution for license. */ package com.opengamma.livedata.server; import java.util.HashSet; import java.util.Set; import java.util.Timer; import java.util.TimerTask; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.opengamma.id.DomainSpecificIdentifier; import com.opengamma.livedata.LiveDataSpecificationImpl; import com.opengamma.util.ArgumentChecker; /** * Stores persistent subscriptions in persistent storage so they're not lost if * the server crashes. * <p> * If you modify the list of persistent subscriptions in persistent storage by * editing the persistent storage (DB/file/whatever) using external tools while * the server is down, these changes will be reflected on the server the next * time it starts. * * @author pietari */ abstract public class AbstractPersistentSubscriptionManager { private static final Logger s_logger = LoggerFactory .getLogger(AbstractPersistentSubscriptionManager.class); public static final long DEFAULT_SAVE_PERIOD = 60000; private final AbstractLiveDataServer _server; private Set<PersistentSubscription> _previousSavedState = null; private Set<PersistentSubscription> _persistentSubscriptions = new HashSet<PersistentSubscription>(); private volatile boolean _initialised = false; public AbstractPersistentSubscriptionManager(AbstractLiveDataServer server) { this(server, new Timer("PersistentSubscriptionManager Timer"), DEFAULT_SAVE_PERIOD); } public AbstractPersistentSubscriptionManager(AbstractLiveDataServer server, Timer timer, long savePeriod) { ArgumentChecker.checkNotNull(server, "Live Data Server"); _server = server; timer.schedule(new SaveTask(), savePeriod, savePeriod); } private class SaveTask extends TimerTask { @Override public void run() { try { if (!_initialised) { refresh(); _initialised = true; } save(); } catch (RuntimeException e) { s_logger.error("Saving persistent subscriptions to storage failed", e); } } } public synchronized void 
refresh() { s_logger.debug("Refreshing persistent subscriptions from storage"); clear(); readFromStorage(); readFromServer(); updateServer(); s_logger.info("Refreshed persistent subscriptions from storage. There are currently " + _persistentSubscriptions.size() + " persistent subscriptions."); } /** * Creates a persistent subscription on the server for any persistent * subscriptions which are not yet there. */ private void updateServer() { for (PersistentSubscription sub : _persistentSubscriptions) { LiveDataSpecificationImpl spec = new LiveDataSpecificationImpl( new DomainSpecificIdentifier(_server.getUniqueIdDomain(), sub.getId())); Subscription existingSub = _server.getSubscription(spec); if (existingSub == null || !existingSub.isPersistent()) { s_logger.info("Creating a persistent subscription on server for " + spec); try { _server.subscribe(spec, true); } catch (Exception e) { s_logger.error("Creating a persistent subscription failed for " + spec, e); } } } } public synchronized void save() { s_logger.debug("Dumping persistent subscriptions to storage"); clear(); readFromServer(); // Only save if changed if (_previousSavedState == null || !_previousSavedState.equals(_persistentSubscriptions)) { s_logger.info("A change to persistent subscriptions detected, saving " + _persistentSubscriptions.size() + " subscriptions to storage."); saveToStorage(_persistentSubscriptions); _previousSavedState = new HashSet<PersistentSubscription>(_persistentSubscriptions); } else { s_logger.debug("No changes to persistent subscriptions detected."); } s_logger.debug("Dumped persistent subscriptions to storage"); } public synchronized Set<String> getPersistentSubscriptions() { clear(); readFromServer(); HashSet<String> returnValue = new HashSet<String>(); for (PersistentSubscription ps : _persistentSubscriptions) { returnValue.add(ps.getId()); } return returnValue; } public synchronized void addPersistentSubscription(String securityUniqueId) { addPersistentSubscription(new 
PersistentSubscription(securityUniqueId)); updateServer(); } public synchronized boolean removePersistentSubscription( String securityUniqueId) { PersistentSubscription ps = new PersistentSubscription(securityUniqueId); boolean removed = _persistentSubscriptions.remove(ps); Subscription sub = _server.getSubscription(securityUniqueId); if (sub != null && sub.isPersistent()) { _server.changePersistent(sub, false); } return removed; } private void clear() { _persistentSubscriptions.clear(); } protected void addPersistentSubscription(PersistentSubscription sub) { _persistentSubscriptions.add(sub); } /** * Refreshes persistent subscriptions from the latest status on the server. */ private void readFromServer() { for (Subscription sub : _server.getSubscriptions()) { if (sub.isPersistent()) { addPersistentSubscription(new PersistentSubscription(sub .getSecurityUniqueId())); } } } /** * Reads entries from persistent storage (DB, flat file, ...) and calls * {@link addPersistentSubscription(PersistentSubscription sub)} for each one. */ protected abstract void readFromStorage(); /** * Saves entries to persistent storage (DB, flat file, ...) */ public abstract void saveToStorage(Set<PersistentSubscription> newState); }
Proper dependencies
src/com/opengamma/livedata/server/AbstractPersistentSubscriptionManager.java
Proper dependencies
<ide><path>rc/com/opengamma/livedata/server/AbstractPersistentSubscriptionManager.java <ide> <ide> import org.slf4j.Logger; <ide> import org.slf4j.LoggerFactory; <add>import org.springframework.context.Lifecycle; <ide> <ide> import com.opengamma.id.DomainSpecificIdentifier; <ide> import com.opengamma.livedata.LiveDataSpecificationImpl; <ide> * editing the persistent storage (DB/file/whatever) using external tools while <ide> * the server is down, these changes will be reflected on the server the next <ide> * time it starts. <add> * <p> <add> * This beans depends-on the Live Data Server, and any Spring configuration must reflect <add> * this. See {@link http://jira.springframework.org/browse/SPR-2325}. <ide> * <ide> * @author pietari <ide> */ <del>abstract public class AbstractPersistentSubscriptionManager { <add>abstract public class AbstractPersistentSubscriptionManager implements Lifecycle { <ide> <ide> private static final Logger s_logger = LoggerFactory <ide> .getLogger(AbstractPersistentSubscriptionManager.class); <ide> public static final long DEFAULT_SAVE_PERIOD = 60000; <ide> <ide> private final AbstractLiveDataServer _server; <add> private final Timer _timer; <add> private final long _savePeriod; <add> private final SaveTask _saveTask = new SaveTask(); <ide> <ide> private Set<PersistentSubscription> _previousSavedState = null; <ide> private Set<PersistentSubscription> _persistentSubscriptions = new HashSet<PersistentSubscription>(); <del> private volatile boolean _initialised = false; <add> private volatile boolean _isRunning = false; <ide> <ide> public AbstractPersistentSubscriptionManager(AbstractLiveDataServer server) { <ide> this(server, new Timer("PersistentSubscriptionManager Timer"), <ide> public AbstractPersistentSubscriptionManager(AbstractLiveDataServer server, <ide> Timer timer, long savePeriod) { <ide> ArgumentChecker.checkNotNull(server, "Live Data Server"); <add> ArgumentChecker.checkNotNull(timer, "Timer"); <add> if (savePeriod <= 0) { <add> 
throw new IllegalArgumentException("Please give positive save period"); <add> } <add> <ide> _server = server; <del> timer.schedule(new SaveTask(), savePeriod, savePeriod); <add> _timer = timer; <add> _savePeriod = savePeriod; <ide> } <ide> <ide> private class SaveTask extends TimerTask { <ide> @Override <ide> public void run() { <ide> try { <del> if (!_initialised) { <del> refresh(); <del> _initialised = true; <del> } <ide> save(); <ide> } catch (RuntimeException e) { <ide> s_logger.error("Saving persistent subscriptions to storage failed", e); <ide> } <ide> } <add> } <add> <add> <add> @Override <add> public boolean isRunning() { <add> return _isRunning; <add> } <add> <add> @Override <add> public void start() { <add> refresh(); <add> _timer.schedule(_saveTask, _savePeriod, _savePeriod); <add> _isRunning = true; <add> } <add> <add> @Override <add> public void stop() { <add> _saveTask.cancel(); <add> _isRunning = false; <ide> } <ide> <ide> public synchronized void refresh() {
Java
bsd-2-clause
08b8537e1a9e45272ed2ebd34025b1ddf8ec803a
0
alpha-asp/Alpha,AntoniusW/Alpha,alpha-asp/Alpha
/** * Copyright (c) 2016-2019, the Alpha Team. * All rights reserved. * * Additional changes made by Siemens. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1) Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2) Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package at.ac.tuwien.kr.alpha.solver; import at.ac.tuwien.kr.alpha.common.*; import at.ac.tuwien.kr.alpha.common.atoms.Atom; import at.ac.tuwien.kr.alpha.common.atoms.BasicAtom; import at.ac.tuwien.kr.alpha.common.atoms.ComparisonAtom; import at.ac.tuwien.kr.alpha.common.atoms.Literal; import at.ac.tuwien.kr.alpha.common.terms.ConstantTerm; import at.ac.tuwien.kr.alpha.grounder.Grounder; import at.ac.tuwien.kr.alpha.grounder.NonGroundRule; import at.ac.tuwien.kr.alpha.grounder.ProgramAnalyzingGrounder; import at.ac.tuwien.kr.alpha.grounder.Substitution; import at.ac.tuwien.kr.alpha.grounder.atoms.RuleAtom; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristic; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristicFactory; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristicFactory.Heuristic; import at.ac.tuwien.kr.alpha.solver.heuristics.ChainedBranchingHeuristics; import at.ac.tuwien.kr.alpha.solver.heuristics.NaiveHeuristic; import at.ac.tuwien.kr.alpha.solver.learning.GroundConflictNoGoodLearner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.function.Consumer; import static at.ac.tuwien.kr.alpha.Util.oops; import static at.ac.tuwien.kr.alpha.common.Literals.*; import static at.ac.tuwien.kr.alpha.solver.ThriceTruth.MBT; import static at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristic.DEFAULT_CHOICE_LITERAL; import static at.ac.tuwien.kr.alpha.solver.learning.GroundConflictNoGoodLearner.ConflictAnalysisResult.UNSAT; /** * The new default solver employed in Alpha. * Copyright (c) 2016-2019, the Alpha Team. 
*/ public class DefaultSolver extends AbstractSolver implements SolverMaintainingStatistics { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultSolver.class); private final NoGoodStore store; private final ChoiceManager choiceManager; private final WritableAssignment assignment; private final GroundConflictNoGoodLearner learner; private final BranchingHeuristic branchingHeuristic; private boolean initialize = true; private int mbtAtFixpoint; private int conflictsAfterClosing; private final boolean disableJustifications; private boolean disableJustificationAfterClosing = true; // Keep disabled for now, case not fully worked out yet. private final PerformanceLog performanceLog; public DefaultSolver(AtomStore atomStore, Grounder grounder, NoGoodStore store, WritableAssignment assignment, Random random, Heuristic branchingHeuristic, boolean debugInternalChecks, boolean disableJustifications) { super(atomStore, grounder); this.assignment = assignment; this.store = store; this.choiceManager = new ChoiceManager(assignment, store, debugInternalChecks); this.learner = new GroundConflictNoGoodLearner(assignment); this.branchingHeuristic = ChainedBranchingHeuristics.chainOf( BranchingHeuristicFactory.getInstance(branchingHeuristic, grounder, assignment, choiceManager, random), new NaiveHeuristic(choiceManager)); this.disableJustifications = disableJustifications; this.performanceLog = new PerformanceLog(choiceManager, (TrailAssignment) assignment, 1000); } @Override protected boolean tryAdvance(Consumer<? super AnswerSet> action) { boolean didChange = false; // Initially, get NoGoods from grounder. 
if (initialize) { performanceLog.initialize(); Map<Integer, NoGood> obtained = grounder.getNoGoods(assignment); didChange = !obtained.isEmpty(); if (!ingest(obtained)) { logStats(); return false; } initialize = false; } else if (assignment.getDecisionLevel() == 0) { logStats(); return false; } else { // We already found one Answer-Set and are requested to find another one. // Create enumeration NoGood to avoid finding the same Answer-Set twice. final NoGood enumerationNoGood = choiceManager.computeEnumeration(); final int backjumpLevel = assignment.minimumConflictLevel(enumerationNoGood); if (backjumpLevel == -1) { throw oops("Enumeration nogood is not violated"); } if (backjumpLevel == 0) { // Search space exhausted (only happens if first choice is for TRUE at decision level 1 for an atom that was MBT at decision level 0 already). return false; } // Backjump instead of backtrackSlow, enumerationNoGood will invert last choice. choiceManager.backjump(backjumpLevel - 1); LOGGER.debug("Adding enumeration nogood: {}", enumerationNoGood); if (!addAndBackjumpIfNecessary(grounder.register(enumerationNoGood), enumerationNoGood)) { return false; } } boolean afterAllAtomsAssigned = false; // Try all assignments until grounder reports no more NoGoods and all of them are satisfied while (true) { performanceLog.infoIfTimeForOutput(LOGGER); ConflictCause conflictCause = store.propagate(); didChange |= store.didPropagate(); LOGGER.trace("Assignment after propagation is: {}", assignment); if (conflictCause != null) { // Learn from conflict. 
NoGood violatedNoGood = conflictCause.getViolatedNoGood(); LOGGER.debug("Violating assignment is: {}", assignment); branchingHeuristic.violatedNoGood(violatedNoGood); if (!afterAllAtomsAssigned) { if (!learnBackjumpAddFromConflict(conflictCause)) { logStats(); return false; } } else { LOGGER.debug("Assignment is violated after all unassigned atoms have been assigned false."); conflictsAfterClosing++; if (!treatConflictAfterClosing(violatedNoGood)) { return false; } afterAllAtomsAssigned = false; } } else if (didChange) { // Ask the grounder for new NoGoods, then propagate (again). LOGGER.trace("Doing propagation step."); grounder.updateAssignment(assignment.getNewPositiveAssignmentsIterator()); Map<Integer, NoGood> obtained = grounder.getNoGoods(assignment); didChange = !obtained.isEmpty(); if (!ingest(obtained)) { logStats(); return false; } } else if (choose()) { LOGGER.debug("Did choice."); didChange = true; } else if (close()) { LOGGER.debug("Closed unassigned known atoms (assigning FALSE)."); afterAllAtomsAssigned = true; } else if (assignment.getMBTCount() == 0) { // NOTE: If we would do optimization, we would now have a guaranteed upper bound. AnswerSet as = translate(assignment.getTrueAssignments()); LOGGER.debug("Answer-Set found: {}", as); action.accept(as); logStats(); return true; } else { LOGGER.debug("Backtracking from wrong choices ({} MBTs).", assignment.getMBTCount()); if (!justifyMbtAndBacktrack()) { return false; } afterAllAtomsAssigned = false; } } } /** * Adds a noGood to the store and in case of out-of-order literals causing another conflict, triggers further backjumping. 
* @param noGoodId * @param noGood */ private boolean addAndBackjumpIfNecessary(int noGoodId, NoGood noGood) { while (store.add(noGoodId, noGood) != null) { LOGGER.debug("Adding noGood (again) caused conflict, computing real backjumping level now."); int backjumpLevel = learner.computeConflictFreeBackjumpingLevel(noGood); if (backjumpLevel < 0) { return false; } choiceManager.backjump(backjumpLevel); if (store.propagate() != null) { throw oops("Violated NoGood after backtracking."); } } return true; } /** * Analyzes the conflict and either learns a new NoGood (causing backjumping and addition to the NoGood store), * or backtracks the choice causing the conflict. * @return false iff the analysis result shows that the set of NoGoods is unsatisfiable. */ private boolean learnBackjumpAddFromConflict(ConflictCause conflictCause) { GroundConflictNoGoodLearner.ConflictAnalysisResult analysisResult = learner.analyzeConflictingNoGood(conflictCause.getViolatedNoGood()); LOGGER.debug("Analysis result: {}", analysisResult); if (analysisResult == UNSAT) { // Halt if unsatisfiable. return false; } branchingHeuristic.analyzedConflict(analysisResult); if (analysisResult.learnedNoGood == null && analysisResult.clearLastChoiceAfterBackjump) { // TODO: Temporarily abort resolution with backtrackFast instead of learning a too large nogood. 
return backtrack(); } if (analysisResult.learnedNoGood != null) { choiceManager.backjump(analysisResult.backjumpLevel); final NoGood learnedNoGood = analysisResult.learnedNoGood; int noGoodId = grounder.register(learnedNoGood); if (!addAndBackjumpIfNecessary(noGoodId, learnedNoGood)) { return false; } return true; } choiceManager.backjump(analysisResult.backjumpLevel); choiceManager.backtrackFast(); if (store.propagate() != null) { throw oops("Violated NoGood after backtracking."); } if (!store.didPropagate()) { throw oops("Nothing to propagate after backtracking from conflict-causing choice"); } return true; } private boolean justifyMbtAndBacktrack() { mbtAtFixpoint++; // Run justification only if enabled and possible. if (disableJustifications || !(grounder instanceof ProgramAnalyzingGrounder)) { if (!backtrack()) { logStats(); return false; } return true; } ProgramAnalyzingGrounder analyzingGrounder = (ProgramAnalyzingGrounder) grounder; // Justify one MBT assigned atom. Integer atomToJustify = assignment.getBasicAtomAssignedMBT(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching for justification of {} / {}", atomToJustify, atomStore.atomToString(atomToJustify)); LOGGER.debug("Assignment is (TRUE part only): {}", translate(assignment.getTrueAssignments())); } Set<Literal> reasonsForUnjustified = analyzingGrounder.justifyAtom(atomToJustify, assignment); NoGood noGood = noGoodFromJustificationReasons(atomToJustify, reasonsForUnjustified); int noGoodID = grounder.register(noGood); Map<Integer, NoGood> obtained = new LinkedHashMap<>(); obtained.put(noGoodID, noGood); LOGGER.debug("Learned NoGood is: {}", atomStore.noGoodToString(noGood)); // Add NoGood and trigger backjumping. if (!ingest(obtained)) { logStats(); return false; } return true; } private NoGood noGoodFromJustificationReasons(int atomToJustify, Set<Literal> reasonsForUnjustified) { // Turn the justification into a NoGood. 
int[] reasons = new int[reasonsForUnjustified.size() + 1]; reasons[0] = atomToLiteral(atomToJustify); int arrpos = 1; for (Literal literal : reasonsForUnjustified) { reasons[arrpos++] = atomToLiteral(atomStore.get(literal.getAtom()), !literal.isNegated()); } return new NoGood(reasons); } private boolean treatConflictAfterClosing(NoGood violatedNoGood) { if (disableJustificationAfterClosing || disableJustifications || !(grounder instanceof ProgramAnalyzingGrounder)) { // Will not learn from violated NoGood, do simple backtrack. LOGGER.debug("NoGood was violated after all unassigned atoms were assigned to false; will not learn from it; skipping."); if (!backtrack()) { logStats(); return false; } return true; } ProgramAnalyzingGrounder analyzingGrounder = (ProgramAnalyzingGrounder) grounder; LOGGER.debug("Justifying atoms in violated nogood."); LinkedHashSet<Integer> toJustify = new LinkedHashSet<>(); // Find those literals in violatedNoGood that were just assigned false. for (Integer literal : violatedNoGood) { if (assignment.getImpliedBy(atomOf(literal)) == TrailAssignment.CLOSING_INDICATOR_NOGOOD) { toJustify.add(literal); } } // Since the violatedNoGood may contain atoms other than BasicAtom, these have to be treated. Map<Integer, NoGood> obtained = new LinkedHashMap<>(); Iterator<Integer> toJustifyIterator = toJustify.iterator(); ArrayList<Integer> ruleAtomReplacements = new ArrayList<>(); while (toJustifyIterator.hasNext()) { Integer literal = toJustifyIterator.next(); Atom atom = atomStore.get(atomOf(literal)); if (atom instanceof BasicAtom) { continue; } if (!(atom instanceof RuleAtom)) { // Ignore atoms other than RuleAtom. toJustifyIterator.remove(); continue; } // For RuleAtoms in toJustify the corresponding ground body contains BasicAtoms that have been assigned FALSE in the closing. // First, translate RuleAtom back to NonGroundRule + Substitution. 
String ruleId = (String) ((ConstantTerm<?>)atom.getTerms().get(0)).getObject(); NonGroundRule nonGroundRule = analyzingGrounder.getNonGroundRule(Integer.parseInt(ruleId)); String substitution = (String) ((ConstantTerm<?>)atom.getTerms().get(1)).getObject(); Substitution groundingSubstitution = Substitution.fromString(substitution); Rule rule = nonGroundRule.getRule(); // Find ground literals in the body that have been assigned false and justify those. for (Literal bodyLiteral : rule.getBody()) { Atom groundAtom = bodyLiteral.getAtom().substitute(groundingSubstitution); if (groundAtom instanceof ComparisonAtom || analyzingGrounder.isFact(groundAtom)) { // Facts and ComparisonAtoms are always true, no justification needed. continue; } int groundAtomId = atomStore.get(groundAtom); Assignment.Entry entry = assignment.get(groundAtomId); // Check if atom was assigned to FALSE during the closing. if (entry.getImpliedBy() == TrailAssignment.CLOSING_INDICATOR_NOGOOD) { ruleAtomReplacements.add(atomToNegatedLiteral(groundAtomId)); } } toJustifyIterator.remove(); } toJustify.addAll(ruleAtomReplacements); for (Integer literalToJustify : toJustify) { LOGGER.debug("Searching for justification(s) of {} / {}", toJustify, atomStore.atomToString(atomOf(literalToJustify))); Set<Literal> reasonsForUnjustified = analyzingGrounder.justifyAtom(atomOf(literalToJustify), assignment); NoGood noGood = noGoodFromJustificationReasons(atomOf(literalToJustify), reasonsForUnjustified); int noGoodID = grounder.register(noGood); obtained.put(noGoodID, noGood); LOGGER.debug("Learned NoGood is: {}", atomStore.noGoodToString(noGood)); } // Backtrack to remove the violation. if (!backtrack()) { logStats(); return false; } // Add newly obtained noGoods. if (!ingest(obtained)) { logStats(); return false; } return true; } private boolean close() { return assignment.closeUnassignedAtoms(); } /** * Iterative implementation of recursive backtracking. 
* * @return {@code true} iff it is possible to backtrack even further, {@code false} otherwise */ private boolean backtrack() { while (assignment.getDecisionLevel() != 0) { final Assignment.Entry choice = choiceManager.backtrackSlow(); store.propagate(); if (choice == null) { LOGGER.debug("Backtracking further, because last choice was already backtracked."); continue; } final int lastChoice = choice.getAtom(); final boolean choiceValue = choice.getTruth().toBoolean(); // Chronological backtracking: choose inverse now. // Choose FALSE if the previous choice was for TRUE and the atom was not already MBT at that time. ThriceTruth lastChoiceTruth = assignment.getTruth(lastChoice); if (choiceValue && MBT.equals(lastChoiceTruth)) { LOGGER.debug("Backtracking further, because last choice was MBT before choosing TRUE."); continue; } // If choice was assigned at lower decision level (due to added NoGoods), no inverted choice should be done. if (choice.getImpliedBy() != null) { LOGGER.debug("Last choice is now implied by {}", choice.getImpliedBy()); //if (choice.getDecisionLevel() == assignment.getDecisionLevel() + 1) { // throw oops("Choice was assigned but not at a lower decision level"); //} LOGGER.debug("Backtracking further, because last choice was assigned at a lower decision level."); continue; } // Choose inverse if it is not yet already assigned TRUE or FALSE. if (lastChoiceTruth == null || (lastChoiceTruth.isMBT() && !choiceValue)) { LOGGER.debug("Choosing inverse."); choiceManager.choose(new Choice(lastChoice, !choiceValue, true)); break; } // Continue backtracking. 
} return assignment.getDecisionLevel() != 0; } private boolean ingest(Map<Integer, NoGood> obtained) { branchingHeuristic.newNoGoods(obtained.values()); assignment.growForMaxAtomId(); store.growForMaxAtomId(atomStore.getMaxAtomId()); LinkedList<Map.Entry<Integer, NoGood>> noGoodsToAdd = new LinkedList<>(obtained.entrySet()); Map.Entry<Integer, NoGood> entry; while ((entry = noGoodsToAdd.poll()) != null) { if (NoGood.UNSAT.equals(entry.getValue())) { // Empty NoGood cannot be satisfied, program is unsatisfiable. return false; } final ConflictCause conflictCause = store.add(entry.getKey(), entry.getValue()); if (conflictCause == null) { // There is no conflict, all is fine. Just skip conflict treatment and carry on. continue; } final NoGood learnedNoGood = fixContradiction(entry, conflictCause); if (learnedNoGood != null) { noGoodsToAdd.addFirst(new AbstractMap.SimpleEntry<>(grounder.register(learnedNoGood), learnedNoGood)); } } return true; } private NoGood fixContradiction(Map.Entry<Integer, NoGood> noGoodEntry, ConflictCause conflictCause) { LOGGER.debug("Attempting to fix violation of {} caused by {}", noGoodEntry.getValue(), conflictCause); if (conflictCause.getViolatedChoice() != null) { choiceManager.backjump(conflictCause.getViolatedChoice().getDecisionLevel()); choiceManager.backtrackFast(); return null; } GroundConflictNoGoodLearner.ConflictAnalysisResult conflictAnalysisResult = learner.analyzeConflictingNoGood(conflictCause.getViolatedNoGood()); if (conflictAnalysisResult == UNSAT) { return NoGood.UNSAT; } branchingHeuristic.analyzedConflict(conflictAnalysisResult); choiceManager.backjump(conflictAnalysisResult.backjumpLevel); if (conflictAnalysisResult.clearLastChoiceAfterBackjump) { choiceManager.backtrackFast(); } // If NoGood was learned, add it to the store. // Note that the learned NoGood may cause further conflicts, since propagation on lower decision levels is lazy, // hence backtracking once might not be enough to remove the real conflict cause. 
if (!addAndBackjumpIfNecessary(noGoodEntry.getKey(), noGoodEntry.getValue())) { return NoGood.UNSAT; } return conflictAnalysisResult.learnedNoGood; } private boolean choose() { choiceManager.addChoiceInformation(grounder.getChoiceAtoms()); choiceManager.updateAssignments(); // Hint: for custom heuristics, evaluate them here and pick a value if the heuristics suggests one. int literal; if ((literal = branchingHeuristic.chooseLiteral()) == DEFAULT_CHOICE_LITERAL) { LOGGER.debug("No choices!"); return false; } else if (LOGGER.isDebugEnabled()) { LOGGER.debug("Branching heuristic chose literal {}", atomStore.literalToString(literal)); } choiceManager.choose(new Choice(literal, false)); return true; } @Override public int getNumberOfChoices() { return choiceManager.getChoices(); } @Override public int getNumberOfBacktracks() { return choiceManager.getBacktracks(); } @Override public int getNumberOfBacktracksWithinBackjumps() { return choiceManager.getBacktracksWithinBackjumps(); } @Override public int getNumberOfBackjumps() { return choiceManager.getBackjumps(); } @Override public int getNumberOfBacktracksDueToRemnantMBTs() { return mbtAtFixpoint; } @Override public int getNumberOfConflictsAfterClosing() { return conflictsAfterClosing; } private void logStats() { LOGGER.debug(getStatisticsString()); } }
src/main/java/at/ac/tuwien/kr/alpha/solver/DefaultSolver.java
/** * Copyright (c) 2016-2019, the Alpha Team. * All rights reserved. * * Additional changes made by Siemens. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1) Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2) Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package at.ac.tuwien.kr.alpha.solver; import at.ac.tuwien.kr.alpha.common.*; import at.ac.tuwien.kr.alpha.common.atoms.Atom; import at.ac.tuwien.kr.alpha.common.atoms.BasicAtom; import at.ac.tuwien.kr.alpha.common.atoms.ComparisonAtom; import at.ac.tuwien.kr.alpha.common.atoms.Literal; import at.ac.tuwien.kr.alpha.common.terms.ConstantTerm; import at.ac.tuwien.kr.alpha.grounder.Grounder; import at.ac.tuwien.kr.alpha.grounder.NonGroundRule; import at.ac.tuwien.kr.alpha.grounder.ProgramAnalyzingGrounder; import at.ac.tuwien.kr.alpha.grounder.Substitution; import at.ac.tuwien.kr.alpha.grounder.atoms.RuleAtom; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristic; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristicFactory; import at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristicFactory.Heuristic; import at.ac.tuwien.kr.alpha.solver.heuristics.ChainedBranchingHeuristics; import at.ac.tuwien.kr.alpha.solver.heuristics.NaiveHeuristic; import at.ac.tuwien.kr.alpha.solver.learning.GroundConflictNoGoodLearner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.*; import java.util.function.Consumer; import static at.ac.tuwien.kr.alpha.Util.oops; import static at.ac.tuwien.kr.alpha.common.Literals.*; import static at.ac.tuwien.kr.alpha.solver.ThriceTruth.MBT; import static at.ac.tuwien.kr.alpha.solver.heuristics.BranchingHeuristic.DEFAULT_CHOICE_LITERAL; import static at.ac.tuwien.kr.alpha.solver.learning.GroundConflictNoGoodLearner.ConflictAnalysisResult.UNSAT; /** * The new default solver employed in Alpha. * Copyright (c) 2016-2019, the Alpha Team. 
*/ public class DefaultSolver extends AbstractSolver implements SolverMaintainingStatistics { private static final Logger LOGGER = LoggerFactory.getLogger(DefaultSolver.class); private final NoGoodStore store; private final ChoiceManager choiceManager; private final WritableAssignment assignment; private final GroundConflictNoGoodLearner learner; private final BranchingHeuristic branchingHeuristic; private boolean initialize = true; private int mbtAtFixpoint; private int conflictsAfterClosing; private final boolean disableJustifications; private boolean disableJustificationAfterClosing = true; // Keep disabled for now, case not fully worked out yet. private final PerformanceLog performanceLog; public DefaultSolver(AtomStore atomStore, Grounder grounder, NoGoodStore store, WritableAssignment assignment, Random random, Heuristic branchingHeuristic, boolean debugInternalChecks, boolean disableJustifications) { super(atomStore, grounder); this.assignment = assignment; this.store = store; this.choiceManager = new ChoiceManager(assignment, store, debugInternalChecks); this.learner = new GroundConflictNoGoodLearner(assignment); this.branchingHeuristic = ChainedBranchingHeuristics.chainOf( BranchingHeuristicFactory.getInstance(branchingHeuristic, grounder, assignment, choiceManager, random), new NaiveHeuristic(choiceManager)); this.disableJustifications = disableJustifications; this.performanceLog = new PerformanceLog(choiceManager, (TrailAssignment) assignment, 1000l); } @Override protected boolean tryAdvance(Consumer<? super AnswerSet> action) { boolean didChange = false; // Initially, get NoGoods from grounder. 
if (initialize) { performanceLog.initialize(); Map<Integer, NoGood> obtained = grounder.getNoGoods(assignment); didChange = !obtained.isEmpty(); if (!ingest(obtained)) { logStats(); return false; } initialize = false; } else if (assignment.getDecisionLevel() == 0) { logStats(); return false; } else { // We already found one Answer-Set and are requested to find another one. // Create enumeration NoGood to avoid finding the same Answer-Set twice. final NoGood enumerationNoGood = choiceManager.computeEnumeration(); final int backjumpLevel = assignment.minimumConflictLevel(enumerationNoGood); if (backjumpLevel == -1) { throw oops("Enumeration nogood is not violated"); } if (backjumpLevel == 0) { // Search space exhausted (only happens if first choice is for TRUE at decision level 1 for an atom that was MBT at decision level 0 already). return false; } // Backjump instead of backtrackSlow, enumerationNoGood will invert last choice. choiceManager.backjump(backjumpLevel - 1); LOGGER.debug("Adding enumeration nogood: {}", enumerationNoGood); if (!addAndBackjumpIfNecessary(grounder.register(enumerationNoGood), enumerationNoGood)) { return false; } } boolean afterAllAtomsAssigned = false; // Try all assignments until grounder reports no more NoGoods and all of them are satisfied while (true) { performanceLog.infoIfTimeForOutput(LOGGER); ConflictCause conflictCause = store.propagate(); didChange |= store.didPropagate(); LOGGER.trace("Assignment after propagation is: {}", assignment); if (conflictCause != null) { // Learn from conflict. 
NoGood violatedNoGood = conflictCause.getViolatedNoGood(); LOGGER.debug("Violating assignment is: {}", assignment); branchingHeuristic.violatedNoGood(violatedNoGood); if (!afterAllAtomsAssigned) { if (!learnBackjumpAddFromConflict(conflictCause)) { logStats(); return false; } } else { LOGGER.debug("Assignment is violated after all unassigned atoms have been assigned false."); conflictsAfterClosing++; if (!treatConflictAfterClosing(violatedNoGood)) { return false; } afterAllAtomsAssigned = false; } } else if (didChange) { // Ask the grounder for new NoGoods, then propagate (again). LOGGER.trace("Doing propagation step."); grounder.updateAssignment(assignment.getNewPositiveAssignmentsIterator()); Map<Integer, NoGood> obtained = grounder.getNoGoods(assignment); didChange = !obtained.isEmpty(); if (!ingest(obtained)) { logStats(); return false; } } else if (choose()) { LOGGER.debug("Did choice."); didChange = true; } else if (close()) { LOGGER.debug("Closed unassigned known atoms (assigning FALSE)."); afterAllAtomsAssigned = true; } else if (assignment.getMBTCount() == 0) { // NOTE: If we would do optimization, we would now have a guaranteed upper bound. AnswerSet as = translate(assignment.getTrueAssignments()); LOGGER.debug("Answer-Set found: {}", as); action.accept(as); logStats(); return true; } else { LOGGER.debug("Backtracking from wrong choices ({} MBTs).", assignment.getMBTCount()); if (!justifyMbtAndBacktrack()) { return false; } afterAllAtomsAssigned = false; } } } /** * Adds a noGood to the store and in case of out-of-order literals causing another conflict, triggers further backjumping. 
* @param noGoodId * @param noGood */ private boolean addAndBackjumpIfNecessary(int noGoodId, NoGood noGood) { while (store.add(noGoodId, noGood) != null) { LOGGER.debug("Adding noGood (again) caused conflict, computing real backjumping level now."); int backjumpLevel = learner.computeConflictFreeBackjumpingLevel(noGood); if (backjumpLevel < 0) { return false; } choiceManager.backjump(backjumpLevel); if (store.propagate() != null) { throw oops("Violated NoGood after backtracking."); } } return true; } /** * Analyzes the conflict and either learns a new NoGood (causing backjumping and addition to the NoGood store), * or backtracks the choice causing the conflict. * @return false iff the analysis result shows that the set of NoGoods is unsatisfiable. */ private boolean learnBackjumpAddFromConflict(ConflictCause conflictCause) { GroundConflictNoGoodLearner.ConflictAnalysisResult analysisResult = learner.analyzeConflictingNoGood(conflictCause.getViolatedNoGood()); LOGGER.debug("Analysis result: {}", analysisResult); if (analysisResult == UNSAT) { // Halt if unsatisfiable. return false; } branchingHeuristic.analyzedConflict(analysisResult); if (analysisResult.learnedNoGood == null && analysisResult.clearLastChoiceAfterBackjump) { // TODO: Temporarily abort resolution with backtrackFast instead of learning a too large nogood. 
return backtrack(); } if (analysisResult.learnedNoGood != null) { choiceManager.backjump(analysisResult.backjumpLevel); final NoGood learnedNoGood = analysisResult.learnedNoGood; int noGoodId = grounder.register(learnedNoGood); if (!addAndBackjumpIfNecessary(noGoodId, learnedNoGood)) { return false; } return true; } choiceManager.backjump(analysisResult.backjumpLevel); choiceManager.backtrackFast(); if (store.propagate() != null) { throw oops("Violated NoGood after backtracking."); } if (!store.didPropagate()) { throw oops("Nothing to propagate after backtracking from conflict-causing choice"); } return true; } private boolean justifyMbtAndBacktrack() { mbtAtFixpoint++; // Run justification only if enabled and possible. if (disableJustifications || !(grounder instanceof ProgramAnalyzingGrounder)) { if (!backtrack()) { logStats(); return false; } return true; } ProgramAnalyzingGrounder analyzingGrounder = (ProgramAnalyzingGrounder) grounder; // Justify one MBT assigned atom. Integer atomToJustify = assignment.getBasicAtomAssignedMBT(); if (LOGGER.isDebugEnabled()) { LOGGER.debug("Searching for justification of {} / {}", atomToJustify, atomStore.atomToString(atomToJustify)); LOGGER.debug("Assignment is (TRUE part only): {}", translate(assignment.getTrueAssignments())); } Set<Literal> reasonsForUnjustified = analyzingGrounder.justifyAtom(atomToJustify, assignment); NoGood noGood = noGoodFromJustificationReasons(atomToJustify, reasonsForUnjustified); int noGoodID = grounder.register(noGood); Map<Integer, NoGood> obtained = new LinkedHashMap<>(); obtained.put(noGoodID, noGood); LOGGER.debug("Learned NoGood is: {}", atomStore.noGoodToString(noGood)); // Add NoGood and trigger backjumping. if (!ingest(obtained)) { logStats(); return false; } return true; } private NoGood noGoodFromJustificationReasons(int atomToJustify, Set<Literal> reasonsForUnjustified) { // Turn the justification into a NoGood. 
int[] reasons = new int[reasonsForUnjustified.size() + 1]; reasons[0] = atomToLiteral(atomToJustify); int arrpos = 1; for (Literal literal : reasonsForUnjustified) { reasons[arrpos++] = atomToLiteral(atomStore.get(literal.getAtom()), !literal.isNegated()); } return new NoGood(reasons); } private boolean treatConflictAfterClosing(NoGood violatedNoGood) { if (disableJustificationAfterClosing || disableJustifications || !(grounder instanceof ProgramAnalyzingGrounder)) { // Will not learn from violated NoGood, do simple backtrack. LOGGER.debug("NoGood was violated after all unassigned atoms were assigned to false; will not learn from it; skipping."); if (!backtrack()) { logStats(); return false; } return true; } ProgramAnalyzingGrounder analyzingGrounder = (ProgramAnalyzingGrounder) grounder; LOGGER.debug("Justifying atoms in violated nogood."); LinkedHashSet<Integer> toJustify = new LinkedHashSet<>(); // Find those literals in violatedNoGood that were just assigned false. for (Integer literal : violatedNoGood) { if (assignment.getImpliedBy(atomOf(literal)) == TrailAssignment.CLOSING_INDICATOR_NOGOOD) { toJustify.add(literal); } } // Since the violatedNoGood may contain atoms other than BasicAtom, these have to be treated. Map<Integer, NoGood> obtained = new LinkedHashMap<>(); Iterator<Integer> toJustifyIterator = toJustify.iterator(); ArrayList<Integer> ruleAtomReplacements = new ArrayList<>(); while (toJustifyIterator.hasNext()) { Integer literal = toJustifyIterator.next(); Atom atom = atomStore.get(atomOf(literal)); if (atom instanceof BasicAtom) { continue; } if (!(atom instanceof RuleAtom)) { // Ignore atoms other than RuleAtom. toJustifyIterator.remove(); continue; } // For RuleAtoms in toJustify the corresponding ground body contains BasicAtoms that have been assigned FALSE in the closing. // First, translate RuleAtom back to NonGroundRule + Substitution. 
String ruleId = (String) ((ConstantTerm<?>)atom.getTerms().get(0)).getObject(); NonGroundRule nonGroundRule = analyzingGrounder.getNonGroundRule(Integer.parseInt(ruleId)); String substitution = (String) ((ConstantTerm<?>)atom.getTerms().get(1)).getObject(); Substitution groundingSubstitution = Substitution.fromString(substitution); Rule rule = nonGroundRule.getRule(); // Find ground literals in the body that have been assigned false and justify those. for (Literal bodyLiteral : rule.getBody()) { Atom groundAtom = bodyLiteral.getAtom().substitute(groundingSubstitution); if (groundAtom instanceof ComparisonAtom || analyzingGrounder.isFact(groundAtom)) { // Facts and ComparisonAtoms are always true, no justification needed. continue; } int groundAtomId = atomStore.get(groundAtom); Assignment.Entry entry = assignment.get(groundAtomId); // Check if atom was assigned to FALSE during the closing. if (entry.getImpliedBy() == TrailAssignment.CLOSING_INDICATOR_NOGOOD) { ruleAtomReplacements.add(atomToNegatedLiteral(groundAtomId)); } } toJustifyIterator.remove(); } toJustify.addAll(ruleAtomReplacements); for (Integer literalToJustify : toJustify) { LOGGER.debug("Searching for justification(s) of {} / {}", toJustify, atomStore.atomToString(atomOf(literalToJustify))); Set<Literal> reasonsForUnjustified = analyzingGrounder.justifyAtom(atomOf(literalToJustify), assignment); NoGood noGood = noGoodFromJustificationReasons(atomOf(literalToJustify), reasonsForUnjustified); int noGoodID = grounder.register(noGood); obtained.put(noGoodID, noGood); LOGGER.debug("Learned NoGood is: {}", atomStore.noGoodToString(noGood)); } // Backtrack to remove the violation. if (!backtrack()) { logStats(); return false; } // Add newly obtained noGoods. if (!ingest(obtained)) { logStats(); return false; } return true; } private boolean close() { return assignment.closeUnassignedAtoms(); } /** * Iterative implementation of recursive backtracking. 
* * @return {@code true} iff it is possible to backtrack even further, {@code false} otherwise */ private boolean backtrack() { while (assignment.getDecisionLevel() != 0) { final Assignment.Entry choice = choiceManager.backtrackSlow(); store.propagate(); if (choice == null) { LOGGER.debug("Backtracking further, because last choice was already backtracked."); continue; } final int lastChoice = choice.getAtom(); final boolean choiceValue = choice.getTruth().toBoolean(); // Chronological backtracking: choose inverse now. // Choose FALSE if the previous choice was for TRUE and the atom was not already MBT at that time. ThriceTruth lastChoiceTruth = assignment.getTruth(lastChoice); if (choiceValue && MBT.equals(lastChoiceTruth)) { LOGGER.debug("Backtracking further, because last choice was MBT before choosing TRUE."); continue; } // If choice was assigned at lower decision level (due to added NoGoods), no inverted choice should be done. if (choice.getImpliedBy() != null) { LOGGER.debug("Last choice is now implied by {}", choice.getImpliedBy()); //if (choice.getDecisionLevel() == assignment.getDecisionLevel() + 1) { // throw oops("Choice was assigned but not at a lower decision level"); //} LOGGER.debug("Backtracking further, because last choice was assigned at a lower decision level."); continue; } // Choose inverse if it is not yet already assigned TRUE or FALSE. if (lastChoiceTruth == null || (lastChoiceTruth.isMBT() && !choiceValue)) { LOGGER.debug("Choosing inverse."); choiceManager.choose(new Choice(lastChoice, !choiceValue, true)); break; } // Continue backtracking. 
} return assignment.getDecisionLevel() != 0; } private boolean ingest(Map<Integer, NoGood> obtained) { branchingHeuristic.newNoGoods(obtained.values()); assignment.growForMaxAtomId(); store.growForMaxAtomId(atomStore.getMaxAtomId()); LinkedList<Map.Entry<Integer, NoGood>> noGoodsToAdd = new LinkedList<>(obtained.entrySet()); Map.Entry<Integer, NoGood> entry; while ((entry = noGoodsToAdd.poll()) != null) { if (NoGood.UNSAT.equals(entry.getValue())) { // Empty NoGood cannot be satisfied, program is unsatisfiable. return false; } final ConflictCause conflictCause = store.add(entry.getKey(), entry.getValue()); if (conflictCause == null) { // There is no conflict, all is fine. Just skip conflict treatment and carry on. continue; } final NoGood learnedNoGood = fixContradiction(entry, conflictCause); if (learnedNoGood != null) { noGoodsToAdd.addFirst(new AbstractMap.SimpleEntry<>(grounder.register(learnedNoGood), learnedNoGood)); } } return true; } private NoGood fixContradiction(Map.Entry<Integer, NoGood> noGoodEntry, ConflictCause conflictCause) { LOGGER.debug("Attempting to fix violation of {} caused by {}", noGoodEntry.getValue(), conflictCause); if (conflictCause.getViolatedChoice() != null) { choiceManager.backjump(conflictCause.getViolatedChoice().getDecisionLevel()); choiceManager.backtrackFast(); return null; } GroundConflictNoGoodLearner.ConflictAnalysisResult conflictAnalysisResult = learner.analyzeConflictingNoGood(conflictCause.getViolatedNoGood()); if (conflictAnalysisResult == UNSAT) { return NoGood.UNSAT; } branchingHeuristic.analyzedConflict(conflictAnalysisResult); choiceManager.backjump(conflictAnalysisResult.backjumpLevel); if (conflictAnalysisResult.clearLastChoiceAfterBackjump) { choiceManager.backtrackFast(); } // If NoGood was learned, add it to the store. // Note that the learned NoGood may cause further conflicts, since propagation on lower decision levels is lazy, // hence backtracking once might not be enough to remove the real conflict cause. 
if (!addAndBackjumpIfNecessary(noGoodEntry.getKey(), noGoodEntry.getValue())) { return NoGood.UNSAT; } return conflictAnalysisResult.learnedNoGood; } private boolean choose() { choiceManager.addChoiceInformation(grounder.getChoiceAtoms()); choiceManager.updateAssignments(); // Hint: for custom heuristics, evaluate them here and pick a value if the heuristics suggests one. int literal; if ((literal = branchingHeuristic.chooseLiteral()) == DEFAULT_CHOICE_LITERAL) { LOGGER.debug("No choices!"); return false; } else if (LOGGER.isDebugEnabled()) { LOGGER.debug("Branching heuristic chose literal {}", atomStore.literalToString(literal)); } choiceManager.choose(new Choice(literal, false)); return true; } @Override public int getNumberOfChoices() { return choiceManager.getChoices(); } @Override public int getNumberOfBacktracks() { return choiceManager.getBacktracks(); } @Override public int getNumberOfBacktracksWithinBackjumps() { return choiceManager.getBacktracksWithinBackjumps(); } @Override public int getNumberOfBackjumps() { return choiceManager.getBackjumps(); } @Override public int getNumberOfBacktracksDueToRemnantMBTs() { return mbtAtFixpoint; } @Override public int getNumberOfConflictsAfterClosing() { return conflictsAfterClosing; } private void logStats() { LOGGER.debug(getStatisticsString()); } }
Replace 1000l by 1000
src/main/java/at/ac/tuwien/kr/alpha/solver/DefaultSolver.java
Replace 1000l by 1000
<ide><path>rc/main/java/at/ac/tuwien/kr/alpha/solver/DefaultSolver.java <ide> BranchingHeuristicFactory.getInstance(branchingHeuristic, grounder, assignment, choiceManager, random), <ide> new NaiveHeuristic(choiceManager)); <ide> this.disableJustifications = disableJustifications; <del> this.performanceLog = new PerformanceLog(choiceManager, (TrailAssignment) assignment, 1000l); <add> this.performanceLog = new PerformanceLog(choiceManager, (TrailAssignment) assignment, 1000); <ide> } <ide> <ide> @Override
JavaScript
mit
d52c22875f9e3b7b832494c1868cedbf2aca3a4d
0
pdfernhout/Twirlip7,pdfernhout/Twirlip7,pdfernhout/Twirlip7
// Collage application // Compendium/IBIS-like app // Thanks for the inspiration, Al, and good luck with whatever you are up to now... /************************************** # Conceptual references Dialogue Mapping: Building Shared Understanding of Wicked Problems by Jeff Conklin https://www.amazon.com/Dialogue-Mapping-Building-Understanding-Problems/dp/0470017686 The book explains how we can visualize discussions on complex topics using the IBIS notation (Questions/Issues, Ideas, Reasons/Pros&Cons) which provides just enough structure to aid a group's short-term memory without getting in the way. What might be arguments over the best way to proceed become collaborations in constructing a dialogue map exploring all the possibilities and their evaluations. More on Dialog Mapping can be found at Jeff Conklin's website here: http://cognexus.org/id41.htm Compendium desktop software for IBIS: http://compendium.open.ac.uk/ Constructing Knowledge Art: An Experiential Perspective on Crafting Participatory Representations by Al Selvin and Simon Buckingham Shum (who created the Compendium software) https://www.amazon.com/gp/product/1627052593 This is a broader exploration of dialog mapping and similar participatory technologies from an advanced facilitator's perspective. Most people would probably want to read Jeff Conklin's "how to" book on Dialogue Mapping first, and then move onto this one once they are ready to grow further as a facilitator of group work. 
# Programming references arrow marker: https://stackoverflow.com/questions/12680166/how-to-use-an-arrow-marker-on-an-svg-line-element arrowhead derived from: https://stackoverflow.com/questions/11808860/how-to-place-arrow-head-triangles-on-svg-lines marker-end: http://tutorials.jenkov.com/svg/marker-element.html line to edge of circle: https://stackoverflow.com/questions/13165913/draw-an-arrow-between-two-circles#13234898 **************************************/ /* eslint-disable no-console */ /* global CompendiumIcons */ "use strict" // defines CompendiumIcons import "./examples/ibis_icons.js" // defines m import "./vendor/mithril.js" import { StoreUsingServer } from "./StoreUsingServer.js" import { HashUUIDTracker } from "./HashUUIDTracker.js" import { Pointrel20190914 } from "./Pointrel20190914.js" import { CanonicalJSON } from "./CanonicalJSON.js" import { UUID } from "./UUID.js" const p = new Pointrel20190914() // import { FileUtils } from "./FileUtils.js" // import { UUID } from "./UUID.js" let userID = localStorage.getItem("userID") || "anonymous" let collageUUID function userIDChange(event) { userID = event.target.value backend.configure(undefined, userID) localStorage.setItem("userID", userID) } /* let diagram = { width: 800, height: 500, diagramName: "Untitled IBIS Diagram", elements: [], textLocation: "right" } let isItemPanelDisplayed = false let diagramJSON = JSON.stringify(diagram, null, 4) let isJSONPanelDisplayed = false let outlineText = "" let isImportOutlinePanelDisplayed = false // tiny stack for connecting items let earlierDraggedItem = null let laterDraggedItem = null let draggedItem = null let dragStart = {x: 0, y: 0} let objectStart = {x: 0, y: 0} const messages = [] let unsaved = false const delta = 60 let lastClickPosition = {x: delta, y: delta} function onmousedownBackground(event) { event.preventDefault() if (draggedItem) return // TODO: Rubber band selection } function onmousedown(element, event) { event.preventDefault() 
earlierDraggedItem = laterDraggedItem laterDraggedItem = element draggedItem = element dragStart = { x: event.clientX, y: event.clientY } objectStart = { x: element.x, y: element.y } } function onmousemoveBackground(event) { event.preventDefault() if (draggedItem) { const dx = event.clientX - dragStart.x const dy = event.clientY - dragStart.y const newX = objectStart.x + dx const newY = objectStart.y + dy draggedItem.x = newX draggedItem.y = newY } } function onmouseupBackground(event) { event.preventDefault() const rect = event.target.getBoundingClientRect() if (draggedItem) { lastClickPosition = { x: draggedItem.x, y: draggedItem.y } updateJSONFromDiagram() } else { lastClickPosition = { x: event.clientX - rect.left, y: event.clientY - rect.top } } draggedItem = null } function onkeydown(event) { console.log("onkeydown", event) } function addElement(type, name, parentId) { if (!name) name = prompt(type + " name") if (!name) return const x = lastClickPosition.x + delta const y = lastClickPosition.y + delta const element = { type: type, name: name, x: x, y: y, notes: "", id: UUID.uuidv4() } if (parentId) element.parentId = parentId diagram.elements.unshift(element) if (lastClickPosition) { lastClickPosition.x += delta lastClickPosition.y += delta } earlierDraggedItem = laterDraggedItem laterDraggedItem = element updateJSONFromDiagram() return element } function addLink() { if (!earlierDraggedItem) return if (!laterDraggedItem) return if (earlierDraggedItem === laterDraggedItem) return laterDraggedItem.parentId = earlierDraggedItem.id updateJSONFromDiagram() } // Need to add undo function deleteLink() { if (!laterDraggedItem) return laterDraggedItem.parentId = undefined updateJSONFromDiagram() } function deleteElement() { if (!laterDraggedItem) return const index = diagram.elements.indexOf(laterDraggedItem) if (index > -1) { diagram.elements.splice(index, 1) } updateJSONFromDiagram() } function viewLink(element) { const parentId = element.parentId if (!parentId) 
return [] const parent = diagram.elements.find(element => element.id === parentId) if (!parent) return [] const xA = parent.x const yA = parent.y const xB = element.x const yB = element.y const radius = 24 const d = Math.sqrt((xB - xA) * (xB - xA) + (yB - yA) * (yB - yA)) const d2 = d - radius const ratio = d2 / d const dx = (xB - xA) * ratio const dy = (yB - yA) * ratio const x = xA + dx const y = yA + dy return m("line", { x1: x, y1: y, x2: element.x - dx, y2: element.y - dy, "marker-end": "url(#arrowhead)", stroke: "black", "stroke-width": 1 }) } const findURLRegex = /(http[s]?:\/\/)([\da-z.-]+)\.([a-z.]{2,6})([/\w.-]*)*\/?/ function viewElement(element) { const textLocation = diagram.textLocation || "bottom" const hasURL = findURLRegex.exec(element.name || "") const followLink = hasURL ? () => window.open(hasURL[0]) : undefined const extraStyling = hasURL ? ".underline-hover" : "" return [ element === laterDraggedItem ? m("text", {x: element.x, y: element.y - 20, "text-anchor": "middle"}, "*") : element === earlierDraggedItem ? m("text", {x: element.x, y: element.y - 20, "text-anchor": "middle"}, "<") : [], m("image", { "xlink:href": CompendiumIcons[element.type + "_png"], x: element.x - 16, y: element.y - 16, width: 32, height: 32, alt: "question", onmousedown: (event) => onmousedown(element, event), }), textLocation === "right" ? 
m("text" + extraStyling, {x: element.x + 24, y: element.y + 8, "text-anchor": "left", onclick: followLink}, element.name) : m("text" + extraStyling, {x: element.x, y: element.y + 34, "text-anchor": "middle", onclick: followLink}, element.name) ] } function viewArrowhead() { return m("marker", { id: "arrowhead", orient: "auto", markerWidth: 8, markerHeight: 16, refX: 2, refY: 4, }, m("path", { d: "M0,0 V8 L8,4 Z", fill: "black" })) } function updateJSONFromDiagram() { diagramJSON = JSON.stringify(diagram, null, 4) unsaved = true } function updateDiagramFromJSON() { const newDiagram = JSON.parse(diagramJSON) diagram = newDiagram } */ /* Example to test outline parsing: Q: Top Question A: First Answer Q: Another Question A: Answer A1 A: Answer A2 A: Answer A3 Pro: A point for A3 Con: A point against A3 A: Answer A4 Q: Yet Another Question Q: And also another question A: Answer B1 A: Answer B2 */ /* function updateDiagramFromLabeledOutline() { const nodeTypeMap = { "Q: " : "issue", "A: " : "position", "Pro: " : "plus", "Con: " : "minus" } let nodes = [] let indents = [] const lines = outlineText.split("\n") for (let line of lines) { if (line.trim() === "") { continue } const parseLineRegex = /(^[ ]*)(Q: |A: |Pro: |Con: )(.*)$/ const match = parseLineRegex.exec(line) if (!match) { console.log("Problem parsing line", "'" + line + "'") continue } const lastIndent = indents[indents.length - 1] const indent = match[1] if (indent === "") { nodes = [] indents = [] lastClickPosition.x = delta } else if (indent.length === lastIndent.length) { // same level nodes.pop() indents.pop() lastClickPosition.x -= delta } else if (indent.length < lastIndent.length) { // dedenting let oldIndent = lastIndent while (oldIndent && oldIndent.length > indent.length) { indents.pop() nodes.pop() lastClickPosition.x -= delta oldIndent = indents[indents.length - 1] } if (oldIndent && oldIndent !== indent) { console.log("indentation issue for: ", line, oldIndent.length, indent.length) break } 
indents.pop() nodes.pop() lastClickPosition.x -= delta } else { // (indent.length > lastIndent.length) // indenting -- do nothing as added later } let parentId = null if (nodes.length) parentId = nodes[nodes.length - 1].id const nodeType = nodeTypeMap[match[2]] const text = match[3] if (nodeType && text) { const element = addElement(nodeType, text, parentId) nodes.push(element) indents.push(indent) } else { console.log("Problem parsing line", line) } } } function updateDiagramFromIndentedTextOutline() { let nodes = [] let indents = [] const lines = outlineText.split("\n") for (let line of lines) { if (line.trim() === "") { continue } const parseLineRegex = /(^[ ]*)(.*)$/ const match = parseLineRegex.exec(line) if (!match) { console.log("Problem parsing line", "'" + line + "'") continue } const lastIndent = indents[indents.length - 1] const indent = match[1] if (indent === "") { nodes = [] indents = [] lastClickPosition.x = delta } else if (indent.length === lastIndent.length) { // same level nodes.pop() indents.pop() lastClickPosition.x -= delta } else if (indent.length < lastIndent.length) { // dedenting let oldIndent = lastIndent while (oldIndent && oldIndent.length > indent.length) { indents.pop() nodes.pop() lastClickPosition.x -= delta oldIndent = indents[indents.length - 1] } if (oldIndent && oldIndent !== indent) { console.log("indentation issue for: ", line, oldIndent.length, indent.length) break } indents.pop() nodes.pop() lastClickPosition.x -= delta } else { // (indent.length > lastIndent.length) // indenting -- do nothing as added later } let parentId = null if (nodes.length) parentId = nodes[nodes.length - 1].id let text = match[2].trim() let nodeType = "" if (text.startsWith("+")) { text = text.substring(1).trim() nodeType = "plus" } else if (text.startsWith("-")) { text = text.substring(1).trim() nodeType = "minus" } else if (text.endsWith("?")) { nodeType = "issue" } else { // if (text[0] && text[0].match(/[a-z]/)) { // console.log("Problem parsing 
line with intial lowercase", line) // throw new Error("Parse error") // } nodeType = "position" } if (nodeType && text) { const element = addElement(nodeType, text, parentId) nodes.push(element) indents.push(indent) } else { console.log("Problem parsing line", line) } } } function viewItemPanel() { const element = laterDraggedItem const disabled = !element return m("div.ma1", [ isItemPanelDisplayed ? [ m("div", "Type"), m("select.ma1", {onchange: event => element.type = event.target.value, disabled}, Object.keys(CompendiumIcons).sort().map(key => { // remove"_png" at end const type = key.substring(0, key.length - 4) return m("option", {value: type, selected: element && element.type === type}, type) }) ), m("br"), "Name", m("br"), m("input.w-100", { value: element ? element.name : "", oninput: (event) => { element.name = event.target.value; updateJSONFromDiagram() }, disabled }), m("br.ma2"), "Notes", m("br"), m("textarea.w-100", { value: element ? element.notes : "", oninput: (event) => { element.notes = event.target.value; updateJSONFromDiagram() }, disabled }), ] : [] ]) } function importDiagram() { FileUtils.loadFromFile((fileName, fileContents) => { if (fileContents) { diagramJSON = fileContents updateDiagramFromJSON() if (diagram.diagramName.toLowerCase().startsWith("untitled")) { if (fileName.endsWith(".json")) fileName = fileName.substring(0, fileName.length - ".json".length) diagram.diagramName = fileName } m.redraw() } }) } function exportDiagram() { const provisionalFileName = diagram.diagramName FileUtils.saveToFile(provisionalFileName, diagramJSON, ".json", (fileName) => { diagram.diagramName = fileName updateJSONFromDiagram() unsaved = false }) } function saveDiagram() { if (diagram.diagramName.toLowerCase().startsWith("untitled")) { alert("Please name the diagram first by clicking on the diagram name") return } const timestamp = new Date().toISOString() backend.addItem({ collageUUID, diagram, userID, timestamp }) unsaved = false console.log("sent to 
server", diagram) } function clearDiagram() { if (!confirm("Clear diagram?")) return diagram.elements = [] updateJSONFromDiagram() lastClickPosition = {x: delta, y: delta} } /* function loadDiagram() { const diagramName = prompt("Load which diagram name?", diagram.diagramName) if (!diagramName) return const items = Twirlip7.findItem({entity: diagramName, attribute: "contents"}) if (items.length === 0) { console.log("item not found", diagramName) return } const item = items[0] diagramJSON = item.value updateDiagramFromJSON() m.redraw() } */ /* function viewImportExportPanel() { return m("div.ma1", m("button.ma1", { onclick: importDiagram }, "Import Diagram"), m("button.ma1", { onclick: exportDiagram }, "Export Diagram"), m("button.ma1", { onclick: saveDiagram }, "Save to server"), // m("button.ma1", { onclick: loadDiagram }, "Load"), ) } function viewCheckboxesPanel() { return m("div.ma1", m("input[type=checkbox].ma1", { checked: isItemPanelDisplayed, onchange: event => isItemPanelDisplayed = event.target.checked }), "Edit Item", m("input[type=checkbox].ma1.ml3", { checked: isJSONPanelDisplayed, onchange: event => isJSONPanelDisplayed = event.target.checked }), m("span", "Edit Diagram as JSON"), m("input[type=checkbox].ma1.ml3", { checked: isImportOutlinePanelDisplayed, onchange: event => isImportOutlinePanelDisplayed = event.target.checked }), m("span", "Import outline"), ) } function viewJSONPanel() { return m("div", isJSONPanelDisplayed ? [ m("div", "JSON:"), m("textarea.w-100", { height: "20rem", value: diagramJSON, oninput: (event) => diagramJSON = event.target.value }), m("button.ma1", { onclick: updateDiagramFromJSON }, "Update Diagram from JSON"), ] : [], ) } function viewOutlinePanel() { return m("div", isImportOutlinePanelDisplayed ? 
[ m("div", "Outline:"), m("textarea.w-100", { height: "20rem", value: outlineText, oninput: (event) => outlineText = event.target.value }), m("br"), m("button.ma1", { onclick: updateDiagramFromIndentedTextOutline }, "Parse itIBIS outline"), m("button.ma1", { onclick: updateDiagramFromLabeledOutline }, "Parse labeled outline"), m("button.ma1", { onclick: clearDiagram }, "Clear diagram"), ] : [] ) } function changeDiagramName() { const newDiagramName = prompt("Diagram name?", diagram.diagramName) if (newDiagramName) diagram.diagramName = newDiagramName } // { extraStyling: ".bg-blue.br4", title: () => "IBIS Diagram for: " + diagram.diagramName } function view() { return m("div.bg-blue.br4.pa3.h-100.w-100.flex.flex-column.overflow-hidden", m("div.flex-none", m("span", "Issue Based Information System (IBIS) for Dialogue Mapping"), " -- ", m("span", { onclick: changeDiagramName, title: "Click to change diagram name" }, diagram.diagramName), " ", m("span", { onclick: () => { diagram.width = prompt("New diagram width?", diagram.width) || diagram.width updateJSONFromDiagram() }, title: "Diagram width -- click to change" }, diagram.width), " X ", m("span", { onclick: () => { diagram.height = prompt("New diagram height?", diagram.height) || diagram.height updateJSONFromDiagram() }, title: "Diagram height -- click to change" }, diagram.height), unsaved ? 
" [UNSAVED]" : "" ), m("div.mt1.mb1.flex-none", m("button.ma1.pa1", { onclick: addElement.bind(null, "issue", null, null) }, m("img.v-mid.mr1", { src: CompendiumIcons.issue_png, style: "width: 16px; height: 16px;" }), "Question" ), m("button.ma1.pa1", { onclick: addElement.bind(null, "position", null, null) }, m("img.v-mid.mr1", { src: CompendiumIcons.position_png, style: "width: 16px; height: 16px;" }), "Idea" ), m("button.ma1.pa1", { onclick: addElement.bind(null, "plus", null, null) }, m("img.v-mid.mr1", { src: CompendiumIcons.plus_png, style: "width: 16px; height: 16px;" }), "Pro" ), m("button.ma1.pa1", { onclick: addElement.bind(null, "minus", null, null) }, m("img.v-mid.mr1", { src: CompendiumIcons.minus_png, style: "width: 16px; height: 16px;" }), "Con" ), m("button.ma1.pa1", { onclick: deleteElement }, "Delete"), m("button.ma1.pa1", { onclick: addLink }, "Link <--*"), m("button.ma1.pa1", { onclick: deleteLink }, "Unlink *"), m("select.ma1.pa1", { onchange: (event) => diagram.textLocation = event.target.value }, m("option", { value: "right", selected: diagram.textLocation === "right" }, "text at right"), m("option", { value: "bottom", selected: diagram.textLocation === "bottom" || !diagram.textLocation }, "text at bottom"), ) ), m("div.flex-auto.overflow-auto", [ // on keydown does not seem to work here m("svg.diagram.ba", { width: diagram.width, height: diagram.height, onmousedown: onmousedownBackground, onmousemove: onmousemoveBackground, onmouseup: onmouseupBackground, onkeydown: onkeydown }, [ viewArrowhead(), diagram.elements.map(element => viewLink(element)), diagram.elements.map(element => viewElement(element)), ]), ]), viewImportExportPanel(), viewCheckboxesPanel(), viewItemPanel(), viewJSONPanel(), viewOutlinePanel() ) } */ class Item { constructor(uuid) { this.uuid = uuid || UUID.uuidv4() } } class Collage { constructor(uuid) { this.uuid = {collageUUID: uuid} } getItems() { const result = [] const bcMap = p.findBC(this.uuid, "item") for (let key in 
bcMap) { const uuid = bcMap[key] if (uuid) result.push(new Item(uuid)) } return result } async addItem(item, summary) { // Keep a copy of essential information // TODO: if (summary) p.addTriple(getOrganizerName(), {itemSummary: item.uuid}, summary) await p.addTripleAsync(this.uuid, {item: item.uuid}, item.uuid) } deleteItem(item) { p.addTriple(this.uuid, {item: item.uuid}, null) } /* getSummaryForItem(uuid) { return p.findC(this.uuid, {itemSummary: uuid}) } */ view() { const items = this.getItems() return m("div", loading ? m("div", "Loading...") : [ items.length === 0 && m("div", "No items"), items.map(item => m("div", item.uuid)), m("button", {onclick: () => this.addItem(new Item())}, "Add item") ] ) } } const TwirlipCollageApp = { view: () => m("div", "Hello Collage ", collageUUID, new Collage(collageUUID).view() ) } const { uuidChangedByApp, getUUID } = HashUUIDTracker("collageUUID", (uuid) => { // Called every time UUID changed from hash in the URL collageUUID = uuid }) collageUUID = getUUID() // TODO: optimize exessive stringify use function isUUIDMatch(a, b) { return CanonicalJSON.stringify(a) === CanonicalJSON.stringify(b) } let loading = true p.connect({ onLoaded: (streamId) => { console.log("p onloaded", streamId) if (isUUIDMatch(streamId, {collageUUID: collageUUID})) { loading = false } } }) m.mount(document.body, TwirlipCollageApp)
src/ui/collage.js
// Collage application // Compendium/IBIS-like app // Thanks for the inspiration, Al, and good luck with whatever you are up to now... /************************************** # Conceptual references Dialogue Mapping: Building Shared Understanding of Wicked Problems by Jeff Conklin https://www.amazon.com/Dialogue-Mapping-Building-Understanding-Problems/dp/0470017686 The book explains how we can visualize discussions on complex topics using the IBIS notation (Questions/Issues, Ideas, Reasons/Pros&Cons) which provides just enough structure to aid a group's short-term memory without getting in the way. What might be arguments over the best way to proceed become collaborations in constructing a dialogue map exploring all the possibilities and their evaluations. More on Dialog Mapping can be found at Jeff Conklin's website here: http://cognexus.org/id41.htm Compendium desktop software for IBIS: http://compendium.open.ac.uk/ Constructing Knowledge Art: An Experiential Perspective on Crafting Participatory Representations by Al Selvin and Simon Buckingham Shum (who created the Compendium software) https://www.amazon.com/gp/product/1627052593 This is a broader exploration of dialog mapping and similar participatory technologies from an advanced facilitator's perspective. Most people would probably want to read Jeff Conklin's "how to" book on Dialogue Mapping first, and then move onto this one once they are ready to grow further as a facilitator of group work. 
# Programming references arrow marker: https://stackoverflow.com/questions/12680166/how-to-use-an-arrow-marker-on-an-svg-line-element arrowhead derived from: https://stackoverflow.com/questions/11808860/how-to-place-arrow-head-triangles-on-svg-lines marker-end: http://tutorials.jenkov.com/svg/marker-element.html line to edge of circle: https://stackoverflow.com/questions/13165913/draw-an-arrow-between-two-circles#13234898 **************************************/ /* eslint-disable no-console */ /* global CompendiumIcons */ "use strict" // defines CompendiumIcons import "./examples/ibis_icons.js" // defines m import "./vendor/mithril.js" import { StoreUsingServer } from "./StoreUsingServer.js" import { HashUUIDTracker } from "./HashUUIDTracker.js" // import { FileUtils } from "./FileUtils.js" // import { UUID } from "./UUID.js" let userID = localStorage.getItem("userID") || "anonymous" let collageUUID function userIDChange(event) { userID = event.target.value backend.configure(undefined, userID) localStorage.setItem("userID", userID) } /* let diagram = { width: 800, height: 500, diagramName: "Untitled IBIS Diagram", elements: [], textLocation: "right" } let isItemPanelDisplayed = false let diagramJSON = JSON.stringify(diagram, null, 4) let isJSONPanelDisplayed = false let outlineText = "" let isImportOutlinePanelDisplayed = false // tiny stack for connecting items let earlierDraggedItem = null let laterDraggedItem = null let draggedItem = null let dragStart = {x: 0, y: 0} let objectStart = {x: 0, y: 0} const messages = [] let unsaved = false const delta = 60 let lastClickPosition = {x: delta, y: delta} function onmousedownBackground(event) { event.preventDefault() if (draggedItem) return // TODO: Rubber band selection } function onmousedown(element, event) { event.preventDefault() earlierDraggedItem = laterDraggedItem laterDraggedItem = element draggedItem = element dragStart = { x: event.clientX, y: event.clientY } objectStart = { x: element.x, y: element.y } } 
function onmousemoveBackground(event) { event.preventDefault() if (draggedItem) { const dx = event.clientX - dragStart.x const dy = event.clientY - dragStart.y const newX = objectStart.x + dx const newY = objectStart.y + dy draggedItem.x = newX draggedItem.y = newY } } function onmouseupBackground(event) { event.preventDefault() const rect = event.target.getBoundingClientRect() if (draggedItem) { lastClickPosition = { x: draggedItem.x, y: draggedItem.y } updateJSONFromDiagram() } else { lastClickPosition = { x: event.clientX - rect.left, y: event.clientY - rect.top } } draggedItem = null } function onkeydown(event) { console.log("onkeydown", event) } function addElement(type, name, parentId) { if (!name) name = prompt(type + " name") if (!name) return const x = lastClickPosition.x + delta const y = lastClickPosition.y + delta const element = { type: type, name: name, x: x, y: y, notes: "", id: UUID.uuidv4() } if (parentId) element.parentId = parentId diagram.elements.unshift(element) if (lastClickPosition) { lastClickPosition.x += delta lastClickPosition.y += delta } earlierDraggedItem = laterDraggedItem laterDraggedItem = element updateJSONFromDiagram() return element } function addLink() { if (!earlierDraggedItem) return if (!laterDraggedItem) return if (earlierDraggedItem === laterDraggedItem) return laterDraggedItem.parentId = earlierDraggedItem.id updateJSONFromDiagram() } // Need to add undo function deleteLink() { if (!laterDraggedItem) return laterDraggedItem.parentId = undefined updateJSONFromDiagram() } function deleteElement() { if (!laterDraggedItem) return const index = diagram.elements.indexOf(laterDraggedItem) if (index > -1) { diagram.elements.splice(index, 1) } updateJSONFromDiagram() } function viewLink(element) { const parentId = element.parentId if (!parentId) return [] const parent = diagram.elements.find(element => element.id === parentId) if (!parent) return [] const xA = parent.x const yA = parent.y const xB = element.x const yB = element.y 
const radius = 24 const d = Math.sqrt((xB - xA) * (xB - xA) + (yB - yA) * (yB - yA)) const d2 = d - radius const ratio = d2 / d const dx = (xB - xA) * ratio const dy = (yB - yA) * ratio const x = xA + dx const y = yA + dy return m("line", { x1: x, y1: y, x2: element.x - dx, y2: element.y - dy, "marker-end": "url(#arrowhead)", stroke: "black", "stroke-width": 1 }) } const findURLRegex = /(http[s]?:\/\/)([\da-z.-]+)\.([a-z.]{2,6})([/\w.-]*)*\/?/ function viewElement(element) { const textLocation = diagram.textLocation || "bottom" const hasURL = findURLRegex.exec(element.name || "") const followLink = hasURL ? () => window.open(hasURL[0]) : undefined const extraStyling = hasURL ? ".underline-hover" : "" return [ element === laterDraggedItem ? m("text", {x: element.x, y: element.y - 20, "text-anchor": "middle"}, "*") : element === earlierDraggedItem ? m("text", {x: element.x, y: element.y - 20, "text-anchor": "middle"}, "<") : [], m("image", { "xlink:href": CompendiumIcons[element.type + "_png"], x: element.x - 16, y: element.y - 16, width: 32, height: 32, alt: "question", onmousedown: (event) => onmousedown(element, event), }), textLocation === "right" ? 
m("text" + extraStyling, {x: element.x + 24, y: element.y + 8, "text-anchor": "left", onclick: followLink}, element.name) : m("text" + extraStyling, {x: element.x, y: element.y + 34, "text-anchor": "middle", onclick: followLink}, element.name) ] } function viewArrowhead() { return m("marker", { id: "arrowhead", orient: "auto", markerWidth: 8, markerHeight: 16, refX: 2, refY: 4, }, m("path", { d: "M0,0 V8 L8,4 Z", fill: "black" })) } function updateJSONFromDiagram() { diagramJSON = JSON.stringify(diagram, null, 4) unsaved = true } function updateDiagramFromJSON() { const newDiagram = JSON.parse(diagramJSON) diagram = newDiagram } */ /* Example to test outline parsing: Q: Top Question A: First Answer Q: Another Question A: Answer A1 A: Answer A2 A: Answer A3 Pro: A point for A3 Con: A point against A3 A: Answer A4 Q: Yet Another Question Q: And also another question A: Answer B1 A: Answer B2 */ /* function updateDiagramFromLabeledOutline() { const nodeTypeMap = { "Q: " : "issue", "A: " : "position", "Pro: " : "plus", "Con: " : "minus" } let nodes = [] let indents = [] const lines = outlineText.split("\n") for (let line of lines) { if (line.trim() === "") { continue } const parseLineRegex = /(^[ ]*)(Q: |A: |Pro: |Con: )(.*)$/ const match = parseLineRegex.exec(line) if (!match) { console.log("Problem parsing line", "'" + line + "'") continue } const lastIndent = indents[indents.length - 1] const indent = match[1] if (indent === "") { nodes = [] indents = [] lastClickPosition.x = delta } else if (indent.length === lastIndent.length) { // same level nodes.pop() indents.pop() lastClickPosition.x -= delta } else if (indent.length < lastIndent.length) { // dedenting let oldIndent = lastIndent while (oldIndent && oldIndent.length > indent.length) { indents.pop() nodes.pop() lastClickPosition.x -= delta oldIndent = indents[indents.length - 1] } if (oldIndent && oldIndent !== indent) { console.log("indentation issue for: ", line, oldIndent.length, indent.length) break } 
indents.pop() nodes.pop() lastClickPosition.x -= delta } else { // (indent.length > lastIndent.length) // indenting -- do nothing as added later } let parentId = null if (nodes.length) parentId = nodes[nodes.length - 1].id const nodeType = nodeTypeMap[match[2]] const text = match[3] if (nodeType && text) { const element = addElement(nodeType, text, parentId) nodes.push(element) indents.push(indent) } else { console.log("Problem parsing line", line) } } } function updateDiagramFromIndentedTextOutline() { let nodes = [] let indents = [] const lines = outlineText.split("\n") for (let line of lines) { if (line.trim() === "") { continue } const parseLineRegex = /(^[ ]*)(.*)$/ const match = parseLineRegex.exec(line) if (!match) { console.log("Problem parsing line", "'" + line + "'") continue } const lastIndent = indents[indents.length - 1] const indent = match[1] if (indent === "") { nodes = [] indents = [] lastClickPosition.x = delta } else if (indent.length === lastIndent.length) { // same level nodes.pop() indents.pop() lastClickPosition.x -= delta } else if (indent.length < lastIndent.length) { // dedenting let oldIndent = lastIndent while (oldIndent && oldIndent.length > indent.length) { indents.pop() nodes.pop() lastClickPosition.x -= delta oldIndent = indents[indents.length - 1] } if (oldIndent && oldIndent !== indent) { console.log("indentation issue for: ", line, oldIndent.length, indent.length) break } indents.pop() nodes.pop() lastClickPosition.x -= delta } else { // (indent.length > lastIndent.length) // indenting -- do nothing as added later } let parentId = null if (nodes.length) parentId = nodes[nodes.length - 1].id let text = match[2].trim() let nodeType = "" if (text.startsWith("+")) { text = text.substring(1).trim() nodeType = "plus" } else if (text.startsWith("-")) { text = text.substring(1).trim() nodeType = "minus" } else if (text.endsWith("?")) { nodeType = "issue" } else { // if (text[0] && text[0].match(/[a-z]/)) { // console.log("Problem parsing 
line with intial lowercase", line) // throw new Error("Parse error") // } nodeType = "position" } if (nodeType && text) { const element = addElement(nodeType, text, parentId) nodes.push(element) indents.push(indent) } else { console.log("Problem parsing line", line) } } } function viewItemPanel() { const element = laterDraggedItem const disabled = !element return m("div.ma1", [ isItemPanelDisplayed ? [ m("div", "Type"), m("select.ma1", {onchange: event => element.type = event.target.value, disabled}, Object.keys(CompendiumIcons).sort().map(key => { // remove"_png" at end const type = key.substring(0, key.length - 4) return m("option", {value: type, selected: element && element.type === type}, type) }) ), m("br"), "Name", m("br"), m("input.w-100", { value: element ? element.name : "", oninput: (event) => { element.name = event.target.value; updateJSONFromDiagram() }, disabled }), m("br.ma2"), "Notes", m("br"), m("textarea.w-100", { value: element ? element.notes : "", oninput: (event) => { element.notes = event.target.value; updateJSONFromDiagram() }, disabled }), ] : [] ]) } function importDiagram() { FileUtils.loadFromFile((fileName, fileContents) => { if (fileContents) { diagramJSON = fileContents updateDiagramFromJSON() if (diagram.diagramName.toLowerCase().startsWith("untitled")) { if (fileName.endsWith(".json")) fileName = fileName.substring(0, fileName.length - ".json".length) diagram.diagramName = fileName } m.redraw() } }) } function exportDiagram() { const provisionalFileName = diagram.diagramName FileUtils.saveToFile(provisionalFileName, diagramJSON, ".json", (fileName) => { diagram.diagramName = fileName updateJSONFromDiagram() unsaved = false }) } function saveDiagram() { if (diagram.diagramName.toLowerCase().startsWith("untitled")) { alert("Please name the diagram first by clicking on the diagram name") return } const timestamp = new Date().toISOString() backend.addItem({ collageUUID, diagram, userID, timestamp }) unsaved = false console.log("sent to 
server", diagram) } function clearDiagram() { if (!confirm("Clear diagram?")) return diagram.elements = [] updateJSONFromDiagram() lastClickPosition = {x: delta, y: delta} } /* function loadDiagram() { const diagramName = prompt("Load which diagram name?", diagram.diagramName) if (!diagramName) return const items = Twirlip7.findItem({entity: diagramName, attribute: "contents"}) if (items.length === 0) { console.log("item not found", diagramName) return } const item = items[0] diagramJSON = item.value updateDiagramFromJSON() m.redraw() } */ /* function viewImportExportPanel() { return m("div.ma1", m("button.ma1", { onclick: importDiagram }, "Import Diagram"), m("button.ma1", { onclick: exportDiagram }, "Export Diagram"), m("button.ma1", { onclick: saveDiagram }, "Save to server"), // m("button.ma1", { onclick: loadDiagram }, "Load"), ) } function viewCheckboxesPanel() { return m("div.ma1", m("input[type=checkbox].ma1", { checked: isItemPanelDisplayed, onchange: event => isItemPanelDisplayed = event.target.checked }), "Edit Item", m("input[type=checkbox].ma1.ml3", { checked: isJSONPanelDisplayed, onchange: event => isJSONPanelDisplayed = event.target.checked }), m("span", "Edit Diagram as JSON"), m("input[type=checkbox].ma1.ml3", { checked: isImportOutlinePanelDisplayed, onchange: event => isImportOutlinePanelDisplayed = event.target.checked }), m("span", "Import outline"), ) } function viewJSONPanel() { return m("div", isJSONPanelDisplayed ? [ m("div", "JSON:"), m("textarea.w-100", { height: "20rem", value: diagramJSON, oninput: (event) => diagramJSON = event.target.value }), m("button.ma1", { onclick: updateDiagramFromJSON }, "Update Diagram from JSON"), ] : [], ) } function viewOutlinePanel() { return m("div", isImportOutlinePanelDisplayed ? 
[ m("div", "Outline:"), m("textarea.w-100", { height: "20rem", value: outlineText, oninput: (event) => outlineText = event.target.value }), m("br"), m("button.ma1", { onclick: updateDiagramFromIndentedTextOutline }, "Parse itIBIS outline"), m("button.ma1", { onclick: updateDiagramFromLabeledOutline }, "Parse labeled outline"), m("button.ma1", { onclick: clearDiagram }, "Clear diagram"), ] : [] ) } function changeDiagramName() { const newDiagramName = prompt("Diagram name?", diagram.diagramName) if (newDiagramName) diagram.diagramName = newDiagramName } // { extraStyling: ".bg-blue.br4", title: () => "IBIS Diagram for: " + diagram.diagramName } function view() { return m("div.bg-blue.br4.pa3.h-100.w-100.flex.flex-column.overflow-hidden", m("div.flex-none", m("span", "Issue Based Information System (IBIS) for Dialogue Mapping"), " -- ", m("span", { onclick: changeDiagramName, title: "Click to change diagram name" }, diagram.diagramName), " ", m("span", { onclick: () => { diagram.width = prompt("New diagram width?", diagram.width) || diagram.width updateJSONFromDiagram() }, title: "Diagram width -- click to change" }, diagram.width), " X ", m("span", { onclick: () => { diagram.height = prompt("New diagram height?", diagram.height) || diagram.height updateJSONFromDiagram() }, title: "Diagram height -- click to change" }, diagram.height), unsaved ? 
" [UNSAVED]" : "" ), m("div.mt1.mb1.flex-none", m("button.ma1.pa1", { onclick: addElement.bind(null, "issue", null, null) }, m("img.v-mid.mr1", { src: CompendiumIcons.issue_png, style: "width: 16px; height: 16px;" }), "Question" ), m("button.ma1.pa1", { onclick: addElement.bind(null, "position", null, null) }, m("img.v-mid.mr1", { src: CompendiumIcons.position_png, style: "width: 16px; height: 16px;" }), "Idea" ), m("button.ma1.pa1", { onclick: addElement.bind(null, "plus", null, null) }, m("img.v-mid.mr1", { src: CompendiumIcons.plus_png, style: "width: 16px; height: 16px;" }), "Pro" ), m("button.ma1.pa1", { onclick: addElement.bind(null, "minus", null, null) }, m("img.v-mid.mr1", { src: CompendiumIcons.minus_png, style: "width: 16px; height: 16px;" }), "Con" ), m("button.ma1.pa1", { onclick: deleteElement }, "Delete"), m("button.ma1.pa1", { onclick: addLink }, "Link <--*"), m("button.ma1.pa1", { onclick: deleteLink }, "Unlink *"), m("select.ma1.pa1", { onchange: (event) => diagram.textLocation = event.target.value }, m("option", { value: "right", selected: diagram.textLocation === "right" }, "text at right"), m("option", { value: "bottom", selected: diagram.textLocation === "bottom" || !diagram.textLocation }, "text at bottom"), ) ), m("div.flex-auto.overflow-auto", [ // on keydown does not seem to work here m("svg.diagram.ba", { width: diagram.width, height: diagram.height, onmousedown: onmousedownBackground, onmousemove: onmousemoveBackground, onmouseup: onmouseupBackground, onkeydown: onkeydown }, [ viewArrowhead(), diagram.elements.map(element => viewLink(element)), diagram.elements.map(element => viewElement(element)), ]), ]), viewImportExportPanel(), viewCheckboxesPanel(), viewItemPanel(), viewJSONPanel(), viewOutlinePanel() ) } */ const TwirlipCollageApp = { view: () => m("div", "Hello Collage ", collageUUID) } const diagramResponder = { onLoaded: () => console.log("onLoaded"), onAddItem: (item) => { console.log("onAddItem", item) } } const { 
uuidChangedByApp, getUUID } = HashUUIDTracker("collageUUID", (uuid) => { // Called every time UUID changed from hash in the URL collageUUID = uuid backend.configure({collageUUID: collageUUID}) }) collageUUID = getUUID() const backend = StoreUsingServer(m.redraw, {collageUUID: collageUUID}, userID) backend.connect(diagramResponder) try { backend.setup() } catch(e) { alert("This Collage app requires a backend server supporting socket.io (i.e. won't work correctly on rawgit)") } m.mount(document.body, TwirlipCollageApp)
college: first cut at displaying items and adding them
src/ui/collage.js
college: first cut at displaying items and adding them
<ide><path>rc/ui/collage.js <ide> <ide> import { StoreUsingServer } from "./StoreUsingServer.js" <ide> import { HashUUIDTracker } from "./HashUUIDTracker.js" <add>import { Pointrel20190914 } from "./Pointrel20190914.js" <add>import { CanonicalJSON } from "./CanonicalJSON.js" <add>import { UUID } from "./UUID.js" <add> <add>const p = new Pointrel20190914() <ide> <ide> // import { FileUtils } from "./FileUtils.js" <ide> // import { UUID } from "./UUID.js" <ide> <ide> */ <ide> <add>class Item { <add> constructor(uuid) { <add> this.uuid = uuid || UUID.uuidv4() <add> } <add>} <add> <add>class Collage { <add> constructor(uuid) { <add> this.uuid = {collageUUID: uuid} <add> } <add> <add> getItems() { <add> const result = [] <add> const bcMap = p.findBC(this.uuid, "item") <add> for (let key in bcMap) { <add> const uuid = bcMap[key] <add> if (uuid) result.push(new Item(uuid)) <add> } <add> return result <add> } <add> <add> async addItem(item, summary) { <add> // Keep a copy of essential information <add> // TODO: if (summary) p.addTriple(getOrganizerName(), {itemSummary: item.uuid}, summary) <add> await p.addTripleAsync(this.uuid, {item: item.uuid}, item.uuid) <add> } <add> <add> deleteItem(item) { <add> p.addTriple(this.uuid, {item: item.uuid}, null) <add> } <add> <add> /* <add> getSummaryForItem(uuid) { <add> return p.findC(this.uuid, {itemSummary: uuid}) <add> } <add> */ <add> <add> view() { <add> const items = this.getItems() <add> return m("div", <add> loading <add> ? 
m("div", "Loading...") <add> : [ <add> items.length === 0 && m("div", "No items"), <add> items.map(item => m("div", item.uuid)), <add> m("button", {onclick: () => this.addItem(new Item())}, "Add item") <add> ] <add> ) <add> } <add>} <add> <ide> const TwirlipCollageApp = { <del> view: () => m("div", "Hello Collage ", collageUUID) <del>} <del> <del>const diagramResponder = { <del> onLoaded: () => console.log("onLoaded"), <del> onAddItem: (item) => { <del> console.log("onAddItem", item) <del> } <add> view: () => m("div", "Hello Collage ", collageUUID, <add> new Collage(collageUUID).view() <add> ) <ide> } <ide> <ide> const { uuidChangedByApp, getUUID } = HashUUIDTracker("collageUUID", (uuid) => { <ide> // Called every time UUID changed from hash in the URL <ide> collageUUID = uuid <del> backend.configure({collageUUID: collageUUID}) <ide> }) <ide> <ide> collageUUID = getUUID() <ide> <del>const backend = StoreUsingServer(m.redraw, {collageUUID: collageUUID}, userID) <del> <del>backend.connect(diagramResponder) <del>try { <del> backend.setup() <del>} catch(e) { <del> alert("This Collage app requires a backend server supporting socket.io (i.e. won't work correctly on rawgit)") <del>} <add>// TODO: optimize exessive stringify use <add>function isUUIDMatch(a, b) { <add> return CanonicalJSON.stringify(a) === CanonicalJSON.stringify(b) <add>} <add> <add>let loading = true <add> <add>p.connect({ <add> onLoaded: (streamId) => { <add> console.log("p onloaded", streamId) <add> if (isUUIDMatch(streamId, {collageUUID: collageUUID})) { <add> loading = false <add> } <add> } <add>}) <ide> <ide> m.mount(document.body, TwirlipCollageApp)
JavaScript
mit
569b5c70a2d61a03c1645951eac84de455a0d072
0
blakeembrey/pluralize
/* global define */ (function (root, pluralize) { /* istanbul ignore else */ if (typeof require === 'function' && typeof exports === 'object' && typeof module === 'object') { // Node. module.exports = pluralize(); } else if (typeof define === 'function' && define.amd) { // AMD, registers as an anonymous module. define(function () { return pluralize(); }); } else { // Browser global. root.pluralize = pluralize(); } })(this, function () { // Rule storage - pluralize and singularize need to be run sequentially, // while other rules can be optimized using an object for instant lookups. var pluralRules = []; var singularRules = []; var uncountables = {}; var irregularPlurals = {}; var irregularSingles = {}; /** * Title case a string. * * @param {string} str * @return {string} */ function toTitleCase (str) { return str.charAt(0).toUpperCase() + str.substr(1).toLowerCase(); } /** * Sanitize a pluralization rule to a usable regular expression. * * @param {(RegExp|string)} rule * @return {RegExp} */ function sanitizeRule (rule) { if (typeof rule === 'string') { return new RegExp('^' + rule + '$', 'i'); } return rule; } /** * Pass in a word token to produce a function that can replicate the case on * another word. * * @param {string} word * @param {string} token * @return {Function} */ function restoreCase (word, token) { // Upper cased words. E.g. "HELLO". if (word === word.toUpperCase()) { return token.toUpperCase(); } // Title cased words. E.g. "Title". if (word[0] === word[0].toUpperCase()) { return toTitleCase(token); } // Lower cased words. E.g. "test". return token.toLowerCase(); } /** * Interpolate a regexp string. * * @param {string} str * @param {Array} args * @return {string} */ function interpolate (str, args) { return str.replace(/\$(\d{1,2})/g, function (match, index) { return args[index] || ''; }); } /** * Sanitize a word by passing in the word and sanitization rules. 
* * @param {String} token * @param {String} word * @param {Array} collection * @return {String} */ function sanitizeWord (token, word, collection) { // Empty string or doesn't need fixing. if (!token.length || uncountables.hasOwnProperty(token)) { return word; } var len = collection.length; // Iterate over the sanitization rules and use the first one to match. while (len--) { var rule = collection[len]; // If the rule passes, return the replacement. if (rule[0].test(word)) { return word.replace(rule[0], function (match, index, word) { var result = interpolate(rule[1], arguments); if (match === '') { return restoreCase(word[index - 1], result); } return restoreCase(match, result); }); } } return word; } /** * Replace a word with the updated word. * * @param {Object} replaceMap * @param {Object} keepMap * @param {Array} rules * @return {Function} */ function replaceWord (replaceMap, keepMap, rules) { return function (word) { // Get the correct token and case restoration functions. var token = word.toLowerCase(); // Check against the keep object map. if (keepMap.hasOwnProperty(token)) { return restoreCase(word, token); } // Check against the replacement map for a direct word replacement. if (replaceMap.hasOwnProperty(token)) { return restoreCase(word, replaceMap[token]); } // Run all the rules against the word. return sanitizeWord(token, word, rules); }; } /** * Pluralize or singularize a word based on the passed in count. * * @param {String} word * @param {Number} count * @param {Boolean} inclusive * @return {String} */ function pluralize (word, count, inclusive) { var pluralized = count === 1 ? pluralize.singular(word) : pluralize.plural(word); return (inclusive ? count + ' ' : '') + pluralized; } /** * Pluralize a word. * * @type {Function} */ pluralize.plural = replaceWord( irregularSingles, irregularPlurals, pluralRules ); /** * Singularize a word. 
* * @type {Function} */ pluralize.singular = replaceWord( irregularPlurals, irregularSingles, singularRules ); /** * Add a pluralization rule to the collection. * * @param {(string|RegExp)} rule * @param {string} replacement */ pluralize.addPluralRule = function (rule, replacement) { pluralRules.push([sanitizeRule(rule), replacement]); }; /** * Add a singularization rule to the collection. * * @param {(string|RegExp)} rule * @param {string} replacement */ pluralize.addSingularRule = function (rule, replacement) { singularRules.push([sanitizeRule(rule), replacement]); }; /** * Add an uncountable word rule. * * @param {(string|RegExp)} word */ pluralize.addUncountableRule = function (word) { if (typeof word === 'string') { uncountables[word.toLowerCase()] = true; return; } // Set singular and plural references for the word. pluralize.addPluralRule(word, '$0'); pluralize.addSingularRule(word, '$0'); }; /** * Add an irregular word definition. * * @param {String} single * @param {String} plural */ pluralize.addIrregularRule = function (single, plural) { plural = plural.toLowerCase(); single = single.toLowerCase(); irregularSingles[single] = plural; irregularPlurals[plural] = single; }; /** * Irregular rules. */ [ // Pronouns. ['I', 'we'], ['me', 'us'], ['he', 'they'], ['she', 'they'], ['them', 'them'], ['myself', 'ourselves'], ['yourself', 'yourselves'], ['itself', 'themselves'], ['herself', 'themselves'], ['himself', 'themselves'], ['themself', 'themselves'], ['is', 'are'], ['this', 'these'], ['that', 'those'], // Words ending in with a consonant and `o`. ['echo', 'echoes'], ['dingo', 'dingoes'], ['volcano', 'volcanoes'], ['tornado', 'tornadoes'], ['torpedo', 'torpedoes'], // Ends with `us`. ['genus', 'genera'], ['viscus', 'viscera'], // Ends with `ma`. ['stigma', 'stigmata'], ['stoma', 'stomata'], ['dogma', 'dogmata'], ['lemma', 'lemmata'], ['schema', 'schemata'], ['anathema', 'anathemata'], // Other irregular rules. 
['ox', 'oxen'], ['axe', 'axes'], ['die', 'dice'], ['yes', 'yeses'], ['foot', 'feet'], ['eave', 'eaves'], ['goose', 'geese'], ['tooth', 'teeth'], ['quiz', 'quizzes'], ['human', 'humans'], ['proof', 'proofs'], ['carve', 'carves'], ['valve', 'valves'], ['thief', 'thieves'], ['genie', 'genies'], ['groove', 'grooves'], ['pickaxe', 'pickaxes'], ['whiskey', 'whiskies'] ].forEach(function (rule) { return pluralize.addIrregularRule(rule[0], rule[1]); }); /** * Pluralization rules. */ [ [/s?$/i, 's'], [/([^aeiou]ese)$/i, '$1'], [/(ax|test)is$/i, '$1es'], [/(alias|[^aou]us|tlas|gas|ris)$/i, '$1es'], [/(e[mn]u)s?$/i, '$1s'], [/([^l]ias|[aeiou]las|[emjzr]as|[iu]am)$/i, '$1'], [/(alumn|syllab|octop|vir|radi|nucle|fung|cact|stimul|termin|bacill|foc|uter|loc|strat)(?:us|i)$/i, '$1i'], [/(alumn|alg|vertebr)(?:a|ae)$/i, '$1ae'], [/(seraph|cherub)(?:im)?$/i, '$1im'], [/(her|at|gr)o$/i, '$1oes'], [/(agend|addend|millenni|dat|extrem|bacteri|desiderat|strat|candelabr|errat|ov|symposi|curricul|automat|quor)(?:a|um)$/i, '$1a'], [/(apheli|hyperbat|periheli|asyndet|noumen|phenomen|criteri|organ|prolegomen|hedr|automat)(?:a|on)$/i, '$1a'], [/sis$/i, 'ses'], [/(?:(kni|wi|li)fe|(ar|l|ea|eo|oa|hoo)f)$/i, '$1$2ves'], [/([^aeiouy]|qu)y$/i, '$1ies'], [/([^ch][ieo][ln])ey$/i, '$1ies'], [/(x|ch|ss|sh|zz)$/i, '$1es'], [/(matr|cod|mur|sil|vert|ind|append)(?:ix|ex)$/i, '$1ices'], [/(m|l)(?:ice|ouse)$/i, '$1ice'], [/(pe)(?:rson|ople)$/i, '$1ople'], [/(child)(?:ren)?$/i, '$1ren'], [/eaux$/i, '$0'], [/m[ae]n$/i, 'men'], ['thou', 'you'] ].forEach(function (rule) { return pluralize.addPluralRule(rule[0], rule[1]); }); /** * Singularization rules. 
*/ [ [/s$/i, ''], [/(ss)$/i, '$1'], [/((a)naly|(b)a|(d)iagno|(p)arenthe|(p)rogno|(s)ynop|(t)he)(?:sis|ses)$/i, '$1sis'], [/(^analy)(?:sis|ses)$/i, '$1sis'], [/(wi|kni|(?:after|half|high|low|mid|non|night|[^\w]|^)li)ves$/i, '$1fe'], [/(ar|(?:wo|[ae])l|[eo][ao])ves$/i, '$1f'], [/([^aeiouy]|qu)ies$/i, '$1y'], [/(^[pl]|zomb|^(?:neck)?t|[aeo][lt]|cut)ies$/i, '$1ie'], [/(\b(?:mon|smil))ies$/i, '$1ey'], [/(m|l)ice$/i, '$1ouse'], [/(seraph|cherub)im$/i, '$1'], [/(x|ch|ss|sh|zz|tto|go|cho|alias|[^aou]us|tlas|gas|(?:her|at|gr)o|ris)(?:es)?$/i, '$1'], [/(e[mn]u)s?$/i, '$1'], [/(movie|twelve)s$/i, '$1'], [/(cris|test|diagnos)(?:is|es)$/i, '$1is'], [/(alumn|syllab|octop|vir|radi|nucle|fung|cact|stimul|termin|bacill|foc|uter|loc|strat)(?:us|i)$/i, '$1us'], [/(agend|addend|millenni|dat|extrem|bacteri|desiderat|strat|candelabr|errat|ov|symposi|curricul|quor)a$/i, '$1um'], [/(apheli|hyperbat|periheli|asyndet|noumen|phenomen|criteri|organ|prolegomen|hedr|automat)a$/i, '$1on'], [/(alumn|alg|vertebr)ae$/i, '$1a'], [/(cod|mur|sil|vert|ind)ices$/i, '$1ex'], [/(matr|append)ices$/i, '$1ix'], [/(pe)(rson|ople)$/i, '$1rson'], [/(child)ren$/i, '$1'], [/(eau)x?$/i, '$1'], [/men$/i, 'man'] ].forEach(function (rule) { return pluralize.addSingularRule(rule[0], rule[1]); }); /** * Uncountable rules. */ [ // Check with https://en.wiktionary.org/wiki/{{word}} // Singular words with no plurals. 
'agenda', 'ammo', 'athletics', 'bison', 'bream', 'buffalo', 'carp', 'cash', 'chassis', 'clothing', 'cooperation', 'corps', 'digestion', 'debris', 'diabetes', 'excretion', 'expertise', 'flounder', 'fun', 'gallows', 'garbage', 'graffiti', 'headquarters', 'health', 'herpes', 'highjinks', 'homework', 'housework', 'information', 'jeans', 'justice', 'kudos', 'labour', 'literature', 'machinery', 'mackerel', 'media', 'mews', 'moose', 'music', 'news', 'pike', 'plankton', 'pliers', 'pollution', 'premises', 'rain', 'research', 'rice', 'salmon', 'scissors', 'series', 'sewage', 'shambles', 'shrimp', 'species', 'staff', 'swine', 'trout', 'traffic', 'transporation', 'tuna', 'wealth', 'welfare', 'whiting', 'wildebeest', 'wildlife', 'you', // Regexes. /pox$/i, // "chickpox", "smallpox" /ois$/i, /deer$/i, // "deer", "reindeer" /fish$/i, // "fish", "blowfish", "angelfish" /sheep$/i, /measles$/i, /[^aeiou]ese$/i // "chinese", "japanese" ].forEach(pluralize.addUncountableRule); return pluralize; });
pluralize.js
/* global define */ (function (root, pluralize) { /* istanbul ignore else */ if (typeof require === 'function' && typeof exports === 'object' && typeof module === 'object') { // Node. module.exports = pluralize(); } else if (typeof define === 'function' && define.amd) { // AMD, registers as an anonymous module. define(function () { return pluralize(); }); } else { // Browser global. root.pluralize = pluralize(); } })(this, function () { // Rule storage - pluralize and singularize need to be run sequentially, // while other rules can be optimized using an object for instant lookups. var pluralRules = []; var singularRules = []; var uncountables = {}; var irregularPlurals = {}; var irregularSingles = {}; /** * Title case a string. * * @param {string} str * @return {string} */ function toTitleCase (str) { return str.charAt(0).toUpperCase() + str.substr(1).toLowerCase(); } /** * Sanitize a pluralization rule to a usable regular expression. * * @param {(RegExp|string)} rule * @return {RegExp} */ function sanitizeRule (rule) { if (typeof rule === 'string') { return new RegExp('^' + rule + '$', 'i'); } return rule; } /** * Pass in a word token to produce a function that can replicate the case on * another word. * * @param {string} word * @param {string} token * @return {Function} */ function restoreCase (word, token) { // Upper cased words. E.g. "HELLO". if (word === word.toUpperCase()) { return token.toUpperCase(); } // Title cased words. E.g. "Title". if (word[0] === word[0].toUpperCase()) { return toTitleCase(token); } // Lower cased words. E.g. "test". return token.toLowerCase(); } /** * Interpolate a regexp string. * * @param {string} str * @param {Array} args * @return {string} */ function interpolate (str, args) { return str.replace(/\$(\d{1,2})/g, function (match, index) { return args[index] || ''; }); } /** * Sanitize a word by passing in the word and sanitization rules. 
* * @param {String} token * @param {String} word * @param {Array} collection * @return {String} */ function sanitizeWord (token, word, collection) { // Empty string or doesn't need fixing. if (!token.length || uncountables.hasOwnProperty(token)) { return word; } var len = collection.length; // Iterate over the sanitization rules and use the first one to match. while (len--) { var rule = collection[len]; // If the rule passes, return the replacement. if (rule[0].test(word)) { return word.replace(rule[0], function (match, index, word) { var result = interpolate(rule[1], arguments); if (match === '') { return restoreCase(word[index - 1], result); } return restoreCase(match, result); }); } } return word; } /** * Replace a word with the updated word. * * @param {Object} replaceMap * @param {Object} keepMap * @param {Array} rules * @return {Function} */ function replaceWord (replaceMap, keepMap, rules) { return function (word) { // Get the correct token and case restoration functions. var token = word.toLowerCase(); // Check against the keep object map. if (keepMap.hasOwnProperty(token)) { return restoreCase(word, token); } // Check against the replacement map for a direct word replacement. if (replaceMap.hasOwnProperty(token)) { return restoreCase(word, replaceMap[token]); } // Run all the rules against the word. return sanitizeWord(token, word, rules); }; } /** * Pluralize or singularize a word based on the passed in count. * * @param {String} word * @param {Number} count * @param {Boolean} inclusive * @return {String} */ function pluralize (word, count, inclusive) { var pluralized = count === 1 ? pluralize.singular(word) : pluralize.plural(word); return (inclusive ? count + ' ' : '') + pluralized; } /** * Pluralize a word. * * @type {Function} */ pluralize.plural = replaceWord( irregularSingles, irregularPlurals, pluralRules ); /** * Singularize a word. 
* * @type {Function} */ pluralize.singular = replaceWord( irregularPlurals, irregularSingles, singularRules ); /** * Add a pluralization rule to the collection. * * @param {(string|RegExp)} rule * @param {string} replacement */ pluralize.addPluralRule = function (rule, replacement) { pluralRules.push([sanitizeRule(rule), replacement]); }; /** * Add a singularization rule to the collection. * * @param {(string|RegExp)} rule * @param {string} replacement */ pluralize.addSingularRule = function (rule, replacement) { singularRules.push([sanitizeRule(rule), replacement]); }; /** * Add an uncountable word rule. * * @param {(string|RegExp)} word */ pluralize.addUncountableRule = function (word) { if (typeof word === 'string') { uncountables[word.toLowerCase()] = true; return; } // Set singular and plural references for the word. pluralize.addPluralRule(word, '$0'); pluralize.addSingularRule(word, '$0'); }; /** * Add an irregular word definition. * * @param {String} single * @param {String} plural */ pluralize.addIrregularRule = function (single, plural) { plural = plural.toLowerCase(); single = single.toLowerCase(); irregularSingles[single] = plural; irregularPlurals[plural] = single; }; /** * Irregular rules. */ [ // Pronouns. ['I', 'we'], ['me', 'us'], ['he', 'they'], ['she', 'they'], ['them', 'them'], ['myself', 'ourselves'], ['yourself', 'yourselves'], ['itself', 'themselves'], ['herself', 'themselves'], ['himself', 'themselves'], ['themself', 'themselves'], ['is', 'are'], ['this', 'these'], ['that', 'those'], // Words ending in with a consonant and `o`. ['echo', 'echoes'], ['dingo', 'dingoes'], ['volcano', 'volcanoes'], ['tornado', 'tornadoes'], ['torpedo', 'torpedoes'], // Ends with `us`. ['genus', 'genera'], ['viscus', 'viscera'], // Ends with `ma`. ['stigma', 'stigmata'], ['stoma', 'stomata'], ['dogma', 'dogmata'], ['lemma', 'lemmata'], ['schema', 'schemata'], ['anathema', 'anathemata'], // Other irregular rules. 
['ox', 'oxen'], ['axe', 'axes'], ['die', 'dice'], ['yes', 'yeses'], ['foot', 'feet'], ['eave', 'eaves'], ['goose', 'geese'], ['tooth', 'teeth'], ['quiz', 'quizzes'], ['human', 'humans'], ['proof', 'proofs'], ['carve', 'carves'], ['valve', 'valves'], ['thief', 'thieves'], ['genie', 'genies'], ['groove', 'grooves'], ['pickaxe', 'pickaxes'], ['whiskey', 'whiskies'] ].forEach(function (rule) { return pluralize.addIrregularRule(rule[0], rule[1]); }); /** * Pluralization rules. */ [ [/s?$/i, 's'], [/([^aeiou]ese)$/i, '$1'], [/(ax|test)is$/i, '$1es'], [/(alias|[^aou]us|tlas|gas|ris)$/i, '$1es'], [/(e[mn]u)s?$/i, '$1s'], [/([^l]ias|[aeiou]las|[emjzr]as|[iu]am)$/i, '$1'], [/(alumn|syllab|octop|vir|radi|nucle|fung|cact|stimul|termin|bacill|foc|uter|loc|strat)(?:us|i)$/i, '$1i'], [/(alumn|alg|vertebr)(?:a|ae)$/i, '$1ae'], [/(seraph|cherub)(?:im)?$/i, '$1im'], [/(her|at|gr)o$/i, '$1oes'], [/(agend|addend|millenni|dat|extrem|bacteri|desiderat|strat|candelabr|errat|ov|symposi|curricul|automat|quor)(?:a|um)$/i, '$1a'], [/(apheli|hyperbat|periheli|asyndet|noumen|phenomen|criteri|organ|prolegomen|hedr|automat)(?:a|on)$/i, '$1a'], [/sis$/i, 'ses'], [/(?:(kni|wi|li)fe|(ar|l|ea|eo|oa|hoo)f)$/i, '$1$2ves'], [/([^aeiouy]|qu)y$/i, '$1ies'], [/([^ch][ieo][ln])ey$/i, '$1ies'], [/(x|ch|ss|sh|zz)$/i, '$1es'], [/(matr|cod|mur|sil|vert|ind|append)(?:ix|ex)$/i, '$1ices'], [/(m|l)(?:ice|ouse)$/i, '$1ice'], [/(pe)(?:rson|ople)$/i, '$1ople'], [/(child)(?:ren)?$/i, '$1ren'], [/eaux$/i, '$0'], [/m[ae]n$/i, 'men'], ['thou', 'you'] ].forEach(function (rule) { return pluralize.addPluralRule(rule[0], rule[1]); }); /** * Singularization rules. 
*/ [ [/s$/i, ''], [/(ss)$/i, '$1'], [/((a)naly|(b)a|(d)iagno|(p)arenthe|(p)rogno|(s)ynop|(t)he)(?:sis|ses)$/i, '$1sis'], [/(^analy)(?:sis|ses)$/i, '$1sis'], [/(wi|kni|(?:after|half|high|low|mid|non|night|[^\w]|^)li)ves$/i, '$1fe'], [/(ar|(?:wo|[ae])l|[eo][ao])ves$/i, '$1f'], [/([^aeiouy]|qu)ies$/i, '$1y'], [/(^[pl]|zomb|^(?:neck)?t|[aeo][lt]|cut)ies$/i, '$1ie'], [/(\b(?:mon|smil))ies$/i, '$1ey'], [/(m|l)ice$/i, '$1ouse'], [/(seraph|cherub)im$/i, '$1'], [/(x|ch|ss|sh|zz|tto|go|cho|alias|[^aou]us|tlas|gas|(?:her|at|gr)o|ris)(?:es)?$/i, '$1'], [/(e[mn]u)s?$/i, '$1'], [/(movie|twelve)s$/i, '$1'], [/(cris|test|diagnos)(?:is|es)$/i, '$1is'], [/(alumn|syllab|octop|vir|radi|nucle|fung|cact|stimul|termin|bacill|foc|uter|loc|strat)(?:us|i)$/i, '$1us'], [/(agend|addend|millenni|dat|extrem|bacteri|desiderat|strat|candelabr|errat|ov|symposi|curricul|quor)a$/i, '$1um'], [/(apheli|hyperbat|periheli|asyndet|noumen|phenomen|criteri|organ|prolegomen|hedr|automat)a$/i, '$1on'], [/(alumn|alg|vertebr)ae$/i, '$1a'], [/(cod|mur|sil|vert|ind)ices$/i, '$1ex'], [/(matr|append)ices$/i, '$1ix'], [/(pe)(rson|ople)$/i, '$1rson'], [/(child)ren$/i, '$1'], [/(eau)x?$/i, '$1'], [/men$/i, 'man'] ].forEach(function (rule) { return pluralize.addSingularRule(rule[0], rule[1]); }); /** * Uncountable rules. */ [ // Singular words with no plurals. 
'access', 'accommodation', 'adulthood', 'advertising', 'advice', 'agenda', 'aggression', 'aid', 'alcohol', 'ammo', 'anger', 'applause', 'arithmetic', 'art', 'assistance', 'athletics', 'attention', 'baggage', 'beef', 'beer', 'biology', 'bison', 'blood', 'botany', 'bream', 'buffalo', 'butter', 'carp', 'cash', 'chassis', 'childhood', 'clothing', 'cod', 'content', 'cooperation', 'corps', 'danger', 'digestion', 'debris', 'diabetes', 'energy', 'equipment', 'elk', 'excretion', 'expertise', 'flounder', 'gallows', 'garbage', 'graffiti', 'headquarters', 'health', 'herpes', 'highjinks', 'homework', 'importance', 'information', 'jeans', 'justice', 'kudos', 'labour', 'literature', 'machinery', 'mackerel', 'media', 'mews', 'moose', 'music', 'news', 'pike', 'plankton', 'pliers', 'pollution', 'premises', 'rain', 'research', 'rice', 'salmon', 'scissors', 'series', 'sewage', 'shambles', 'shrimp', 'species', 'staff', 'swine', 'trout', 'traffic', 'transporation', 'tuna', 'wealth', 'welfare', 'whiting', 'wildebeest', 'wildlife', 'you', // Regexes. /pox$/i, // "chickpox", "smallpox" /ois$/i, /deer$/i, // "deer", "reindeer" /fish$/i, // "fish", "blowfish", "angelfish" /sheep$/i, /measles$/i, /[^aeiou]ese$/i // "chinese", "japanese" ].forEach(pluralize.addUncountableRule); return pluralize; });
Removed some words
pluralize.js
Removed some words
<ide><path>luralize.js <ide> * Uncountable rules. <ide> */ <ide> [ <add> // Check with https://en.wiktionary.org/wiki/{{word}} <ide> // Singular words with no plurals. <del> 'access', <del> 'accommodation', <del> 'adulthood', <del> 'advertising', <del> 'advice', <ide> 'agenda', <del> 'aggression', <del> 'aid', <del> 'alcohol', <ide> 'ammo', <del> 'anger', <del> 'applause', <del> 'arithmetic', <del> 'art', <del> 'assistance', <ide> 'athletics', <del> 'attention', <del> 'baggage', <del> 'beef', <del> 'beer', <del> 'biology', <ide> 'bison', <del> 'blood', <del> 'botany', <ide> 'bream', <ide> 'buffalo', <del> 'butter', <ide> 'carp', <ide> 'cash', <ide> 'chassis', <del> 'childhood', <ide> 'clothing', <del> 'cod', <del> 'content', <ide> 'cooperation', <ide> 'corps', <del> 'danger', <ide> 'digestion', <ide> 'debris', <ide> 'diabetes', <del> 'energy', <del> 'equipment', <del> 'elk', <ide> 'excretion', <ide> 'expertise', <ide> 'flounder', <add> 'fun', <ide> 'gallows', <ide> 'garbage', <ide> 'graffiti', <ide> 'herpes', <ide> 'highjinks', <ide> 'homework', <del> 'importance', <add> 'housework', <ide> 'information', <ide> 'jeans', <ide> 'justice',
Java
apache-2.0
af6b6d0ff03d03db37023939d87ce7cc6020d9c2
0
apache/jackrabbit,apache/jackrabbit,apache/jackrabbit
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.test.api.query; import org.apache.jackrabbit.test.NotExecutableException; import javax.jcr.RepositoryException; import javax.jcr.Node; import javax.jcr.Repository; /** * Tests the text() node test in XPath. * * @tck.config testroot path to node that allows child nodes of type * <code>nodetype</code>. The node at <code>testroot</code> must allow child * nodes with name jcr:xmltext. Assignment of node type for that child node must * be determined by the child node definition. That is, the test will create the * node with {@link javax.jcr.Node#addNode(String)}, without giving an explicit * node type. * @tck.config nodetype name of a node type for nodes under * <code>testroot</code>. This node type must allow child nodes with name * jcr:xmltext. Assignment of node type for that child node must be determined * by the child node definition. That is, the test will create the node with * {@link javax.jcr.Node#addNode(String)}, without giving an explicit node * type. * @tck.config nodename1 name of a child node under <code>testroot</code>. 
* * @test * @sources TextNodeTest.java * @executeClass org.apache.jackrabbit.test.api.query.TextNodeTest * @keywords textNodeTest */ public class TextNodeTest extends AbstractQueryTest { /** Resolved QName for jcr:xmltext */ private String jcrXMLText; private String jcrXMLCharacters; protected void setUp() throws Exception { super.setUp(); jcrXMLText = superuser.getNamespacePrefix(NS_JCR_URI) + ":xmltext"; jcrXMLCharacters = superuser.getNamespacePrefix(NS_JCR_URI) + ":xmlcharacters"; } /** * Tests if text() node test is equivalent with jcr:xmltext. */ public void testTextNodeTest() throws RepositoryException { Node text1 = testRootNode.addNode(jcrXMLText); text1.setProperty(jcrXMLCharacters, "foo"); testRootNode.save(); String xpath = "/" + jcrRoot + testRoot + "/text()"; executeXPathQuery(superuser, xpath, new Node[]{text1}); } /** * Tests if text() node test is equivalent with jcr:xmltext and will select * multiple nodes with name jcr:xmltext. */ public void testTextNodeTestMultiNodes() throws RepositoryException { Node text1 = testRootNode.addNode(jcrXMLText); text1.setProperty(jcrXMLCharacters, "foo"); Node text2 = testRootNode.addNode(nodeName1, testNodeType).addNode(jcrXMLText); text2.setProperty(jcrXMLCharacters, "foo"); testRootNode.save(); String xpath = "/" + jcrRoot + testRoot + "//text()"; executeXPathQuery(superuser, xpath, new Node[]{text1, text2}); } /** * Tests if text() node test is equivalent with jcr:xmltext and jcr:contains * matches content in jcr:xmlcharacters property. 
*/ public void testTextNodeTestContains() throws RepositoryException { Node text1 = testRootNode.addNode(jcrXMLText); text1.setProperty(jcrXMLCharacters, "the quick brown fox jumps over the lazy dog."); Node text2 = testRootNode.addNode(nodeName1, testNodeType).addNode(jcrXMLText); text2.setProperty(jcrXMLCharacters, "java content repository"); testRootNode.save(); String xpath = "/" + jcrRoot + testRoot + "//text()[" + jcrContains + "(., 'fox')]"; executeXPathQuery(superuser, xpath, new Node[]{text1}); } /** * Tests text() node test with various position predicates: position(), * first(), last(). * @throws NotExecutableException if the repository does not support queries * with position inidex. */ public void testTextNodeTestWithPosition() throws RepositoryException, NotExecutableException { if (!isSupported(Repository.QUERY_XPATH_POS_INDEX)) { throw new NotExecutableException("Repository does not support position index"); } Node text1 = testRootNode.addNode(jcrXMLText); text1.setProperty(jcrXMLCharacters, "foo"); if (!text1.getDefinition().allowsSameNameSiblings()) { throw new NotExecutableException("Node at path: " + testRoot + " does not allow same name siblings with name: " + jcrXMLText); } testRootNode.addNode(nodeName1, testNodeType); Node text2 = testRootNode.addNode(jcrXMLText); text2.setProperty(jcrXMLCharacters, "foo"); testRootNode.save(); String xpath = "/" + jcrRoot + testRoot + "/text()[2]"; executeXPathQuery(superuser, xpath, new Node[]{text2}); xpath = "/" + jcrRoot + testRoot + "/text()[last()]"; executeXPathQuery(superuser, xpath, new Node[]{text2}); xpath = "/" + jcrRoot + testRoot + "/text()[position() = 2]"; executeXPathQuery(superuser, xpath, new Node[]{text2}); xpath = "/" + jcrRoot + testRoot + "/text()[first()]"; executeXPathQuery(superuser, xpath, new Node[]{text1}); } }
jackrabbit/src/test/java/org/apache/jackrabbit/test/api/query/TextNodeTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.test.api.query; import org.apache.jackrabbit.test.NotExecutableException; import javax.jcr.RepositoryException; import javax.jcr.Node; import javax.jcr.Repository; /** * Tests the text() node test in XPath. * * @tck.config testroot path to node that allows child nodes of type * <code>nodetype</code>. The node at <code>testroot</code> must allow child * nodes with name jcr:xmltext. Assignment of node type for that child node must * be determined by the child node definition. That is, the test will create the * node with {@link javax.jcr.Node#addNode(String)}, without giving an explicit * node type. * @tck.config nodetype name of a node type for nodes under * <code>testroot</code>. This node type must allow child nodes with name * jcr:xmltext. Assignment of node type for that child node must be determined * by the child node definition. That is, the test will create the node with * {@link javax.jcr.Node#addNode(String)}, without giving an explicit node * type. * @tck.config nodename1 name of a child node under <code>testroot</code>. 
* * @test * @sources TextNodeTest.java * @executeClass org.apache.jackrabbit.test.api.query.TextNodeTest * @keywords textNodeTest */ public class TextNodeTest extends AbstractQueryTest { /** Resolved QName for jcr:xmltext */ private String jcrXMLText; private String jcrXMLCharacters; protected void setUp() throws Exception { super.setUp(); jcrXMLText = superuser.getNamespacePrefix(NS_JCR_URI) + ":xmltext"; jcrXMLCharacters = superuser.getNamespacePrefix(NS_JCR_URI) + ":xmlcharacters"; } /** * Tests if text() node test is equivalent with jcr:xmltext. */ public void testTextNodeTest() throws RepositoryException { Node text1 = testRootNode.addNode(jcrXMLText); testRootNode.save(); String xpath = "/" + jcrRoot + testRoot + "/text()"; executeXPathQuery(superuser, xpath, new Node[]{text1}); } /** * Tests if text() node test is equivalent with jcr:xmltext and will select * multiple nodes with name jcr:xmltext. */ public void testTextNodeTestMultiNodes() throws RepositoryException { Node text1 = testRootNode.addNode(jcrXMLText); Node text2 = testRootNode.addNode(nodeName1, testNodeType).addNode(jcrXMLText); testRootNode.save(); String xpath = "/" + jcrRoot + testRoot + "//text()"; executeXPathQuery(superuser, xpath, new Node[]{text1, text2}); } /** * Tests if text() node test is equivalent with jcr:xmltext and jcr:contains * matches content in jcr:xmlcharacters property. */ public void testTextNodeTestContains() throws RepositoryException { Node text1 = testRootNode.addNode(jcrXMLText); text1.setProperty(jcrXMLCharacters, "the quick brown fox jumps over the lazy dog."); Node text2 = testRootNode.addNode(nodeName1, testNodeType).addNode(jcrXMLText); text2.setProperty(jcrXMLCharacters, "java content repository"); testRootNode.save(); String xpath = "/" + jcrRoot + testRoot + "//text()[" + jcrContains + "(., 'fox')]"; executeXPathQuery(superuser, xpath, new Node[]{text1}); } /** * Tests text() node test with various position predicates: position(), * first(), last(). 
* @throws NotExecutableException if the repository does not support queries * with position inidex. */ public void testTextNodeTestWithPosition() throws RepositoryException, NotExecutableException { if (!isSupported(Repository.QUERY_XPATH_POS_INDEX)) { throw new NotExecutableException("Repository does not support position index"); } Node text1 = testRootNode.addNode(jcrXMLText); if (!text1.getDefinition().allowsSameNameSiblings()) { throw new NotExecutableException("Node at path: " + testRoot + " does not allow same name siblings with name: " + jcrXMLText); } testRootNode.addNode(nodeName1, testNodeType); Node text2 = testRootNode.addNode(jcrXMLText); testRootNode.save(); String xpath = "/" + jcrRoot + testRoot + "/text()[2]"; executeXPathQuery(superuser, xpath, new Node[]{text2}); xpath = "/" + jcrRoot + testRoot + "/text()[last()]"; executeXPathQuery(superuser, xpath, new Node[]{text2}); xpath = "/" + jcrRoot + testRoot + "/text()[position() = 2]"; executeXPathQuery(superuser, xpath, new Node[]{text2}); xpath = "/" + jcrRoot + testRoot + "/text()[first()]"; executeXPathQuery(superuser, xpath, new Node[]{text1}); } }
JCR-525: TCK: TextNodeTest and jcr:xmltext/jcr:xmlcharacters git-svn-id: 02b679d096242155780e1604e997947d154ee04a@428377 13f79535-47bb-0310-9956-ffa450edef68
jackrabbit/src/test/java/org/apache/jackrabbit/test/api/query/TextNodeTest.java
JCR-525: TCK: TextNodeTest and jcr:xmltext/jcr:xmlcharacters
<ide><path>ackrabbit/src/test/java/org/apache/jackrabbit/test/api/query/TextNodeTest.java <ide> */ <ide> public void testTextNodeTest() throws RepositoryException { <ide> Node text1 = testRootNode.addNode(jcrXMLText); <add> text1.setProperty(jcrXMLCharacters, "foo"); <ide> testRootNode.save(); <ide> String xpath = "/" + jcrRoot + testRoot + "/text()"; <ide> executeXPathQuery(superuser, xpath, new Node[]{text1}); <ide> */ <ide> public void testTextNodeTestMultiNodes() throws RepositoryException { <ide> Node text1 = testRootNode.addNode(jcrXMLText); <add> text1.setProperty(jcrXMLCharacters, "foo"); <ide> Node text2 = testRootNode.addNode(nodeName1, testNodeType).addNode(jcrXMLText); <add> text2.setProperty(jcrXMLCharacters, "foo"); <ide> testRootNode.save(); <ide> String xpath = "/" + jcrRoot + testRoot + "//text()"; <ide> executeXPathQuery(superuser, xpath, new Node[]{text1, text2}); <ide> throw new NotExecutableException("Repository does not support position index"); <ide> } <ide> Node text1 = testRootNode.addNode(jcrXMLText); <add> text1.setProperty(jcrXMLCharacters, "foo"); <ide> if (!text1.getDefinition().allowsSameNameSiblings()) { <ide> throw new NotExecutableException("Node at path: " + testRoot + " does not allow same name siblings with name: " + jcrXMLText); <ide> } <ide> testRootNode.addNode(nodeName1, testNodeType); <ide> Node text2 = testRootNode.addNode(jcrXMLText); <add> text2.setProperty(jcrXMLCharacters, "foo"); <ide> testRootNode.save(); <ide> String xpath = "/" + jcrRoot + testRoot + "/text()[2]"; <ide> executeXPathQuery(superuser, xpath, new Node[]{text2});
Java
apache-2.0
11a9b9f171b99b104e9c55d5fd7a6793a36ae295
0
ryano144/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,ThiagoGarciaAlves/intellij-community,jagguli/intellij-community,TangHao1987/intellij-community,ernestp/consulo,kdwink/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,ahb0327/intellij-community,caot/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,adedayo/intellij-community,robovm/robovm-studio,slisson/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,michaelgallacher/intellij-community,petteyg/intellij-community,izonder/intellij-community,petteyg/intellij-community,kdwink/intellij-community,blademainer/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,holmes/intellij-community,vvv1559/intellij-community,jexp/idea2,youdonghai/intellij-community,pwoodworth/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,vladmm/intellij-community,retomerz/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,robovm/robovm-studio,orekyuu/intellij-community,TangHao1987/intellij-community,petteyg/intellij-community,dslomov/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,kdwink/intellij-community,samthor/intellij-community,kool79/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-
community,salguarnieri/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,fitermay/intellij-community,amith01994/intellij-community,holmes/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,fnouama/intellij-community,MichaelNedzelsky/intellij-community,samthor/intellij-community,ahb0327/intellij-community,jexp/idea2,MichaelNedzelsky/intellij-community,FHannes/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,kdwink/intellij-community,izonder/intellij-community,signed/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,lucafavatella/intellij-community,vladmm/intellij-community,ryano144/intellij-community,Distrotech/intellij-community,supersven/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,dslomov/intellij-community,dslomov/intellij-community,robovm/robovm-studio,ftomassetti/intellij-community,SerCeMan/intellij-community,lucafavatella/intellij-community,slisson/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,adedayo/intellij-community,asedunov/intellij-community,asedunov/intellij-community,ibinti/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,wreckJ/intellij-community,idea4bsd/idea4bsd,muntasirsyed/intellij-community,supersven/intellij-community,mglukhikh/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,ryano144/intellij-community,jagguli/intellij-community,FHannes/intellij-community,izonder/intellij-community,xfournet/intellij-community,hurricup/intellij-community,ol
-loginov/intellij-community,idea4bsd/idea4bsd,supersven/intellij-community,vvv1559/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,amith01994/intellij-community,wreckJ/intellij-community,wreckJ/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,akosyakov/intellij-community,kdwink/intellij-community,Distrotech/intellij-community,vladmm/intellij-community,holmes/intellij-community,xfournet/intellij-community,jagguli/intellij-community,suncycheng/intellij-community,robovm/robovm-studio,petteyg/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,holmes/intellij-community,jagguli/intellij-community,ibinti/intellij-community,ernestp/consulo,amith01994/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,izonder/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,consulo/consulo,akosyakov/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,ol-loginov/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,asedunov/intellij-community,jexp/idea2,kdwink/intellij-community,mglukhikh/intellij-community,dslomov/intellij-community,FHannes/intellij-community,diorcety/intellij-community,apixandru/intellij-community,samthor/intellij-community,mglukhikh/in
tellij-community,orekyuu/intellij-community,tmpgit/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,signed/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,diorcety/intellij-community,xfournet/intellij-community,joewalnes/idea-community,diorcety/intellij-community,ryano144/intellij-community,robovm/robovm-studio,adedayo/intellij-community,allotria/intellij-community,hurricup/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,Lekanich/intellij-community,supersven/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,supersven/intellij-community,Distrotech/intellij-community,suncycheng/intellij-community,ftomassetti/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,kool79/intellij-community,kool79/intellij-community,diorcety/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,fnouama/intellij-community,amith01994/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,amith01994/intellij-community,clumsy/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,jexp/idea2,SerCeMan/intellij-community,hurricup/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,kdwink/intellij-community,fengbaicanhe/intellij-community,wreckJ/intellij-community,da1z/intellij-community,suncycheng/intel
lij-community,clumsy/intellij-community,vvv1559/intellij-community,jexp/idea2,holmes/intellij-community,ernestp/consulo,adedayo/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,semonte/intellij-community,slisson/intellij-community,xfournet/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,Distrotech/intellij-community,robovm/robovm-studio,robovm/robovm-studio,michaelgallacher/intellij-community,dslomov/intellij-community,supersven/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,robovm/robovm-studio,semonte/intellij-community,akosyakov/intellij-community,allotria/intellij-community,kool79/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,FHannes/intellij-community,kdwink/intellij-community,allotria/intellij-community,ernestp/consulo,xfournet/intellij-community,Lekanich/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,caot/intellij-community,da1z/intellij-community,adedayo/intellij-community,retomerz/intellij-community,adedayo/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,jexp/idea2,supersven/intellij-community,ahb0327/intellij-community,petteyg/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,pwoodworth/intellij-community,xfournet/intellij-community,ibinti/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,izonder/intellij-community,blad
emainer/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,petteyg/intellij-community,diorcety/intellij-community,slisson/intellij-community,orekyuu/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,adedayo/intellij-community,wreckJ/intellij-community,youdonghai/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,jagguli/intellij-community,vladmm/intellij-community,samthor/intellij-community,ftomassetti/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,ryano144/intellij-community,da1z/intellij-community,clumsy/intellij-community,tmpgit/intellij-community,hurricup/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,SerCeMan/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,petteyg/intellij-community,fitermay/intellij-community,apixandru/intellij-community,fitermay/intellij-community,caot/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,wreckJ/intellij-community,signed/intellij-community,holmes/intellij-community,semonte/intellij-community,adedayo/intellij-community,blademainer/intellij-community,MichaelNedzelsky/intellij-community,vvv1559/intellij-community,izonder/intellij-community,fnouama/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,vladmm/intellij-community,ernestp/consulo,nicolargo/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,petteyg/intellij-community,caot/intellij-community,caot/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,petteyg/intellij-community,samthor/intellij-community,blademainer/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,robovm/robovm-stud
io,apixandru/intellij-community,blademainer/intellij-community,fitermay/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,michaelgallacher/intellij-community,Distrotech/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,retomerz/intellij-community,fnouama/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,xfournet/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,Distrotech/intellij-community,wreckJ/intellij-community,kool79/intellij-community,hurricup/intellij-community,clumsy/intellij-community,joewalnes/idea-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,slisson/intellij-community,consulo/consulo,asedunov/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,fnouama/intellij-community,allotria/intellij-community,jagguli/intellij-community,da1z/intellij-community,retomerz/intellij-community,ryano144/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,supersven/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,vladmm/intellij-community,semonte/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,lucafavatella/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,samthor/intellij-community,jagguli/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,da1z/intellij-community,slisson/intellij-community,diorcety/intellij-community,ftomassetti/intellij-community,wreckJ/intellij-community,youdonghai/intellij-communi
ty,ibinti/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,SerCeMan/intellij-community,slisson/intellij-community,mglukhikh/intellij-community,kool79/intellij-community,salguarnieri/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,da1z/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,jexp/idea2,suncycheng/intellij-community,izonder/intellij-community,pwoodworth/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,gnuhub/intellij-community,robovm/robovm-studio,joewalnes/idea-community,apixandru/intellij-community,fnouama/intellij-community,lucafavatella/intellij-community,semonte/intellij-community,da1z/intellij-community,izonder/intellij-community,ibinti/intellij-community,ryano144/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,izonder/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,muntasirsyed/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,samthor/intellij-community,gnuhub/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,joewalnes/idea-community,samthor/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,signed/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,izonder/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,caot/intellij-community,lucafavatella/intellij-community,youdonghai/intellij-community,clumsy/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,holmes/intellij
-community,retomerz/intellij-community,youdonghai/intellij-community,supersven/intellij-community,retomerz/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,pwoodworth/intellij-community,vladmm/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,hurricup/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,slisson/intellij-community,fengbaicanhe/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,allotria/intellij-community,kdwink/intellij-community,vladmm/intellij-community,nicolargo/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,orekyuu/intellij-community,slisson/intellij-community,ahb0327/intellij-community,consulo/consulo,dslomov/intellij-community,jexp/idea2,fitermay/intellij-community,retomerz/intellij-community,ryano144/intellij-community,joewalnes/idea-community,akosyakov/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,xfournet/intellij-community,allotria/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,akosyakov/intellij-community,Distrotech/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,joewalnes/idea-community,hurricup/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,samthor/intellij-community,tmpgit/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,joewalnes/idea-community,TangHao1987/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,muntasirsyed/intell
ij-community,holmes/intellij-community,semonte/intellij-community,gnuhub/intellij-community,semonte/intellij-community,jagguli/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,fitermay/intellij-community,holmes/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,ibinti/intellij-community,slisson/intellij-community,signed/intellij-community,joewalnes/idea-community,blademainer/intellij-community,consulo/consulo,ernestp/consulo,nicolargo/intellij-community,consulo/consulo,semonte/intellij-community,ol-loginov/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,apixandru/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,consulo/consulo,xfournet/intellij-community,adedayo/intellij-community,amith01994/intellij-community,clumsy/intellij-community,kool79/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,salguarnieri/intellij-community,ol-loginov/intellij-community,gnuhub/intellij-community,apixandru/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,kool79/intellij-community,ibinti/intellij-community,jagguli/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,signed/intellij-community,diorcety/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,caot/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,semonte/intellij-community,allotria/intellij-community,izonder/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,fnouama/intellij-community,vladmm/intellij-community,signed/intellij-community,fnouama/intellij-community,robovm/robovm-studio,ibinti/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,x
fournet/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,ftomassetti/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,blademainer/intellij-community,supersven/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,signed/intellij-community,MER-GROUP/intellij-community,blademainer/intellij-community,dslomov/intellij-community,slisson/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,caot/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,akosyakov/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,gnuhub/intellij-community,pwoodworth/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,allotria/intellij-community,xfournet/intellij-community,samthor/intellij-community,signed/intellij-community,Lekanich/intellij-community,fitermay/intellij-community,diorcety/intellij-community,ibinti/intellij-community,retomerz/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,joewalnes/idea-community
package com.intellij.psi.impl.source.parsing;

import com.intellij.lexer.JavaLexer;
import com.intellij.lexer.Lexer;
import com.intellij.lexer.JavaWithJspTemplateDataLexer;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.impl.source.Constants;
import com.intellij.psi.impl.source.ParsingContext;
import com.intellij.psi.impl.source.parsing.jsp.JspStep1Lexer;
import com.intellij.psi.impl.source.tree.*;
import com.intellij.psi.impl.source.tree.java.ModifierListElement;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.intellij.psi.tree.IChameleonElementType;
import com.intellij.psi.xml.XmlElementType;
import com.intellij.util.CharTable;
import com.intellij.lang.ASTNode;

/**
 * Parsing helpers: creates token elements from a lexer, saves/restores lexer
 * positions packed into a {@code long}, re-inserts tokens (whitespace/comments)
 * that the parser skipped back into a built tree, and re-attaches doc/line
 * comments to the declarations they belong to.
 */
public class ParseUtil implements Constants {
  private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.parsing.ParseUtil");

  // Marker placed on composite nodes whose closing token is missing; several
  // insertion routines below route trailing whitespace into such nodes.
  public static final Key<String> UNCLOSED_ELEMENT_PROPERTY = Key.create("UNCLOSED_ELEMENT_PROPERTY");

  // XML_TEXT is the only element type treated as a "strong" whitespace holder:
  // missing whitespace found at its boundary is attached inside it.
  public static boolean isStrongWhitespaceHolder(IElementType type){
    if (type == XmlElementType.XML_TEXT) return true;
    return false;
  }

  /**
   * Builds a tree element for the lexer's current token (the lexer is not advanced).
   * Doc comments become a composite wrapping a DOC_COMMENT_TEXT chameleon leaf;
   * every other token becomes a plain leaf. Returns null at end of the token stream.
   */
  public static TreeElement createTokenElement(Lexer lexer, CharTable table) {
    IElementType tokenType = lexer.getTokenType();
    if (tokenType == null) return null;
    if (tokenType == DOC_COMMENT) {
      CompositeElement element = Factory.createCompositeElement(tokenType);
      LeafElement chameleon = Factory.createLeafElement(DOC_COMMENT_TEXT, lexer.getBuffer(), lexer.getTokenStart(), lexer.getTokenEnd(), lexer.getState(), table);
      TreeUtil.addChildren(element, chameleon);
      return element;
    }
    else {
      final LeafElement leafElement = Factory.createLeafElement(tokenType, lexer.getBuffer(), lexer.getTokenStart(), lexer.getTokenEnd(), lexer.getState(), table);
      leafElement.setState(lexer.getState());
      return leafElement;
    }
  }

  // Packs token start offset (low 32 bits) and lexer state (high 32 bits) into one long.
  // NOTE(review): relies on getTokenStart() being non-negative; a negative offset would
  // sign-extend into the state bits.
  public static long savePosition(Lexer lexer) {
    return lexer.getTokenStart() | (long)lexer.getState() << 32;
  }

  // Extracts the offset half of a packed position.
  // NOTE(review): "& 0xFFFFFFFF" after the (int) cast is a no-op on an int value;
  // the intent was presumably (int)(position & 0xFFFFFFFFL) — result is the same
  // for non-negative offsets.
  public static int getStoredPosition(long position) {
    return (int)position & 0xFFFFFFFF;
  }

  // Extracts the state half of a packed position (see savePosition).
  public static int getStoredState(long startPos) {
    return (int)(startPos >> 32);
  }

  // Restarts the lexer at a position previously packed by savePosition.
  public static void restorePosition(Lexer lexer, long position) {
    lexer.start(lexer.getBuffer(), (int)position & 0xFFFFFFFF, lexer.getBufferEnd(), (int)(position >> 32));
  }

  /** Returns the text of the lexer's current token as a String. */
  public static String getTokenText(Lexer lexer) {
    return StringFactory.createStringFromConstantArray(lexer.getBuffer(), lexer.getTokenStart(), lexer.getTokenEnd() - lexer.getTokenStart());
  }

  /**
   * Strategy for consuming "missing" tokens (typically whitespace/comments):
   * {@link #process} collects consecutive valid tokens into a sibling chain,
   * {@link #isTokenValid} says which token types it accepts.
   */
  public static interface TokenProcessor {
    TreeElement process(Lexer lexer, ParsingContext context);
    boolean isTokenValid(IElementType tokenType);
  }

  /**
   * TokenProcessor base that accepts whatever {@link #isInSet} allows and
   * chains the consumed tokens into a doubly linked sibling list,
   * returning the first element (or null if nothing was consumed).
   */
  public static abstract class DefaultWhiteSpaceTokenProcessorImpl implements TokenProcessor {
    public boolean isTokenValid(IElementType tokenType) {
      return tokenType != null && isInSet(tokenType);
    }

    public TreeElement process(Lexer lexer, ParsingContext context) {
      TreeElement first = null;
      TreeElement last = null;
      while (isTokenValid(lexer.getTokenType())) {
        TreeElement tokenElement = ParseUtil.createTokenElement(lexer, context.getCharTable());
        IElementType type = lexer.getTokenType();
        // Defensive re-check: the token accepted above must still be in the set.
        if (!isInSet(type)) {
          LOG.error("Missed token should be white space or comment:" + tokenElement);
          throw new RuntimeException();
        }
        if (last != null) {
          // Manually link siblings; the chain is parent-less until attached.
          last.setTreeNext(tokenElement);
          tokenElement.setTreePrev(last);
          last = tokenElement;
        }
        else {
          first = last = tokenElement;
        }
        lexer.advance();
      }
      return first;
    }

    protected abstract boolean isInSet(final IElementType type);
  }

  /**
   * Singleton TokenProcessor accepting exactly the tokens in
   * WHITE_SPACE_OR_COMMENT_BIT_SET. Logic mirrors
   * DefaultWhiteSpaceTokenProcessorImpl with the set hard-wired.
   */
  public static class WhiteSpaceAndCommentsProcessor implements TokenProcessor {
    public static final TokenProcessor INSTANCE = new WhiteSpaceAndCommentsProcessor();

    private WhiteSpaceAndCommentsProcessor() {
    }

    public TreeElement process(Lexer lexer, ParsingContext context) {
      TreeElement first = null;
      TreeElement last = null;
      while (isTokenValid(lexer.getTokenType())) {
        TreeElement tokenElement = ParseUtil.createTokenElement(lexer, context.getCharTable());
        IElementType type = lexer.getTokenType();
        if (!WHITE_SPACE_OR_COMMENT_BIT_SET.isInSet(type)) {
          LOG.error("Missed token should be white space or comment:" + tokenElement);
          throw new RuntimeException();
        }
        if (last != null) {
          last.setTreeNext(tokenElement);
          tokenElement.setTreePrev(last);
          last = tokenElement;
        }
        else {
          first = last = tokenElement;
        }
        lexer.advance();
      }
      return first;
    }

    public boolean isTokenValid(IElementType tokenType) {
      return tokenType != null && WHITE_SPACE_OR_COMMENT_BIT_SET.isInSet(tokenType);
    }
  }

  /**
   * Mutable scratch state filled in by nextLeaf/prevLeaf while walking between
   * leaves: the branch the walk started from, the branch the next leaf lives in,
   * and an optional "strong whitespace holder" composite that should absorb
   * missing whitespace (with a flag for which slope of the walk found it).
   */
  public static final class CommonParentState {
    TreeElement startLeafBranchStart = null;
    ASTNode nextLeafBranchStart = null;
    CompositeElement strongWhiteSpaceHolder = null;
    boolean isStrongElementOnRisingSlope = true;
  }

  /** Convenience overload: re-lex from startOffset with the lexer's default initial state. */
  public static void insertMissingTokens(CompositeElement root, Lexer lexer, int startOffset, int endOffset, TokenProcessor processor, ParsingContext context) {
    insertMissingTokens(root, lexer, startOffset, endOffset, -1, processor, context);
  }

  /**
   * Re-lexes [startOffset, endOffset) and inserts every token accepted by
   * {@code processor} (typically whitespace/comments the parser dropped) into
   * {@code root} at the position matching the token stream, then re-binds
   * comments to their declarations. A negative {@code state} means "use the
   * lexer's default initial state".
   */
  public static void insertMissingTokens(CompositeElement root, Lexer lexer, int startOffset, int endOffset, int state, TokenProcessor processor, ParsingContext context) {
    if (state < 0) {
      lexer.start(lexer.getBuffer(), startOffset, endOffset);
    }
    else {
      lexer.start(lexer.getBuffer(), startOffset, endOffset, state);
    }

    // Java-family lexers need GTTokens to merge ">>"/">>>" style tokens
    // when comparing against tree leaves.
    boolean gt = lexer instanceof JavaLexer || lexer instanceof JspStep1Lexer || lexer instanceof JavaWithJspTemplateDataLexer;

    LeafElement leaf = TreeUtil.findFirstLeaf(root);
    if (leaf == null) {
      // Tree has no leaves at all: everything the processor accepts goes under root.
      final TreeElement firstMissing = processor.process(lexer, context);
      if (firstMissing != null) {
        TreeUtil.addChildren(root, firstMissing);
      }
      return;
    }

    {
      // Missing tokens before the first leaf of the tree.
      final IElementType tokenType = gt ? GTTokens.getTokenType(lexer) : lexer.getTokenType();
      if (tokenType != leaf.getElementType() && processor.isTokenValid(tokenType)) {
        final TreeElement firstMissing = processor.process(lexer, context);
        if (firstMissing != null) {
          TreeUtil.insertBefore((TreeElement)root.getFirstChildNode(), firstMissing);
        }
      }
      passTokenOrChameleon(leaf, lexer, gt);
    }

    // Missing tokens between leaves inside the tree body.
    insertMissingTokensInTreeBody(leaf, gt, lexer, processor, context, null);

    if (lexer.getTokenType() != null) {
      // Whitespace left over at the end of the file: attach it to the deepest
      // unclosed composite if one exists, otherwise append after the last child.
      final TreeElement firstMissing = processor.process(lexer, context);
      if (firstMissing != null) {
        ASTNode current = root;
        while (current instanceof CompositeElement) {
          if (current.getUserData(UNCLOSED_ELEMENT_PROPERTY) != null) break;
          current = current.getLastChildNode();
        }
        if (current instanceof CompositeElement) {
          TreeUtil.addChildren((CompositeElement)current, firstMissing);
        }
        else {
          TreeUtil.insertAfter((TreeElement)root.getLastChildNode(), firstMissing);
        }
      }
    }
    bindComments(root);
  }

  /**
   * Walks leaves starting at {@code leaf}, keeping the lexer in lockstep, and
   * inserts processor-accepted tokens wherever the token stream has tokens the
   * tree lacks. Stops at the end of the stream, at {@code endToken}, or when no
   * next leaf exists. Placement prefers a "strong whitespace holder" recorded
   * during the leaf walk; otherwise it scans the sibling chain between the two
   * branches for the right anchor (stopping early at modifier lists and
   * descending into unclosed elements).
   */
  public static void insertMissingTokensInTreeBody(TreeElement leaf, boolean gt, Lexer lexer, TokenProcessor processor, ParsingContext context, ASTNode endToken) {
    final CommonParentState commonParents = new CommonParentState();
    while (leaf != null) {
      commonParents.strongWhiteSpaceHolder = null;
      final IElementType tokenType = gt ? GTTokens.getTokenType(lexer) : lexer.getTokenType();
      final TreeElement next;
      // For chameleon token types, search for a node of exactly that type;
      // otherwise just take the next leaf.
      if (tokenType instanceof IChameleonElementType) next = nextLeaf(leaf, commonParents, tokenType);
      else next = nextLeaf(leaf, commonParents, null);

      if (next == null || tokenType == null || next == endToken) break;

      if (tokenType != next.getElementType() && processor.isTokenValid(tokenType)) {
        final TreeElement firstMissing = processor.process(lexer, context);
        final CompositeElement unclosedElement = commonParents.strongWhiteSpaceHolder;
        if (unclosedElement != null) {
          // A strong holder absorbs the run: append on the rising slope (or when
          // empty), otherwise prepend before its first child.
          if (commonParents.isStrongElementOnRisingSlope || unclosedElement.getFirstChildNode() == null) {
            TreeUtil.addChildren(unclosedElement, firstMissing);
          }
          else {
            TreeUtil.insertBefore((TreeElement)unclosedElement.getFirstChildNode(), firstMissing);
          }
        }
        else {
          // Find an anchor between the start branch and the next-leaf branch.
          final ASTNode insertBefore = commonParents.nextLeafBranchStart;
          TreeElement insertAfter = commonParents.startLeafBranchStart;
          TreeElement current = commonParents.startLeafBranchStart;
          while (current != insertBefore) {
            final TreeElement treeNext = current.getTreeNext();
            if (treeNext == insertBefore) {
              insertAfter = current;
              break;
            }
            if (treeNext instanceof ModifierListElement) {
              // Keep missing tokens before a modifier list rather than after it.
              insertAfter = current;
              break;
            }
            if (treeNext.getUserData(UNCLOSED_ELEMENT_PROPERTY) != null) {
              // Unclosed composite on the way: put the tokens inside it instead.
              insertAfter = null;
              TreeUtil.addChildren((CompositeElement)treeNext, firstMissing);
              break;
            }
            current = treeNext;
          }
          if (insertAfter != null) TreeUtil.insertAfter(insertAfter, firstMissing);
        }
      }
      passTokenOrChameleon(next, lexer, gt);
      leaf = next;
    }
  }

  // Advances the lexer past the node just matched. For a chameleon, first skips
  // all tokens covered by its text length, then advances one more token
  // (GTTokens-aware for Java-family lexers).
  private static void passTokenOrChameleon(final ASTNode next, Lexer lexer, boolean gtUse) {
    if (next instanceof ChameleonElement) {
      final int endOfChameleon = next.getTextLength() + lexer.getTokenStart();
      while (lexer.getTokenType() != null && lexer.getTokenEnd() < endOfChameleon) {
        lexer.advance();
      }
    }
    if (gtUse) {
      GTTokens.advance(next.getElementType(), lexer);
    }
    else {
      lexer.advance();
    }
  }

  /** Next leaf after {@code start}, filling {@code commonParent} (no type filter). */
  public static LeafElement nextLeaf(TreeElement start, CommonParentState commonParent) {
    return (LeafElement)nextLeaf(start, commonParent, null);
  }

  /**
   * Finds the next leaf after {@code start}, or the next node of
   * {@code searchedType} if that is hit first. Records branch boundaries and
   * strong-whitespace-holder candidates in {@code commonParent} (may be null).
   * Recurses upward through parents when the current level is exhausted.
   */
  public static TreeElement nextLeaf(TreeElement start, CommonParentState commonParent, IElementType searchedType) {
    TreeElement next = null;
    if (commonParent != null) {
      commonParent.startLeafBranchStart = start;
      initStrongWhitespaceHolder(commonParent, start, true);
    }
    TreeElement nextTree = start;
    while (next == null && (nextTree = nextTree.getTreeNext()) != null) {
      if (nextTree.getElementType() == searchedType) return nextTree;
      next = findFirstLeaf(nextTree, searchedType, commonParent);
    }
    if (next != null) {
      if (commonParent != null) commonParent.nextLeafBranchStart = nextTree;
      return next;
    }
    final CompositeElement parent = start.getTreeParent();
    if (parent == null) return null;
    return nextLeaf(parent, commonParent, searchedType);
  }

  // Records `start` as the strong whitespace holder if it is a composite that
  // either is a strong holder by type, or is an unclosed element met on the
  // rising slope of the walk.
  private static void initStrongWhitespaceHolder(CommonParentState commonParent, ASTNode start, boolean slopeSide) {
    if (start instanceof CompositeElement && (isStrongWhitespaceHolder(start.getElementType()) || (start.getUserData(UNCLOSED_ELEMENT_PROPERTY) != null) && slopeSide)) {
      commonParent.strongWhiteSpaceHolder = (CompositeElement)start;
      commonParent.isStrongElementOnRisingSlope = slopeSide;
    }
  }

  // Depth-first search for the first leaf (or node of searchedType) under
  // `element`, updating strong-holder state on the way down (falling slope).
  private static TreeElement findFirstLeaf(TreeElement element, IElementType searchedType, CommonParentState commonParent) {
    if (commonParent != null) {
      initStrongWhitespaceHolder(commonParent, element, false);
    }
    if (element instanceof LeafElement || element.getElementType() == searchedType) {
      return element;
    }
    else {
      for (TreeElement child = (TreeElement)element.getFirstChildNode(); child != null; child = child.getTreeNext()) {
        TreeElement leaf = findFirstLeaf(child, searchedType, commonParent);
        if (leaf != null) return leaf;
      }
      return null;
    }
  }

  /**
   * Finds the leaf preceding {@code start}, mirroring nextLeaf: scans previous
   * siblings for their last leaf, then recurses into the parent. Updates
   * {@code commonParent} bookkeeping (may be null).
   */
  public static LeafElement prevLeaf(TreeElement start, CommonParentState commonParent) {
    LeafElement prev = null;
    if (commonParent != null) {
      if (commonParent.strongWhiteSpaceHolder != null && start.getUserData(UNCLOSED_ELEMENT_PROPERTY) != null)
        commonParent.strongWhiteSpaceHolder = (CompositeElement)start;
      commonParent.startLeafBranchStart = start;
    }
    ASTNode prevTree = start;
    while (prev == null && (prevTree = prevTree.getTreePrev()) != null) {
      prev = TreeUtil.findLastLeaf(prevTree);
    }
    if (prev != null) {
      if (commonParent != null) commonParent.nextLeafBranchStart = prevTree;
      return prev;
    }
    final CompositeElement parent = start.getTreeParent();
    if (parent == null) return null;
    return prevLeaf(parent, commonParent);
  }

  /**
   * Recursively walks {@code root}'s children and moves comments into the
   * declarations they document: doc comments into the following member,
   * trailing line/block comments into the preceding statement, and preceding
   * comments into the following member. After a successful bind the walk
   * restarts from the (possibly changed) parent position.
   */
  static void bindComments(ASTNode root) {
    TreeElement child = (TreeElement)root.getFirstChildNode();
    while (child != null) {
      if (child.getElementType() == DOC_COMMENT) {
        if (bindDocComment(child)) {
          child = child.getTreeParent();
          continue;
        }
      }

      // bind "trailing comments" (like "int a; // comment")
      if (child.getElementType() == END_OF_LINE_COMMENT || child.getElementType() == C_STYLE_COMMENT) {
        if (bindTrailingComment(child)) {
          child = child.getTreeParent();
          continue;
        }
      }

      // bind "preceding comments" (like "// comment \n void f();")
      if (child.getElementType() == END_OF_LINE_COMMENT || child.getElementType() == C_STYLE_COMMENT) {
        if (bindPrecedingComment(child)) {
          child = child.getTreeParent();
          if (child.getTreePrev() != null) {
            child = child.getTreePrev();
          }
          continue;
        }
      }

      if (child instanceof CompositeElement) {
        bindComments(child);
      }
      child = child.getTreeNext();
    }
  }

  /**
   * Moves a doc comment (plus the whitespace/comments separating it from its
   * target) inside the following class/field/method/enum-constant, before that
   * member's first child. Empty IMPORT_LISTs are skipped but left in place.
   * Returns true if the comment was re-attached.
   */
  private static boolean bindDocComment(TreeElement docComment) {
    TreeElement element = docComment.getTreeNext();
    if (element == null) return false;
    TreeElement startSpaces = null;
    ASTNode endSpaces = null;
    // Skip whitespace/comments (and zero-length import lists) between the doc
    // comment and its candidate owner, remembering the skipped run.
    while (element.getElementType() == WHITE_SPACE || element.getElementType() == C_STYLE_COMMENT || element.getElementType() == END_OF_LINE_COMMENT || (element.getElementType() == IMPORT_LIST && element.getTextLength() == 0)) {
      if (startSpaces == null) startSpaces = element;
      element = element.getTreeNext();
      if (element == null) return false;
    }
    endSpaces = element;

    if (element.getElementType() == CLASS || element.getElementType() == FIELD || element.getElementType() == METHOD || element.getElementType() == ENUM_CONSTANT) {
      TreeElement first = (TreeElement)element.getFirstChildNode();
      TreeUtil.remove(docComment);
      TreeUtil.insertBefore(first, docComment);
      if (startSpaces != null) {
        // Move the skipped run along with the comment, preserving its order;
        // import lists stay where they were.
        element = startSpaces.getTreeNext();
        if (startSpaces.getElementType() != IMPORT_LIST) {
          TreeUtil.remove(startSpaces);
          TreeUtil.insertBefore(first, startSpaces);
        }
        TreeElement anchor = startSpaces;
        while (element != endSpaces) {
          TreeElement next = element.getTreeNext();
          if (element.getElementType() != IMPORT_LIST) {
            TreeUtil.remove(element);
            TreeUtil.insertAfter(anchor, element);
            anchor = element;
          }
          element = next;
        }
      }
      return true;
    }
    return false;
  }

  // Element types a trailing comment may be folded into (members, imports,
  // package statement, plus all statements). Name typo ("TRAINLING") kept as-is.
  private static final TokenSet BIND_TRAINLING_COMMENT_BIT_SET = TokenSet.orSet(TokenSet.create(new IElementType[]{ FIELD, METHOD, CLASS, CLASS_INITIALIZER, IMPORT_STATEMENT, IMPORT_STATIC_STATEMENT, PACKAGE_STATEMENT }), STATEMENT_BIT_SET);

  /**
   * Appends a same-line comment (and the single whitespace before it, if any)
   * into the preceding eligible element. Requires that neither the separating
   * whitespace nor the comment itself contain a line break.
   * Returns true if the comment was moved.
   */
  private static boolean bindTrailingComment(TreeElement comment) {
    TreeElement element = comment.getTreePrev();
    if (element == null) return false;
    TreeElement space = null;
    if (element.getElementType() == WHITE_SPACE) {
      space = element;
      element = element.getTreePrev();
    }
    if (element != null && BIND_TRAINLING_COMMENT_BIT_SET.isInSet(element.getElementType())) {
      if (space == null || (!space.textContains('\n') && !space.textContains('\r'))) {
        if (!comment.textContains('\n') && !comment.textContains('\r')) {
          if (space != null) {
            TreeUtil.remove(space);
            TreeUtil.addChildren((CompositeElement)element, space);
          }
          TreeUtil.remove(comment);
          TreeUtil.addChildren((CompositeElement)element, comment);
          return true;
        }
      }
    }
    return false;
  }

  // Member types a preceding comment may be bound to.
  private static final TokenSet BIND_PRECEDING_COMMENT_BIT_SET = TokenSet.create(new IElementType[]{ FIELD, METHOD, CLASS, CLASS_INITIALIZER, });

  // Token types skipped when looking past a preceding comment for its owner.
  private static final TokenSet PRECEDING_COMMENT_OR_SPACE_BIT_SET = TokenSet.create(new IElementType[]{ C_STYLE_COMMENT, END_OF_LINE_COMMENT, WHITE_SPACE });

  /**
   * Moves a comment that stands on its own line(s) directly above a
   * field/method/class/initializer inside that member, before its first child.
   * Rejects the bind when a blank line (more than one line break) separates
   * comment and member, or when the comment is not alone on its line.
   * Returns true if the comment was moved.
   */
  private static boolean bindPrecedingComment(TreeElement comment) {
    ASTNode element = TreeUtil.skipElements(comment, PRECEDING_COMMENT_OR_SPACE_BIT_SET);
    if (element == null) return false;
    if (element.getElementType() == IMPORT_LIST && element.getTextLength() == 0) {
      element = element.getTreeNext();
    }

    if (element != null && BIND_PRECEDING_COMMENT_BIT_SET.isInSet(element.getElementType())) {
      for (ASTNode child = comment; child != element; child = child.getTreeNext()) {
        if (child.getElementType() == WHITE_SPACE) {
          // A blank line between comment and member breaks the association.
          int count = StringUtil.getLineBreakCount(child.getText());
          if (count > 1) return false;
        }
        else {
          // Non-whitespace in between: only acceptable if the run still starts
          // at a line boundary (previous whitespace ends with a newline).
          if (comment.getTreePrev() != null && comment.getTreePrev().getElementType() == ElementType.WHITE_SPACE) {
            LeafElement prev = (LeafElement)comment.getTreePrev();
            char lastC = prev.charAt(prev.getTextLength() - 1);
            if (lastC == '\n' || lastC == '\r') return false;
          }
          else {
            return false;
          }
        }
      }

      // check if the comment is on a separate line
      if (comment.getTreePrev() != null) {
        ASTNode prev = comment.getTreePrev();
        if (prev.getElementType() != ElementType.WHITE_SPACE) {
          return false;
        }
        else {
          if (!prev.textContains('\n')) return false;
        }
      }

      // Move the comment run (skipping import lists) before the member's first child.
      TreeElement first = (TreeElement)element.getFirstChildNode();
      TreeElement child = comment;
      while (child != element) {
        TreeElement next = child.getTreeNext();
        if (child.getElementType() != IMPORT_LIST) {
          TreeUtil.remove(child);
          TreeUtil.insertBefore(first, child);
        }
        child = next;
      }
      return true;
    }
    return false;
  }
}
source/com/intellij/psi/impl/source/parsing/ParseUtil.java
package com.intellij.psi.impl.source.parsing; import com.intellij.lexer.JavaLexer; import com.intellij.lexer.Lexer; import com.intellij.lexer.JavaWithJspTemplateDataLexer; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.impl.source.Constants; import com.intellij.psi.impl.source.ParsingContext; import com.intellij.psi.impl.source.parsing.jsp.JspStep1Lexer; import com.intellij.psi.impl.source.tree.*; import com.intellij.psi.impl.source.tree.java.ModifierListElement; import com.intellij.psi.tree.IElementType; import com.intellij.psi.tree.TokenSet; import com.intellij.psi.tree.IChameleonElementType; import com.intellij.psi.xml.XmlElementType; import com.intellij.util.CharTable; import com.intellij.lang.ASTNode; /** * */ public class ParseUtil implements Constants { private static final Logger LOG = Logger.getInstance("#com.intellij.psi.impl.source.parsing.ParseUtil"); public static final Key<String> UNCLOSED_ELEMENT_PROPERTY = Key.create("UNCLOSED_ELEMENT_PROPERTY"); public static boolean isStrongWhitespaceHolder(IElementType type){ if (type == XmlElementType.XML_TEXT) return true; return false; } public static TreeElement createTokenElement(Lexer lexer, CharTable table) { IElementType tokenType = lexer.getTokenType(); if (tokenType == null) return null; if (tokenType == DOC_COMMENT) { CompositeElement element = Factory.createCompositeElement(tokenType); LeafElement chameleon = Factory.createLeafElement(DOC_COMMENT_TEXT, lexer.getBuffer(), lexer.getTokenStart(), lexer.getTokenEnd(), lexer.getState(), table); TreeUtil.addChildren(element, chameleon); return element; } else { final LeafElement leafElement = Factory.createLeafElement(tokenType, lexer.getBuffer(), lexer.getTokenStart(), lexer.getTokenEnd(), lexer.getState(), table); leafElement.setState(lexer.getState()); return leafElement; } } public static long savePosition(Lexer lexer) { return 
lexer.getTokenStart() | (long)lexer.getState() << 32; } public static int getStoredPosition(long position) { return (int)position & 0xFFFFFFFF; } public static int getStoredState(long startPos) { return (int)(startPos >> 32); } public static void restorePosition(Lexer lexer, long position) { lexer.start(lexer.getBuffer(), (int)position & 0xFFFFFFFF, lexer.getBufferEnd(), (int)(position >> 32)); } public static int addTokens(CompositeElement parent, Lexer lexer, TokenSet typeBitSet, ParsingContext context) { int count = 0; while (true) { IElementType tokenType = lexer.getTokenType(); if (tokenType == null) break; if (!typeBitSet.isInSet(tokenType)) break; if (parent != null) { TreeUtil.addChildren(parent, ParseUtil.createTokenElement(lexer, context.getCharTable())); } lexer.advance(); count++; } return count; } public static int addTokensUntil(CompositeElement parent, Lexer lexer, IElementType stopType, ParsingContext context) { int count = 0; while (true) { IElementType tokenType = lexer.getTokenType(); if (tokenType == null) break; if (tokenType == stopType) break; if (parent != null) { TreeUtil.addChildren(parent, createTokenElement(lexer, context.getCharTable())); } lexer.advance(); count++; } return count; } public static String getTokenText(Lexer lexer) { return StringFactory.createStringFromConstantArray(lexer.getBuffer(), lexer.getTokenStart(), lexer.getTokenEnd() - lexer.getTokenStart()); } public static interface TokenProcessor { TreeElement process(Lexer lexer, ParsingContext context); boolean isTokenValid(IElementType tokenType); } public static abstract class DefaultWhiteSpaceTokenProcessorImpl implements TokenProcessor { public boolean isTokenValid(IElementType tokenType) { return tokenType != null && isInSet(tokenType); } public TreeElement process(Lexer lexer, ParsingContext context) { TreeElement first = null; TreeElement last = null; while (isTokenValid(lexer.getTokenType())) { TreeElement tokenElement = ParseUtil.createTokenElement(lexer, 
context.getCharTable()); IElementType type = lexer.getTokenType(); if (!isInSet(type)) { LOG.error("Missed token should be white space or comment:" + tokenElement); throw new RuntimeException(); } if (last != null) { last.setTreeNext(tokenElement); tokenElement.setTreePrev(last); last = tokenElement; } else { first = last = tokenElement; } lexer.advance(); } return first; } protected abstract boolean isInSet(final IElementType type); } public static class WhiteSpaceAndCommentsProcessor implements TokenProcessor { public static final TokenProcessor INSTANCE = new WhiteSpaceAndCommentsProcessor(); private WhiteSpaceAndCommentsProcessor() { } public TreeElement process(Lexer lexer, ParsingContext context) { TreeElement first = null; TreeElement last = null; while (isTokenValid(lexer.getTokenType())) { TreeElement tokenElement = ParseUtil.createTokenElement(lexer, context.getCharTable()); IElementType type = lexer.getTokenType(); if (!WHITE_SPACE_OR_COMMENT_BIT_SET.isInSet(type)) { LOG.error("Missed token should be white space or comment:" + tokenElement); throw new RuntimeException(); } if (last != null) { last.setTreeNext(tokenElement); tokenElement.setTreePrev(last); last = tokenElement; } else { first = last = tokenElement; } lexer.advance(); } return first; } public boolean isTokenValid(IElementType tokenType) { return tokenType != null && WHITE_SPACE_OR_COMMENT_BIT_SET.isInSet(tokenType); } } public static final class CommonParentState { TreeElement startLeafBranchStart = null; ASTNode nextLeafBranchStart = null; CompositeElement strongWhiteSpaceHolder = null; boolean isStrongElementOnRisingSlope = true; } public static void insertMissingTokens(CompositeElement root, Lexer lexer, int startOffset, int endOffset, TokenProcessor processor, ParsingContext context) { insertMissingTokens(root, lexer, startOffset, endOffset, -1, processor, context); } public static void insertMissingTokens(CompositeElement root, Lexer lexer, int startOffset, int endOffset, int state, 
TokenProcessor processor, ParsingContext context) { if (state < 0) { lexer.start(lexer.getBuffer(), startOffset, endOffset); } else { lexer.start(lexer.getBuffer(), startOffset, endOffset, state); } boolean gt = lexer instanceof JavaLexer || lexer instanceof JspStep1Lexer || lexer instanceof JavaWithJspTemplateDataLexer; LeafElement leaf = TreeUtil.findFirstLeaf(root); if (leaf == null) { final TreeElement firstMissing = processor.process(lexer, context); if (firstMissing != null) { TreeUtil.addChildren(root, firstMissing); } return; } { // Missing in the begining final IElementType tokenType = gt ? GTTokens.getTokenType(lexer) : lexer.getTokenType(); if (tokenType != leaf.getElementType() && processor.isTokenValid(tokenType)) { final TreeElement firstMissing = processor.process(lexer, context); if (firstMissing != null) { TreeUtil.insertBefore((TreeElement)root.getFirstChildNode(), firstMissing); } } passTokenOrChameleon(leaf, lexer, gt); } // Missing in tree body insertMissingTokensInTreeBody(leaf, gt, lexer, processor, context, null); if(lexer.getTokenType() != null){ // whitespaces at the end of the file final TreeElement firstMissing = processor.process(lexer, context); if(firstMissing != null){ ASTNode current = root; while(current instanceof CompositeElement){ if(current.getUserData(UNCLOSED_ELEMENT_PROPERTY) != null) break; current = current.getLastChildNode(); } if(current instanceof CompositeElement){ TreeUtil.addChildren((CompositeElement)current, firstMissing); } else{ TreeUtil.insertAfter((TreeElement)root.getLastChildNode(), firstMissing); } } } bindComments(root); } public static void insertMissingTokensInTreeBody(TreeElement leaf, boolean gt, Lexer lexer, TokenProcessor processor, ParsingContext context, ASTNode endToken) { final CommonParentState commonParents = new CommonParentState(); while(leaf != null){ commonParents.strongWhiteSpaceHolder = null; final IElementType tokenType = gt ? 
GTTokens.getTokenType(lexer) : lexer.getTokenType(); final TreeElement next; if(tokenType instanceof IChameleonElementType) next = nextLeaf(leaf, commonParents, tokenType); else next = nextLeaf(leaf, commonParents, null); if (next == null || tokenType == null || next == endToken) break; if (tokenType != next.getElementType() && processor.isTokenValid(tokenType)) { final TreeElement firstMissing = processor.process(lexer, context); final CompositeElement unclosedElement = commonParents.strongWhiteSpaceHolder; if (unclosedElement != null) { if(commonParents.isStrongElementOnRisingSlope || unclosedElement.getFirstChildNode() == null) { TreeUtil.addChildren(unclosedElement, firstMissing); } else { TreeUtil.insertBefore((TreeElement)unclosedElement.getFirstChildNode(), firstMissing); } } else { final ASTNode insertBefore = commonParents.nextLeafBranchStart; TreeElement insertAfter = commonParents.startLeafBranchStart; TreeElement current = commonParents.startLeafBranchStart; while (current != insertBefore) { final TreeElement treeNext = current.getTreeNext(); if (treeNext == insertBefore) { insertAfter = current; break; } if (treeNext instanceof ModifierListElement) { insertAfter = current; break; } if (treeNext.getUserData(UNCLOSED_ELEMENT_PROPERTY) != null) { insertAfter = null; TreeUtil.addChildren((CompositeElement)treeNext, firstMissing); break; } current = treeNext; } if (insertAfter != null) TreeUtil.insertAfter(insertAfter, firstMissing); } } passTokenOrChameleon(next, lexer, gt); leaf = next; } } private static void passTokenOrChameleon(final ASTNode next, Lexer lexer, boolean gtUse) { if (next instanceof ChameleonElement) { final int endOfChameleon = next.getTextLength() + lexer.getTokenStart(); while (lexer.getTokenType() != null && lexer.getTokenEnd() < endOfChameleon) { lexer.advance(); } } if (gtUse) { GTTokens.advance(next.getElementType(), lexer); } else { lexer.advance(); } } public static LeafElement nextLeaf(TreeElement start, CommonParentState 
commonParent) { return (LeafElement)nextLeaf(start, commonParent, null); } public static TreeElement nextLeaf(TreeElement start, CommonParentState commonParent, IElementType searchedType) { TreeElement next = null; if(commonParent != null){ commonParent.startLeafBranchStart = start; initStrongWhitespaceHolder(commonParent, start, true); } TreeElement nextTree = start; while (next == null && (nextTree = nextTree.getTreeNext()) != null) { if(nextTree.getElementType() == searchedType) return nextTree; next = findFirstLeaf(nextTree, searchedType, commonParent); } if(next != null){ if(commonParent != null) commonParent.nextLeafBranchStart = nextTree; return next; } final CompositeElement parent = start.getTreeParent(); if (parent == null) return null; return nextLeaf(parent, commonParent, searchedType); } private static void initStrongWhitespaceHolder(CommonParentState commonParent, ASTNode start, boolean slopeSide) { if(start instanceof CompositeElement && (isStrongWhitespaceHolder(start.getElementType()) || (start.getUserData(UNCLOSED_ELEMENT_PROPERTY) != null) && slopeSide)){ commonParent.strongWhiteSpaceHolder = (CompositeElement)start; commonParent.isStrongElementOnRisingSlope = slopeSide; } } private static TreeElement findFirstLeaf(TreeElement element, IElementType searchedType, CommonParentState commonParent) { if(commonParent != null){ initStrongWhitespaceHolder(commonParent, element, false); } if (element instanceof LeafElement || element.getElementType() == searchedType){ return element; } else{ for(TreeElement child = (TreeElement)element.getFirstChildNode(); child != null; child = child.getTreeNext()){ TreeElement leaf = findFirstLeaf(child, searchedType, commonParent); if (leaf != null) return leaf; } return null; } } public static LeafElement prevLeaf(TreeElement start, CommonParentState commonParent) { LeafElement prev = null; if(commonParent != null){ if(commonParent.strongWhiteSpaceHolder != null && start.getUserData(UNCLOSED_ELEMENT_PROPERTY) != null) 
commonParent.strongWhiteSpaceHolder = (CompositeElement)start; commonParent.startLeafBranchStart = start; } ASTNode prevTree = start; while (prev == null && (prevTree = prevTree.getTreePrev()) != null) { prev = TreeUtil.findLastLeaf(prevTree); } if(prev != null){ if(commonParent != null) commonParent.nextLeafBranchStart = prevTree; return prev; } final CompositeElement parent = start.getTreeParent(); if (parent == null) return null; return prevLeaf(parent, commonParent); } static void bindComments(ASTNode root) { TreeElement child = (TreeElement)root.getFirstChildNode(); while (child != null) { if (child.getElementType() == DOC_COMMENT) { if (bindDocComment(child)) { child = child.getTreeParent(); continue; } } // bind "trailing comments" (like "int a; // comment") if (child.getElementType() == END_OF_LINE_COMMENT || child.getElementType() == C_STYLE_COMMENT) { if (bindTrailingComment(child)) { child = child.getTreeParent(); continue; } } // bind "preceding comments" (like "// comment \n void f();") if (child.getElementType() == END_OF_LINE_COMMENT || child.getElementType() == C_STYLE_COMMENT) { if (bindPrecedingComment(child)) { child = child.getTreeParent(); if (child.getTreePrev() != null) { child = child.getTreePrev(); } continue; } } if (child instanceof CompositeElement) { bindComments(child); } child = child.getTreeNext(); } } private static boolean bindDocComment(TreeElement docComment) { TreeElement element = docComment.getTreeNext(); if (element == null) return false; TreeElement startSpaces = null; ASTNode endSpaces = null; // Bypass meaningless tokens and hold'em in hands while (element.getElementType() == WHITE_SPACE || element.getElementType() == C_STYLE_COMMENT || element.getElementType() == END_OF_LINE_COMMENT || (element.getElementType() == IMPORT_LIST && element.getTextLength() == 0) ) { if (startSpaces == null) startSpaces = element; element = element.getTreeNext(); if (element == null) return false; } endSpaces = element; if 
(element.getElementType() == CLASS || element.getElementType() == FIELD || element.getElementType() == METHOD || element.getElementType() == ENUM_CONSTANT) { TreeElement first = (TreeElement)element.getFirstChildNode(); TreeUtil.remove(docComment); TreeUtil.insertBefore(first, docComment); if (startSpaces != null) { element = startSpaces.getTreeNext(); if (startSpaces.getElementType() != IMPORT_LIST) { TreeUtil.remove(startSpaces); TreeUtil.insertBefore(first, startSpaces); } TreeElement anchor = startSpaces; while (element != endSpaces) { TreeElement next = element.getTreeNext(); if (element.getElementType() != IMPORT_LIST) { TreeUtil.remove(element); TreeUtil.insertAfter(anchor, element); anchor = element; } element = next; } } return true; } return false; } private static final TokenSet BIND_TRAINLING_COMMENT_BIT_SET = TokenSet.orSet(TokenSet.create(new IElementType[]{ FIELD, METHOD, CLASS, CLASS_INITIALIZER, IMPORT_STATEMENT, IMPORT_STATIC_STATEMENT, PACKAGE_STATEMENT }), STATEMENT_BIT_SET); private static boolean bindTrailingComment(TreeElement comment) { TreeElement element = comment.getTreePrev(); if (element == null) return false; TreeElement space = null; if (element.getElementType() == WHITE_SPACE) { space = element; element = element.getTreePrev(); } if (element != null && BIND_TRAINLING_COMMENT_BIT_SET.isInSet(element.getElementType())) { if (space == null || (!space.textContains('\n') && !space.textContains('\r'))) { if (!comment.textContains('\n') && !comment.textContains('\r')) { if (space != null) { TreeUtil.remove(space); TreeUtil.addChildren((CompositeElement)element, space); } TreeUtil.remove(comment); TreeUtil.addChildren((CompositeElement)element, comment); return true; } } } return false; } private static final TokenSet BIND_PRECEDING_COMMENT_BIT_SET = TokenSet.create(new IElementType[]{ FIELD, METHOD, CLASS, CLASS_INITIALIZER, }); private static final TokenSet PRECEDING_COMMENT_OR_SPACE_BIT_SET = TokenSet.create(new IElementType[]{ 
C_STYLE_COMMENT, END_OF_LINE_COMMENT, WHITE_SPACE }); private static boolean bindPrecedingComment(TreeElement comment) { ASTNode element = TreeUtil.skipElements(comment, PRECEDING_COMMENT_OR_SPACE_BIT_SET); if (element == null) return false; if (element.getElementType() == IMPORT_LIST && element.getTextLength() == 0) { element = element.getTreeNext(); } if (element != null && BIND_PRECEDING_COMMENT_BIT_SET.isInSet(element.getElementType())) { for (ASTNode child = comment; child != element; child = child.getTreeNext()) { if (child.getElementType() == WHITE_SPACE) { int count = StringUtil.getLineBreakCount(child.getText()); if (count > 1) return false; } else { if (comment.getTreePrev() != null && comment.getTreePrev().getElementType() == ElementType.WHITE_SPACE) { LeafElement prev = (LeafElement)comment.getTreePrev(); char lastC = prev.charAt(prev.getTextLength() - 1); if (lastC == '\n' || lastC == '\r') return false; } else { return false; } } } // check if the comment is on separate line if (comment.getTreePrev() != null) { ASTNode prev = comment.getTreePrev(); if (prev.getElementType() != ElementType.WHITE_SPACE) { return false; } else { if (!prev.textContains('\n')) return false; } } TreeElement first = (TreeElement)element.getFirstChildNode(); TreeElement child = comment; while (child != element) { TreeElement next = child.getTreeNext(); if (child.getElementType() != IMPORT_LIST) { TreeUtil.remove(child); TreeUtil.insertBefore(first, child); } child = next; } return true; } return false; } }
(no message)
source/com/intellij/psi/impl/source/parsing/ParseUtil.java
(no message)
<ide><path>ource/com/intellij/psi/impl/source/parsing/ParseUtil.java <ide> <ide> public static void restorePosition(Lexer lexer, long position) { <ide> lexer.start(lexer.getBuffer(), (int)position & 0xFFFFFFFF, lexer.getBufferEnd(), (int)(position >> 32)); <del> } <del> <del> public static int addTokens(CompositeElement parent, Lexer lexer, TokenSet typeBitSet, ParsingContext context) { <del> int count = 0; <del> while (true) { <del> IElementType tokenType = lexer.getTokenType(); <del> if (tokenType == null) break; <del> if (!typeBitSet.isInSet(tokenType)) break; <del> if (parent != null) { <del> TreeUtil.addChildren(parent, ParseUtil.createTokenElement(lexer, context.getCharTable())); <del> } <del> lexer.advance(); <del> count++; <del> } <del> return count; <del> } <del> <del> public static int addTokensUntil(CompositeElement parent, Lexer lexer, IElementType stopType, ParsingContext context) { <del> int count = 0; <del> while (true) { <del> IElementType tokenType = lexer.getTokenType(); <del> if (tokenType == null) break; <del> if (tokenType == stopType) break; <del> if (parent != null) { <del> TreeUtil.addChildren(parent, createTokenElement(lexer, context.getCharTable())); <del> } <del> lexer.advance(); <del> count++; <del> } <del> return count; <ide> } <ide> <ide> public static String getTokenText(Lexer lexer) {
Java
apache-2.0
a90fbd7fc6ce01e22ec092cf096fe0cad4b8996a
0
tsygipova/java_first
package ru.stqa.pft.addressbook.appmanager; import org.openqa.selenium.By; import org.openqa.selenium.NoSuchElementException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.support.ui.Select; import org.testng.Assert; import ru.stqa.pft.addressbook.model.ContactsData; /** * Created by Дарья on 03-Sep-16. */ public class ContactsHelper extends BaseHelper { public ContactsHelper(WebDriver wd) { super(wd); } public void returnToContactsPage() { click(By.linkText("home page")); } public void submitContactCreation() { click(By.xpath("//div[@id='content']/form/input[21]")); } public void fillContactForm(ContactsData contactsData, boolean creation) { type(By.name("firstname"), contactsData.getFirstname()); type(By.name("lastname"), contactsData.getLastname()); type(By.name("nickname"), contactsData.getNickname()); type(By.name("company"), contactsData.getCompany()); type(By.name("address"), contactsData.getAddress()); type(By.name("home"), contactsData.getHomephone()); type(By.name("mobile"), contactsData.getMobilephone()); type(By.name("email"), contactsData.getEmail1()); if (creation) { new Select(wd.findElement(By.name("new_group"))).selectByVisibleText(contactsData.getGroup()); } else { Assert.assertFalse(isElementPresent(By.name("new_group"))); } } public void initContactCreation() { click(By.linkText("add new")); } public void selectContact() { click(By.name("selected[]")); } public void initContactDeletion() { click(By.xpath("//div[@id='content']/form[2]/div[2]/input")); } public void submitContactDeletion() { wd.switchTo().alert().accept(); } public void initContactModification() { click(By.xpath("//table[@id='maintable']/tbody/tr[2]/td[8]/a/img")); } public void submitContactModification() { click(By.name("update")); } public void createContact(ContactsData contact) { initContactCreation(); fillContactForm(contact, true); submitContactCreation(); returnToContactsPage(); } public boolean 
isThereAContact() { return isElementPresent(By.name("selected[]")); } }
addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/appmanager/ContactsHelper.java
package ru.stqa.pft.addressbook.appmanager; import org.openqa.selenium.By; import org.openqa.selenium.NoSuchElementException; import org.openqa.selenium.WebDriver; import org.openqa.selenium.firefox.FirefoxDriver; import org.openqa.selenium.support.ui.Select; import org.testng.Assert; import ru.stqa.pft.addressbook.model.ContactsData; /** * Created by Дарья on 03-Sep-16. */ public class ContactsHelper extends BaseHelper { public ContactsHelper(WebDriver wd) { super(wd); } public void returnToContactsPage() { click(By.linkText("home page")); } public void submitContactCreation() { click(By.xpath("//div[@id='content']/form/input[21]")); } public void fillContactForm(ContactsData contactsData, boolean creation) { type(By.name("firstname"), contactsData.getFirstname()); type(By.name("lastname"), contactsData.getLastname()); type(By.name("nickname"), contactsData.getNickname()); type(By.name("company"), contactsData.getCompany()); type(By.name("address"), contactsData.getAddress()); type(By.name("home"), contactsData.getHomephone()); type(By.name("mobile"), contactsData.getMobilephone()); type(By.name("email"), contactsData.getEmail1()); if (creation) { new Select(wd.findElement(By.name("new_group"))).selectByVisibleText(contactsData.getGroup()); } else { Assert.assertFalse(isElementPresent(By.name("new_group"))); } } public void initContactCreation() { click(By.linkText("add new")); } public void selectContact() { click(By.name("selected[]")); } public void initContactDeletion() { click(By.xpath("//div[@id='content']/form[2]/div[2]/input")); } public void submitContactDeletion() { wd.switchTo().alert().accept(); } public void initContactModification() { click(By.xpath("//table[@id='maintable']/tbody/tr[2]/td[8]/a/img")); } public void submitContactModification() { click(By.name("update")); } public void createContact(ContactsData contact) { initContactCreation(); fillContactForm(contact); submitContactCreation(); returnToContactsPage(); } public void 
fillContactForm(ContactsData contact) { } public boolean isThereAContact() { return isElementPresent(By.name("selected[]")); } }
ошибка
addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/appmanager/ContactsHelper.java
ошибка
<ide><path>ddressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/appmanager/ContactsHelper.java <ide> <ide> public void createContact(ContactsData contact) { <ide> initContactCreation(); <del> fillContactForm(contact); <add> fillContactForm(contact, true); <ide> submitContactCreation(); <ide> returnToContactsPage(); <ide> <ide> } <ide> <del> public void fillContactForm(ContactsData contact) { <del> } <del> <ide> public boolean isThereAContact() { <ide> return isElementPresent(By.name("selected[]")); <ide> } <add> <add> <ide> } <ide> <ide>
Java
apache-2.0
6c7b40bedbb5fc8a198596e671dab8c36640d1d8
0
apache/commons-jexl,apache/commons-jexl,apache/commons-jexl
/* * Copyright 2002-2006 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.jexl.parser; import org.apache.commons.jexl.JexlContext; /** * A Useful implementation of {@link Node}. * Mostly autogenerated by javacc * * @author <a href="mailto:[email protected]">Geir Magnusson Jr.</a> * @version $Id$ */ public class SimpleNode implements Node { /** parent node. */ protected Node parent; /** children of this node. */ protected Node[] children; /** id of the node. */ protected int id; /** parser that created the node. */ protected Parser parser; /** * Create the node given an id. * @param i node id. */ public SimpleNode(int i) { id = i; } /** * Create a node with the given parser and id. * @param p a parser. * @param i node id. */ public SimpleNode(Parser p, int i) { this(i); parser = p; } public void jjtOpen() { } public void jjtClose() { } public void jjtSetParent(Node n) { parent = n; } public Node jjtGetParent() { return parent; } public void jjtAddChild(Node n, int i) { if (children == null) { children = new Node[i + 1]; } else if (i >= children.length) { Node c[] = new Node[i + 1]; System.arraycopy(children, 0, c, 0, children.length); children = c; } children[i] = n; } public Node jjtGetChild(int i) { return children[i]; } public int jjtGetNumChildren() { return (children == null) ? 0 : children.length; } /** * Accept the visitor. * @param visitor a {@link ParserVisitor}. 
* @param data data to be passed along to the visitor. * @return the value from visiting. * @see ParserVisitor#visit */ public Object jjtAccept(ParserVisitor visitor, Object data) { return visitor.visit(this, data); } /** Accept the visitor. **/ public Object childrenAccept(ParserVisitor visitor, Object data) { if (children != null) { for (int i = 0; i < children.length; ++i) { children[i].jjtAccept(visitor, data); } } return data; } public String toString() { return ParserTreeConstants.jjtNodeName[id]; } public String toString(String prefix) { return prefix + toString(); } public void dump(String prefix) { System.out.println(toString(prefix)); if (children != null) { for (int i = 0; i < children.length; ++i) { SimpleNode n = (SimpleNode)children[i]; if (n != null) { n.dump(prefix + " "); } } } } /** * basic interpret - just invoke interpret on all children */ public boolean interpret(JexlContext pc) throws Exception { for (int i=0; i<jjtGetNumChildren();i++) { SimpleNode node = (SimpleNode) jjtGetChild(i); if (!node.interpret(pc)) return false; } return true; } /** * Gets the value of this node. * @param context the context to retrieve values from. * @return the result of addition. * @throws Exception when evaluating the operands fails. */ public Object value(JexlContext context) throws Exception { return null; } /** * Sets the value for the node - again, only makes sense for some nodes * but lazyness tempts me to put it here. Keeps things simple. */ public Object setValue(JexlContext context, Object value) throws Exception { return null; } /** * Used to let a node calcuate it's value.. */ public Object execute(Object o, JexlContext ctx) throws Exception { return null; } }
src/java/org/apache/commons/jexl/parser/SimpleNode.java
/* * Copyright 2002,2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.jexl.parser; import org.apache.commons.jexl.JexlContext; /** * A Useful implementation of {@link Node}. * Mostly autogenerated by javacc * * @author <a href="mailto:[email protected]">Geir Magnusson Jr.</a> * @version $Id$ */ public class SimpleNode implements Node { protected Node parent; protected Node[] children; protected int id; protected Parser parser; public SimpleNode(int i) { id = i; } public SimpleNode(Parser p, int i) { this(i); parser = p; } public void jjtOpen() { } public void jjtClose() { } public void jjtSetParent(Node n) { parent = n; } public Node jjtGetParent() { return parent; } public void jjtAddChild(Node n, int i) { if (children == null) { children = new Node[i + 1]; } else if (i >= children.length) { Node c[] = new Node[i + 1]; System.arraycopy(children, 0, c, 0, children.length); children = c; } children[i] = n; } public Node jjtGetChild(int i) { return children[i]; } public int jjtGetNumChildren() { return (children == null) ? 0 : children.length; } /** Accept the visitor. **/ public Object jjtAccept(ParserVisitor visitor, Object data) { return visitor.visit(this, data); } /** Accept the visitor. 
**/ public Object childrenAccept(ParserVisitor visitor, Object data) { if (children != null) { for (int i = 0; i < children.length; ++i) { children[i].jjtAccept(visitor, data); } } return data; } public String toString() { return ParserTreeConstants.jjtNodeName[id]; } public String toString(String prefix) { return prefix + toString(); } public void dump(String prefix) { System.out.println(toString(prefix)); if (children != null) { for (int i = 0; i < children.length; ++i) { SimpleNode n = (SimpleNode)children[i]; if (n != null) { n.dump(prefix + " "); } } } } /** * basic interpret - just invoke interpret on all children */ public boolean interpret(JexlContext pc) throws Exception { for (int i=0; i<jjtGetNumChildren();i++) { SimpleNode node = (SimpleNode) jjtGetChild(i); if (!node.interpret(pc)) return false; } return true; } /** * Returns the value of the node. */ public Object value(JexlContext context) throws Exception { return null; } /** * Sets the value for the node - again, only makes sense for some nodes * but lazyness tempts me to put it here. Keeps things simple. */ public Object setValue(JexlContext context, Object value) throws Exception { return null; } /** * Used to let a node calcuate it's value.. */ public Object execute(Object o, JexlContext ctx) throws Exception { return null; } }
Copy over basic node docs from AddNode git-svn-id: de0229a90a04588cc4459530912ec55932f3d65c@397347 13f79535-47bb-0310-9956-ffa450edef68
src/java/org/apache/commons/jexl/parser/SimpleNode.java
Copy over basic node docs from AddNode
<ide><path>rc/java/org/apache/commons/jexl/parser/SimpleNode.java <ide> /* <del> * Copyright 2002,2004 The Apache Software Foundation. <del> * <add> * Copyright 2002-2006 The Apache Software Foundation. <add> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> * You may obtain a copy of the License at <del> * <add> * <ide> * http://www.apache.org/licenses/LICENSE-2.0 <del> * <add> * <ide> * Unless required by applicable law or agreed to in writing, software <ide> * distributed under the License is distributed on an "AS IS" BASIS, <ide> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <ide> * @author <a href="mailto:[email protected]">Geir Magnusson Jr.</a> <ide> * @version $Id$ <ide> */ <del>public class SimpleNode implements Node <del>{ <add>public class SimpleNode implements Node { <add> /** parent node. */ <ide> protected Node parent; <add> /** children of this node. */ <ide> protected Node[] children; <add> /** id of the node. */ <ide> protected int id; <add> /** parser that created the node. */ <ide> protected Parser parser; <ide> <del> public SimpleNode(int i) <del> { <add> /** <add> * Create the node given an id. <add> * @param i node id. <add> */ <add> public SimpleNode(int i) { <ide> id = i; <ide> } <ide> <del> public SimpleNode(Parser p, int i) <del> { <add> /** <add> * Create a node with the given parser and id. <add> * @param p a parser. <add> * @param i node id. <add> */ <add> public SimpleNode(Parser p, int i) { <ide> this(i); <ide> parser = p; <ide> } <ide> return (children == null) ? 0 : children.length; <ide> } <ide> <del> /** Accept the visitor. **/ <add> /** <add> * Accept the visitor. <add> * @param visitor a {@link ParserVisitor}. <add> * @param data data to be passed along to the visitor. <add> * @return the value from visiting. 
<add> * @see ParserVisitor#visit <add> */ <ide> public Object jjtAccept(ParserVisitor visitor, Object data) <ide> { <ide> return visitor.visit(this, data); <ide> <ide> <ide> /** <del> * Returns the value of the node. <del> */ <del> public Object value(JexlContext context) <del> throws Exception <del> { <add> * Gets the value of this node. <add> * @param context the context to retrieve values from. <add> * @return the result of addition. <add> * @throws Exception when evaluating the operands fails. <add> */ <add> public Object value(JexlContext context) throws Exception { <ide> return null; <ide> } <ide>
Java
apache-2.0
a1eed0d6fb432b32139353d7953d4e0638ca7146
0
npaduch/reminder
package com.npaduch.reminder; import android.app.Activity; import android.app.FragmentManager; import android.content.res.Configuration; import android.os.Bundle; import android.support.v4.app.ActionBarDrawerToggle; import android.support.v4.widget.DrawerLayout; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.ListView; /** * Created by nolanpaduch on 5/3/14. */ public class MainActivity extends Activity { /* Navigation Drawer */ private String[] mDrawerLabels; private DrawerLayout mDrawerLayout; private ListView mDrawerList; public CharSequence mTitle; public CharSequence mDrawerTitle; private ActionBarDrawerToggle mDrawerToggle; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); /** Navigation Drawer */ mDrawerLabels = getResources().getStringArray(R.array.drawer_titles); mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); mDrawerList = (ListView) findViewById(R.id.left_drawer); // Set the adapter for the list view mDrawerList.setAdapter(new ArrayAdapter<String>(this, R.layout.drawer_list_item, mDrawerLabels)); // Set the list's click listener mDrawerList.setOnItemClickListener(new DrawerItemClickListener()); /* Fragment Manager */ // load initial fragment if (savedInstanceState == null) { getFragmentManager().beginTransaction() .add(R.id.content_frame, new MainFragment()) .commit(); } /** Handle open and close drawer events */ mTitle = mDrawerTitle = getTitle(); mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); mDrawerToggle = new ActionBarDrawerToggle( this, /* host Activity */ mDrawerLayout, /* DrawerLayout object */ R.drawable.ic_drawer, /* nav drawer icon to replace 'Up' caret */ R.string.drawer_open, /* "open drawer" description */ R.string.drawer_close /* "close drawer" description */ ) { /** Called when a drawer has 
settled in a completely closed state. */ public void onDrawerClosed(View view) { super.onDrawerClosed(view); invalidateOptionsMenu(); getActionBar().setTitle(mTitle); } /** Called when a drawer has settled in a completely open state. */ public void onDrawerOpened(View drawerView) { super.onDrawerOpened(drawerView); invalidateOptionsMenu(); getActionBar().setTitle(mDrawerTitle); } }; // Set the drawer toggle as the DrawerListener mDrawerLayout.setDrawerListener(mDrawerToggle); // set to enable drawer from action bar getActionBar().setDisplayHomeAsUpEnabled(true); getActionBar().setHomeButtonEnabled(true); } /** Called whenever we call invalidateOptionsMenu() */ @Override public boolean onPrepareOptionsMenu(Menu menu) { // If the nav drawer is open, hide action items related to the content view boolean drawerOpen = mDrawerLayout.isDrawerOpen(mDrawerList); menu.findItem(R.id.action_settings).setVisible(!drawerOpen); menu.findItem(R.id.action_add_reminder).setVisible(!drawerOpen); return super.onPrepareOptionsMenu(menu); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Pass the event to ActionBarDrawerToggle, if it returns // true, then it has handled the app icon touch event if (mDrawerToggle.onOptionsItemSelected(item)) { return true; } // Handle your other action bar items... 
return super.onOptionsItemSelected(item); } /** Navigation Drawer Item Click Listener */ private class DrawerItemClickListener implements ListView.OnItemClickListener { @Override public void onItemClick(AdapterView parent, View view, int position, long id) { selectItem(position); } } /** Swaps fragments in the main content view */ private void selectItem(int position) { // Create a new fragment and specify the planet to show based on position MainFragment fragment = new MainFragment(); // Insert the fragment by replacing any existing fragment FragmentManager fragmentManager = getFragmentManager(); fragmentManager.beginTransaction() .replace(R.id.content_frame, fragment) .commit(); // Highlight the selected item, update the title, and close the drawer mDrawerList.setItemChecked(position, true); setTitle(getResources().getStringArray(R.array.drawer_titles)[position]); mDrawerLayout.closeDrawer(mDrawerList); } @Override public void setTitle(CharSequence title) { mTitle = title; getActionBar().setTitle(mTitle); } /** Navigation Draw */ @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); // Sync the toggle state after onRestoreInstanceState has occurred. mDrawerToggle.syncState(); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); mDrawerToggle.onConfigurationChanged(newConfig); } }
reminder/src/main/java/com/npaduch/reminder/MainActivity.java
package com.npaduch.reminder; import android.app.Activity; import android.app.FragmentManager; import android.content.res.Configuration; import android.os.Bundle; import android.support.v4.app.ActionBarDrawerToggle; import android.support.v4.widget.DrawerLayout; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.ListView; /** * Created by nolanpaduch on 5/3/14. */ public class MainActivity extends Activity { /* Navigation Drawer */ private String[] mDrawerLabels; private DrawerLayout mDrawerLayout; private ListView mDrawerList; public CharSequence mTitle; public CharSequence mDrawerTitle; private ActionBarDrawerToggle mDrawerToggle; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_main); /* Navigation Drawer */ mDrawerLabels = getResources().getStringArray(R.array.drawer_titles); mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); mDrawerList = (ListView) findViewById(R.id.left_drawer); // Set the adapter for the list view mDrawerList.setAdapter(new ArrayAdapter<String>(this, R.layout.drawer_list_item, mDrawerLabels)); // Set the list's click listener mDrawerList.setOnItemClickListener(new DrawerItemClickListener()); /* Fragment Manager */ if (savedInstanceState == null) { getFragmentManager().beginTransaction() .add(R.id.content_frame, new MainFragment()) .commit(); } /* Handle open and close drawer events */ mTitle = mDrawerTitle = getTitle(); mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); mDrawerToggle = new ActionBarDrawerToggle( this, /* host Activity */ mDrawerLayout, /* DrawerLayout object */ R.drawable.ic_drawer, /* nav drawer icon to replace 'Up' caret */ R.string.drawer_open, /* "open drawer" description */ R.string.drawer_close /* "close drawer" description */ ) { /** Called when a drawer has settled in a completely 
closed state. */ public void onDrawerClosed(View view) { super.onDrawerClosed(view); invalidateOptionsMenu(); getActionBar().setTitle(mTitle); } /** Called when a drawer has settled in a completely open state. */ public void onDrawerOpened(View drawerView) { super.onDrawerOpened(drawerView); invalidateOptionsMenu(); getActionBar().setTitle(mDrawerTitle); } }; // Set the drawer toggle as the DrawerListener mDrawerLayout.setDrawerListener(mDrawerToggle); getActionBar().setDisplayHomeAsUpEnabled(true); getActionBar().setHomeButtonEnabled(true); } /* Called whenever we call invalidateOptionsMenu() */ @Override public boolean onPrepareOptionsMenu(Menu menu) { // If the nav drawer is open, hide action items related to the content view boolean drawerOpen = mDrawerLayout.isDrawerOpen(mDrawerList); menu.findItem(R.id.action_settings).setVisible(!drawerOpen); menu.findItem(R.id.action_add_reminder).setVisible(!drawerOpen); return super.onPrepareOptionsMenu(menu); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.main, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Pass the event to ActionBarDrawerToggle, if it returns // true, then it has handled the app icon touch event if (mDrawerToggle.onOptionsItemSelected(item)) { return true; } // Handle your other action bar items... 
return super.onOptionsItemSelected(item); } /* Navigation Drawer */ private class DrawerItemClickListener implements ListView.OnItemClickListener { @Override public void onItemClick(AdapterView parent, View view, int position, long id) { selectItem(position); } } /** Swaps fragments in the main content view */ private void selectItem(int position) { // Create a new fragment and specify the planet to show based on position MainFragment fragment = new MainFragment(); // Insert the fragment by replacing any existing fragment FragmentManager fragmentManager = getFragmentManager(); fragmentManager.beginTransaction() .replace(R.id.content_frame, fragment) .commit(); // Highlight the selected item, update the title, and close the drawer mDrawerList.setItemChecked(position, true); setTitle(getResources().getStringArray(R.array.drawer_titles)[position]); mDrawerLayout.closeDrawer(mDrawerList); } @Override public void setTitle(CharSequence title) { mTitle = title; getActionBar().setTitle(mTitle); } /* Navigation Draw */ @Override protected void onPostCreate(Bundle savedInstanceState) { super.onPostCreate(savedInstanceState); // Sync the toggle state after onRestoreInstanceState has occurred. mDrawerToggle.syncState(); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); mDrawerToggle.onConfigurationChanged(newConfig); } }
Updated some comments
reminder/src/main/java/com/npaduch/reminder/MainActivity.java
Updated some comments
<ide><path>eminder/src/main/java/com/npaduch/reminder/MainActivity.java <ide> super.onCreate(savedInstanceState); <ide> setContentView(R.layout.activity_main); <ide> <del> /* Navigation Drawer */ <add> /** Navigation Drawer */ <ide> mDrawerLabels = getResources().getStringArray(R.array.drawer_titles); <ide> mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); <ide> mDrawerList = (ListView) findViewById(R.id.left_drawer); <ide> mDrawerList.setOnItemClickListener(new DrawerItemClickListener()); <ide> <ide> /* Fragment Manager */ <add> // load initial fragment <ide> if (savedInstanceState == null) { <ide> getFragmentManager().beginTransaction() <ide> .add(R.id.content_frame, new MainFragment()) <ide> .commit(); <ide> } <ide> <del> /* Handle open and close drawer events */ <add> /** Handle open and close drawer events */ <ide> mTitle = mDrawerTitle = getTitle(); <ide> mDrawerLayout = (DrawerLayout) findViewById(R.id.drawer_layout); <ide> mDrawerToggle = new ActionBarDrawerToggle( <ide> // Set the drawer toggle as the DrawerListener <ide> mDrawerLayout.setDrawerListener(mDrawerToggle); <ide> <add> // set to enable drawer from action bar <ide> getActionBar().setDisplayHomeAsUpEnabled(true); <ide> getActionBar().setHomeButtonEnabled(true); <ide> <ide> } <ide> <del> /* Called whenever we call invalidateOptionsMenu() */ <add> /** Called whenever we call invalidateOptionsMenu() */ <ide> @Override <ide> public boolean onPrepareOptionsMenu(Menu menu) { <ide> // If the nav drawer is open, hide action items related to the content view <ide> return super.onOptionsItemSelected(item); <ide> } <ide> <del> /* Navigation Drawer */ <add> /** Navigation Drawer Item Click Listener */ <ide> private class DrawerItemClickListener implements ListView.OnItemClickListener { <ide> @Override <ide> public void onItemClick(AdapterView parent, View view, int position, long id) { <ide> getActionBar().setTitle(mTitle); <ide> } <ide> <del> /* Navigation Draw */ <del> <add> /** Navigation 
Draw */ <ide> @Override <ide> protected void onPostCreate(Bundle savedInstanceState) { <ide> super.onPostCreate(savedInstanceState);
Java
lgpl-2.1
9a0745b93786693b6c4c4190037330b0b65b1cc6
0
sbliven/biojava,sbliven/biojava,sbliven/biojava
/* * BioJava development code * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. If you do not have a copy, * see: * * http://www.gnu.org/copyleft/lesser.html * * Copyright for this code is held jointly by the individual * authors. These should be listed in @author doc comments. * * For more information on the BioJava project and its aims, * or to join the biojava-l mailing list, visit the home page * at: * * http://www.biojava.org/ * * created at Apr 26, 2008 */ package org.biojava.bio.structure; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.List; import org.biojava.bio.structure.io.PDBFileParser; import org.biojava.bio.structure.io.mmcif.MMcifParser; import org.biojava.bio.structure.io.mmcif.SimpleMMcifConsumer; import org.biojava.bio.structure.io.mmcif.SimpleMMcifParser; import junit.framework.TestCase; public class MMcifTest extends TestCase { public void testLoad(){ // test a simple protein comparePDB2cif("5pti","A"); // test a protein with modified residues comparePDB2cif("1a4w","L"); comparePDB2cif("1a4w","H"); comparePDB2cif("1a4w","I"); // test a DNA binding protein comparePDB2cif("1j59","A"); comparePDB2cif("1j59","B"); comparePDB2cif("1j59","C"); comparePDB2cif("1j59","D"); comparePDB2cif("1j59","E"); comparePDB2cif("1j59","F"); // test a NMR protein comparePDB2cif("2kc9","A"); } private void comparePDB2cif(String id, String chainId){ String fileName = "/files/"+id+".cif"; InputStream inStream = this.getClass().getResourceAsStream(fileName); assertNotNull(inStream); MMcifParser parser = new SimpleMMcifParser(); SimpleMMcifConsumer consumer = new SimpleMMcifConsumer(); parser.addMMcifConsumer(consumer); try { parser.parse(new BufferedReader(new InputStreamReader(inStream))); } catch (IOException e){ fail(e.getMessage()); } Structure cifStructure = 
consumer.getStructure(); assertNotNull(cifStructure); // load the PDB file via the PDB parser Structure pdbStructure = null; InputStream pinStream = this.getClass().getResourceAsStream("/files/"+id+".pdb"); assertNotNull(inStream); PDBFileParser pdbpars = new PDBFileParser(); try { pdbStructure = pdbpars.parsePDBFile(pinStream) ; } catch (IOException e) { e.printStackTrace(); } assertNotNull(pdbStructure); //System.out.println(pdbStructure.toPDB()); //System.out.println(cifStructure.toPDB()); // now compare the results try { // chech NMR data assertEquals("the isNMR flag is not the same!", pdbStructure.isNmr(), cifStructure.isNmr()); if ( pdbStructure.isNmr()){ assertEquals("the nr of NMR models is not the same!", pdbStructure.nrModels(), pdbStructure.nrModels()); checkNMR(pdbStructure); checkNMR(cifStructure); } //System.out.println(pdbStructure); //System.out.println(cifStructure); // compare amino acids in chain 1: Chain a_pdb = pdbStructure.getChainByPDB(chainId); Chain a_cif = cifStructure.getChainByPDB(chainId); //System.out.println(a_pdb.getAtomGroups()); //System.out.println(a_cif.getAtomGroups()); //for (Group g: a_cif.getAtomGroups()){ // System.out.println(g); //} //System.out.println("--"); String pdb_SEQseq = a_pdb.getSeqResSequence(); String cif_SEQseq = a_cif.getSeqResSequence(); assertEquals("the SEQRES sequences don;t match!", pdb_SEQseq,cif_SEQseq); // actually this check not necessarily works, since there can be waters in PDB that we don;t deal with yet in cif... //assertEquals("the nr of ATOM record groups is not the same!" 
, a_pdb.getAtomLength(),a_cif.getAtomLength()); for (int i = 0 ; i < a_pdb.getAtomGroups(GroupType.AMINOACID).size(); i++){ Group gp = a_pdb.getAtomGroups(GroupType.AMINOACID).get(i); Group gc = a_cif.getAtomGroups(GroupType.AMINOACID).get(i); checkGroups(gp,gc); } String pdb_seq = a_pdb.getAtomSequence(); String cif_seq = a_cif.getAtomSequence(); //System.out.println(pdb_seq); //System.out.println(cif_seq); assertEquals("the sequences obtained from PDB and mmCif don't match!",pdb_seq, cif_seq); List<DBRef> pdb_dbrefs= pdbStructure.getDBRefs(); List<DBRef> cif_dbrefs= cifStructure.getDBRefs(); assertEquals("nr of DBrefs found does not match!", pdb_dbrefs.size(),cif_dbrefs.size()); DBRef p = pdb_dbrefs.get(0); DBRef c = cif_dbrefs.get(0); //System.out.println(p.toPDB()); //System.out.println(c.toPDB()); String pdb_dbref = p.toPDB(); String cif_dbref = c.toPDB(); assertEquals("DBRef is not equal",pdb_dbref,cif_dbref); PDBHeader h1 = pdbStructure.getPDBHeader(); PDBHeader h2 = cifStructure.getPDBHeader(); //compareString(h1.toPDB() ,h2.toPDB()); //System.out.println(h1.toPDB()); //System.out.println(h2.toPDB()); assertEquals("the PDBHeader.toPDB representation is not equivalent", h1.toPDB().toUpperCase(),h2.toPDB().toUpperCase()); // and the ultimate test! // but we are not there yet... // TODO: still need to parse SSBOND equivalent info from cif files... 
//assertEquals("the Structure.toPDB representation is not equivalent", pdbStructure.toPDB(),cifStructure.toPDB()); } catch (StructureException ex){ fail(ex.getMessage() + " for PDB: " + id); } } private void checkGroups(Group g1, Group g2){ //System.out.println("comparing " +g1 + " " + g2); assertEquals(g1.getType(),g2.getType()); assertEquals(g1.getPDBCode(),g2.getPDBCode()); assertEquals(g1.getPDBName(),g2.getPDBName()); assertEquals(g1.has3D(),g2.has3D()); assertEquals(g1.getAtoms().size(), g2.getAtoms().size()); if ( g1.has3D()){ try { Atom a1 = g1.getAtom(0); Atom a2 = g2.getAtom(0); assertEquals(a1.getX(),a2.getX()); assertEquals(a1.getOccupancy(),a2.getOccupancy()); assertEquals(a1.getTempFactor(),a2.getTempFactor()); assertEquals(a1.getFullName(),a2.getFullName()); } catch (StructureException e){ fail(e.getMessage()); } } } private void checkNMR(Structure s){ assertTrue(s.isNmr()); int models = s.nrModels(); assertTrue(models > 0); List<Chain> model0 = s.getModel(0); // compare with all others for (int i = 1 ; i < models; i++){ List<Chain> modelX = s.getModel(i); assertEquals(model0.size(),modelX.size()); // compare lengths: for (int j=0 ; j< model0.size();j++){ Chain c1 = model0.get(j); Chain cx = modelX.get(j); assertEquals(c1.getAtomLength(),cx.getAtomLength()); // can;t compare seq res, since this is only done for 1st... 
//assertEquals("c1.getSeqResLength(),cx.getSeqResLength()); assertEquals(c1.getAtomSequence(),cx.getAtomSequence()); assertEquals(c1.getLengthAminos(),cx.getLengthAminos()); assertEquals(c1.getAtomGroups(GroupType.AMINOACID).size(),cx.getAtomGroups(GroupType.AMINOACID).size()); assertEquals(c1.getAtomGroups(GroupType.NUCLEOTIDE).size(),cx.getAtomGroups(GroupType.NUCLEOTIDE).size()); assertEquals(c1.getAtomGroups(GroupType.HETATM).size(),cx.getAtomGroups(GroupType.HETATM).size()); } } } private void compareString(String t, String pdb){ for (int i =0 ; i < t.length() ; i++){ System.out.println(">"+t.charAt(i)+":"+ pdb.charAt(i)+"<"); if ( Character.toUpperCase(t.charAt(i)) != Character.toUpperCase(pdb.charAt(i))){ break; } } } }
tests/org/biojava/bio/structure/MMcifTest.java
/* * BioJava development code * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. If you do not have a copy, * see: * * http://www.gnu.org/copyleft/lesser.html * * Copyright for this code is held jointly by the individual * authors. These should be listed in @author doc comments. * * For more information on the BioJava project and its aims, * or to join the biojava-l mailing list, visit the home page * at: * * http://www.biojava.org/ * * created at Apr 26, 2008 */ package org.biojava.bio.structure; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.List; import org.biojava.bio.structure.io.PDBFileParser; import org.biojava.bio.structure.io.mmcif.MMcifParser; import org.biojava.bio.structure.io.mmcif.SimpleMMcifConsumer; import org.biojava.bio.structure.io.mmcif.SimpleMMcifParser; import junit.framework.TestCase; public class MMcifTest extends TestCase { public void testLoad(){ // test a simple protein comparePDB2cif("5pti","A"); // test a protein with modified residues comparePDB2cif("1a4w","L"); comparePDB2cif("1a4w","H"); comparePDB2cif("1a4w","I"); // test a DNA binding protein comparePDB2cif("1j59","A"); comparePDB2cif("1j59","B"); comparePDB2cif("1j59","C"); comparePDB2cif("1j59","D"); comparePDB2cif("1j59","E"); comparePDB2cif("1j59","F"); // test a NMR protein comparePDB2cif("2kc9","A"); } private void comparePDB2cif(String id, String chainId){ String fileName = "/files/"+id+".cif"; InputStream inStream = this.getClass().getResourceAsStream(fileName); assertNotNull(inStream); MMcifParser parser = new SimpleMMcifParser(); SimpleMMcifConsumer consumer = new SimpleMMcifConsumer(); parser.addMMcifConsumer(consumer); try { parser.parse(new BufferedReader(new InputStreamReader(inStream))); } catch (IOException e){ fail(e.getMessage()); } Structure cifStructure = 
consumer.getStructure(); assertNotNull(cifStructure); // load the PDB file via the PDB parser Structure pdbStructure = null; InputStream pinStream = this.getClass().getResourceAsStream("/files/"+id+".pdb"); assertNotNull(inStream); PDBFileParser pdbpars = new PDBFileParser(); try { pdbStructure = pdbpars.parsePDBFile(pinStream) ; } catch (IOException e) { e.printStackTrace(); } assertNotNull(pdbStructure); //System.out.println(pdbStructure.toPDB()); //System.out.println(cifStructure.toPDB()); // now compare the results try { // chech NMR data assertEquals("the isNMR flag is not the same!", pdbStructure.isNmr(), cifStructure.isNmr()); if ( pdbStructure.isNmr()){ assertEquals("the nr of NMR models is not the same!", pdbStructure.nrModels(), pdbStructure.nrModels()); checkNMR(pdbStructure); checkNMR(cifStructure); } //System.out.println(pdbStructure); //System.out.println(cifStructure); // compare amino acids in chain 1: Chain a_pdb = pdbStructure.getChainByPDB(chainId); Chain a_cif = cifStructure.getChainByPDB(chainId); //System.out.println(a_pdb.getAtomGroups()); //System.out.println(a_cif.getAtomGroups()); //for (Group g: a_cif.getAtomGroups()){ // System.out.println(g); //} //System.out.println("--"); String pdb_SEQseq = a_pdb.getSeqResSequence(); String cif_SEQseq = a_cif.getSeqResSequence(); assertEquals("the SEQRES sequences don;t match!", pdb_SEQseq,cif_SEQseq); // actually this check not necessarily works, since there can be waters in PDB that we don;t deal with yet in cif... //assertEquals("the nr of ATOM record groups is not the same!" 
, a_pdb.getAtomLength(),a_cif.getAtomLength()); for (int i = 0 ; i < a_pdb.getAtomGroups(GroupType.AMINOACID).size(); i++){ Group gp = a_pdb.getAtomGroups(GroupType.AMINOACID).get(i); Group gc = a_cif.getAtomGroups(GroupType.AMINOACID).get(i); checkGroups(gp,gc); } String pdb_seq = a_pdb.getAtomSequence(); String cif_seq = a_cif.getAtomSequence(); //System.out.println(pdb_seq); //System.out.println(cif_seq); assertEquals("the sequences obtained from PDB and mmCif don't match!",pdb_seq, cif_seq); List<DBRef> pdb_dbrefs= pdbStructure.getDBRefs(); List<DBRef> cif_dbrefs= cifStructure.getDBRefs(); assertEquals("nr of DBrefs found does not match!", pdb_dbrefs.size(),cif_dbrefs.size()); DBRef p = pdb_dbrefs.get(0); DBRef c = cif_dbrefs.get(0); //System.out.println(p.toPDB()); //System.out.println(c.toPDB()); String pdb_dbref = p.toPDB(); String cif_dbref = c.toPDB(); assertEquals("DBRef is not equal",pdb_dbref,cif_dbref); PDBHeader h1 = pdbStructure.getPDBHeader(); PDBHeader h2 = cifStructure.getPDBHeader(); //compareString(h1.toPDB() ,h2.toPDB()); //System.out.println(h1.toPDB()); //System.out.println(h2.toPDB()); assertEquals("the PDBHeader.toPDB representation is not equivalent", h1.toPDB().toUpperCase(),h2.toPDB().toUpperCase()); // and the ultimate test! // but we are not there yet... // TODO: still need to parse SSBOND equivalent info from cif files... 
//assertEquals("the Structure.toPDB representation is not equivalent", pdbStructure.toPDB(),cifStructure.toPDB()); } catch (StructureException ex){ fail(ex.getMessage() + " for PDB: " + id); } } private void checkGroups(Group g1, Group g2){ System.out.println("comparing " +g1 + " " + g2); assertEquals(g1.getType(),g2.getType()); assertEquals(g1.getPDBCode(),g2.getPDBCode()); assertEquals(g1.getPDBName(),g2.getPDBName()); assertEquals(g1.has3D(),g2.has3D()); assertEquals(g1.getAtoms().size(), g2.getAtoms().size()); if ( g1.has3D()){ try { Atom a1 = g1.getAtom(0); Atom a2 = g2.getAtom(0); assertEquals(a1.getX(),a2.getX()); assertEquals(a1.getOccupancy(),a2.getOccupancy()); assertEquals(a1.getTempFactor(),a2.getTempFactor()); assertEquals(a1.getFullName(),a2.getFullName()); } catch (StructureException e){ fail(e.getMessage()); } } } private void checkNMR(Structure s){ assertTrue(s.isNmr()); int models = s.nrModels(); assertTrue(models > 0); List<Chain> model0 = s.getModel(0); // compare with all others for (int i = 1 ; i < models; i++){ List<Chain> modelX = s.getModel(i); assertEquals(model0.size(),modelX.size()); // compare lengths: for (int j=0 ; j< model0.size();j++){ Chain c1 = model0.get(j); Chain cx = modelX.get(j); assertEquals(c1.getAtomLength(),cx.getAtomLength()); // can;t compare seq res, since this is only done for 1st... 
//assertEquals("c1.getSeqResLength(),cx.getSeqResLength()); assertEquals(c1.getAtomSequence(),cx.getAtomSequence()); assertEquals(c1.getLengthAminos(),cx.getLengthAminos()); assertEquals(c1.getAtomGroups(GroupType.AMINOACID).size(),cx.getAtomGroups(GroupType.AMINOACID).size()); assertEquals(c1.getAtomGroups(GroupType.NUCLEOTIDE).size(),cx.getAtomGroups(GroupType.NUCLEOTIDE).size()); assertEquals(c1.getAtomGroups(GroupType.HETATM).size(),cx.getAtomGroups(GroupType.HETATM).size()); } } } private void compareString(String t, String pdb){ for (int i =0 ; i < t.length() ; i++){ System.out.println(">"+t.charAt(i)+":"+ pdb.charAt(i)+"<"); if ( Character.toUpperCase(t.charAt(i)) != Character.toUpperCase(pdb.charAt(i))){ break; } } } }
less verbose parsing git-svn-id: ed25c26de1c5325e8eb0deed0b990ab8af8a4def@6910 7c6358e6-4a41-0410-a743-a5b2a554c398
tests/org/biojava/bio/structure/MMcifTest.java
less verbose parsing
<ide><path>ests/org/biojava/bio/structure/MMcifTest.java <ide> <ide> private void checkGroups(Group g1, Group g2){ <ide> <del> System.out.println("comparing " +g1 + " " + g2); <add> //System.out.println("comparing " +g1 + " " + g2); <ide> <ide> assertEquals(g1.getType(),g2.getType()); <ide> assertEquals(g1.getPDBCode(),g2.getPDBCode());
Java
bsd-3-clause
3742cb3e372e8edbb90d06f818f4e0116820d23a
0
CleverTap/apns-http2
/* * Copyright (c) 2016, CleverTap * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * - Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * - Neither the name of CleverTap nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.clevertap.apns.clients; import com.clevertap.apns.Notification; import com.clevertap.apns.NotificationResponse; import com.clevertap.apns.NotificationResponseListener; import com.clevertap.apns.exceptions.InvalidTrustManagerException; import okhttp3.*; import java.io.IOException; import java.io.InputStream; import java.security.KeyManagementException; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.UnrecoverableKeyException; import java.security.cert.CertificateException; /** * A wrapper around OkHttp's http client to send out notifications using Apple's HTTP/2 API. */ public class AsyncOkHttpApnsClient extends SyncOkHttpApnsClient { public AsyncOkHttpApnsClient(String apnsAuthKey, String teamID, String keyID, boolean production, String defaultTopic, ConnectionPool connectionPool) { super(apnsAuthKey, teamID, keyID, production, defaultTopic, connectionPool); } public AsyncOkHttpApnsClient(InputStream certificate, String password, boolean production, String defaultTopic, ConnectionPool connectionPool) throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException, UnrecoverableKeyException, KeyManagementException, InvalidTrustManagerException { super(certificate, password, production, defaultTopic, connectionPool); } public AsyncOkHttpApnsClient(String apnsAuthKey, String teamID, String keyID, boolean production, String defaultTopic, OkHttpClient.Builder builder) { this(apnsAuthKey, teamID, keyID, production, defaultTopic, builder, 443); } public AsyncOkHttpApnsClient(String apnsAuthKey, String teamID, String keyID, boolean production, String defaultTopic, OkHttpClient.Builder builder, int connectionPort, String gatewayUrl) { super(apnsAuthKey, teamID, keyID, production, defaultTopic, builder, gatewayUrl); } public AsyncOkHttpApnsClient(String apnsAuthKey, String teamID, String keyID, boolean production, String defaultTopic, OkHttpClient.Builder builder, int 
connectionPort) { this(apnsAuthKey, teamID, keyID, production, defaultTopic, builder, 443, null); } public AsyncOkHttpApnsClient(InputStream certificate, String password, boolean production, String defaultTopic, OkHttpClient.Builder builder) throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException, UnrecoverableKeyException, KeyManagementException, InvalidTrustManagerException { this(certificate, password, production, defaultTopic, builder, 443); } public AsyncOkHttpApnsClient(InputStream certificate, String password, boolean production, String defaultTopic, OkHttpClient.Builder builder, int connectionPort, String gatewayUrl) throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException, UnrecoverableKeyException, KeyManagementException, InvalidTrustManagerException { super(certificate, password, production, defaultTopic, builder, gatewayUrl); } public AsyncOkHttpApnsClient(InputStream certificate, String password, boolean production, String defaultTopic, OkHttpClient.Builder builder, int connectionPort) throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException, UnrecoverableKeyException, KeyManagementException, InvalidTrustManagerException { this(certificate, password, production, defaultTopic, builder, 443, null); } @Override public NotificationResponse push(Notification notification) { throw new UnsupportedOperationException("Synchronous requests are not supported by this client"); } @Override public boolean isSynchronous() { return false; } @Override public void push(Notification notification, NotificationResponseListener nrl) { final Request request = buildRequest(notification); client.newCall(request).enqueue(new Callback() { @Override public void onFailure(Call call, IOException e) { nrl.onFailure(notification, new NotificationResponse(null, -1, null, e)); } @Override public void onResponse(Call call, Response response) throws IOException { final NotificationResponse nr; try { 
nr = parseResponse(response); } catch (Throwable t) { nrl.onFailure(notification, new NotificationResponse(null, -1, null, t)); return; } finally { if (response != null) { response.body().close(); } } if (nr.getHttpStatusCode() == 200) { nrl.onSuccess(notification); } else { nrl.onFailure(notification, nr); } } }); } }
src/main/java/com/clevertap/apns/clients/AsyncOkHttpApnsClient.java
/* * Copyright (c) 2016, CleverTap * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * - Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * - Neither the name of CleverTap nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package com.clevertap.apns.clients; import com.clevertap.apns.Notification; import com.clevertap.apns.NotificationResponse; import com.clevertap.apns.NotificationResponseListener; import okhttp3.*; import java.io.IOException; import java.io.InputStream; import java.security.KeyManagementException; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.UnrecoverableKeyException; import java.security.cert.CertificateException; /** * A wrapper around OkHttp's http client to send out notifications using Apple's HTTP/2 API. */ public class AsyncOkHttpApnsClient extends SyncOkHttpApnsClient { public AsyncOkHttpApnsClient(String apnsAuthKey, String teamID, String keyID, boolean production, String defaultTopic, ConnectionPool connectionPool) { super(apnsAuthKey, teamID, keyID, production, defaultTopic, connectionPool); } public AsyncOkHttpApnsClient(InputStream certificate, String password, boolean production, String defaultTopic, ConnectionPool connectionPool) throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException, UnrecoverableKeyException, KeyManagementException { super(certificate, password, production, defaultTopic, connectionPool); } public AsyncOkHttpApnsClient(String apnsAuthKey, String teamID, String keyID, boolean production, String defaultTopic, OkHttpClient.Builder builder) { this(apnsAuthKey, teamID, keyID, production, defaultTopic, builder, 443); } public AsyncOkHttpApnsClient(String apnsAuthKey, String teamID, String keyID, boolean production, String defaultTopic, OkHttpClient.Builder builder, int connectionPort, String gatewayUrl) { super(apnsAuthKey, teamID, keyID, production, defaultTopic, builder, gatewayUrl); } public AsyncOkHttpApnsClient(String apnsAuthKey, String teamID, String keyID, boolean production, String defaultTopic, OkHttpClient.Builder builder, int connectionPort) { this(apnsAuthKey, teamID, keyID, production, defaultTopic, builder, 443, null); } public 
AsyncOkHttpApnsClient(InputStream certificate, String password, boolean production, String defaultTopic, OkHttpClient.Builder builder) throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException, UnrecoverableKeyException, KeyManagementException { this(certificate, password, production, defaultTopic, builder, 443); } public AsyncOkHttpApnsClient(InputStream certificate, String password, boolean production, String defaultTopic, OkHttpClient.Builder builder, int connectionPort, String gatewayUrl) throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException, UnrecoverableKeyException, KeyManagementException { super(certificate, password, production, defaultTopic, builder, gatewayUrl); } public AsyncOkHttpApnsClient(InputStream certificate, String password, boolean production, String defaultTopic, OkHttpClient.Builder builder, int connectionPort) throws CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException, UnrecoverableKeyException, KeyManagementException { this(certificate, password, production, defaultTopic, builder, 443, null); } @Override public NotificationResponse push(Notification notification) { throw new UnsupportedOperationException("Synchronous requests are not supported by this client"); } @Override public boolean isSynchronous() { return false; } @Override public void push(Notification notification, NotificationResponseListener nrl) { final Request request = buildRequest(notification); client.newCall(request).enqueue(new Callback() { @Override public void onFailure(Call call, IOException e) { nrl.onFailure(notification, new NotificationResponse(null, -1, null, e)); } @Override public void onResponse(Call call, Response response) throws IOException { final NotificationResponse nr; try { nr = parseResponse(response); } catch (Throwable t) { nrl.onFailure(notification, new NotificationResponse(null, -1, null, t)); return; } finally { if (response != null) { response.body().close(); } 
} if (nr.getHttpStatusCode() == 200) { nrl.onSuccess(notification); } else { nrl.onFailure(notification, nr); } } }); } }
Added InvalidTrustManagerException to the method signature
src/main/java/com/clevertap/apns/clients/AsyncOkHttpApnsClient.java
Added InvalidTrustManagerException to the method signature
<ide><path>rc/main/java/com/clevertap/apns/clients/AsyncOkHttpApnsClient.java <ide> import com.clevertap.apns.Notification; <ide> import com.clevertap.apns.NotificationResponse; <ide> import com.clevertap.apns.NotificationResponseListener; <add>import com.clevertap.apns.exceptions.InvalidTrustManagerException; <ide> import okhttp3.*; <ide> <ide> import java.io.IOException; <ide> public AsyncOkHttpApnsClient(InputStream certificate, String password, boolean production, <ide> String defaultTopic, ConnectionPool connectionPool) <ide> throws CertificateException, NoSuchAlgorithmException, KeyStoreException, <del> IOException, UnrecoverableKeyException, KeyManagementException { <add> IOException, UnrecoverableKeyException, KeyManagementException, InvalidTrustManagerException { <ide> super(certificate, password, production, defaultTopic, connectionPool); <ide> } <ide> <ide> public AsyncOkHttpApnsClient(InputStream certificate, String password, boolean production, <ide> String defaultTopic, OkHttpClient.Builder builder) <ide> throws CertificateException, NoSuchAlgorithmException, KeyStoreException, <del> IOException, UnrecoverableKeyException, KeyManagementException { <add> IOException, UnrecoverableKeyException, KeyManagementException, InvalidTrustManagerException { <ide> this(certificate, password, production, defaultTopic, builder, 443); <ide> } <ide> <ide> public AsyncOkHttpApnsClient(InputStream certificate, String password, boolean production, <ide> String defaultTopic, OkHttpClient.Builder builder, int connectionPort, String gatewayUrl) <ide> throws CertificateException, NoSuchAlgorithmException, KeyStoreException, <del> IOException, UnrecoverableKeyException, KeyManagementException { <add> IOException, UnrecoverableKeyException, KeyManagementException, InvalidTrustManagerException { <ide> super(certificate, password, production, defaultTopic, builder, gatewayUrl); <ide> } <ide> <ide> public AsyncOkHttpApnsClient(InputStream certificate, String password, boolean 
production, <ide> String defaultTopic, OkHttpClient.Builder builder, int connectionPort) <ide> throws CertificateException, NoSuchAlgorithmException, KeyStoreException, <del> IOException, UnrecoverableKeyException, KeyManagementException { <add> IOException, UnrecoverableKeyException, KeyManagementException, InvalidTrustManagerException { <ide> this(certificate, password, production, defaultTopic, builder, 443, null); <ide> } <ide>
JavaScript
bsd-3-clause
52c77d9fb070aa5fee2d5c5faa3a68ce05e4f906
0
LLK/scratch-vm,LLK/scratch-vm,LLK/scratch-vm
/** * Serialize all the assets of the given type ('sounds' or 'costumes') * in the provided runtime into an array of file descriptors. * A file descriptor is an object containing the name of the file * to be written and the contents of the file, the serialized asset. * @param {Runtime} runtime The runtime with the assets to be serialized * @param {string} assetType The type of assets to be serialized: 'sounds' | 'costumes' * @returns {Array<object>} An array of file descriptors for each asset */ const serializeAssets = function (runtime, assetType) { const targets = runtime.targets; const assetDescs = []; for (let i = 0; i < targets.length; i++) { const currTarget = targets[i]; const currAssets = currTarget.sprite[assetType]; for (let j = 0; j < currAssets.length; j++) { const currAsset = currAssets[j]; const assetId = currAsset.assetId; const storage = runtime.storage; const storedAsset = storage.get(assetId); assetDescs.push({ fileName: assetType === 'sound' ? currAsset.md5 : `${assetId}.${storedAsset.dataFormat}`, fileContent: storedAsset.data}); } } return assetDescs; }; /** * Serialize all the sounds in the provided runtime into an array of file * descriptors. A file descriptor is an object containing the name of the file * to be written and the contents of the file, the serialized sound. * @param {Runtime} runtime The runtime with the sounds to be serialized * @returns {Array<object>} An array of file descriptors for each sound */ const serializeSounds = function (runtime) { return serializeAssets(runtime, 'sounds'); }; /** * Serialize all the costumes in the provided runtime into an array of file * descriptors. A file descriptor is an object containing the name of the file * to be written and the contents of the file, the serialized costume. 
* @param {Runtime} runtime The runtime with the costumes to be serialized * @returns {Array<object>} An array of file descriptors for each costume */ const serializeCostumes = function (runtime) { return serializeAssets(runtime, 'costumes'); }; module.exports = { serializeSounds, serializeCostumes };
src/serialization/serialize-assets.js
/** * Serialize all the assets of the given type ('sounds' or 'costumes') * in the provided runtime into an array of file descriptors. * A file descriptor is an object containing the name of the file * to be written and the contents of the file, the serialized asset. * @param {Runtime} runtime The runtime with the assets to be serialized * @param {string} assetType The type of assets to be serialized: 'sounds' | 'costumes' * @returns {Array<object>} An array of file descriptors for each asset */ const serializeAssets = function (runtime, assetType) { const targets = runtime.targets; const assetDescs = []; for (let i = 0; i < targets.length; i++) { const currTarget = targets[i]; const currAssets = currTarget.sprite[assetType]; for (let j = 0; j < currAssets.length; j++) { const currAsset = currAssets[j]; const assetId = currAsset.assetId; const storage = runtime.storage; const asset = storage.get(assetId); assetDescs.push({ fileName: assetType === 'sound' ? currAsset.md5 : `${assetId}.${currAsset.dataFormat}`, fileContent: asset.data}); } } return assetDescs; }; /** * Serialize all the sounds in the provided runtime into an array of file * descriptors. A file descriptor is an object containing the name of the file * to be written and the contents of the file, the serialized sound. * @param {Runtime} runtime The runtime with the sounds to be serialized * @returns {Array<object>} An array of file descriptors for each sound */ const serializeSounds = function (runtime) { return serializeAssets(runtime, 'sounds'); }; /** * Serialize all the costumes in the provided runtime into an array of file * descriptors. A file descriptor is an object containing the name of the file * to be written and the contents of the file, the serialized costume. 
* @param {Runtime} runtime The runtime with the costumes to be serialized * @returns {Array<object>} An array of file descriptors for each costume */ const serializeCostumes = function (runtime) { return serializeAssets(runtime, 'costumes'); }; module.exports = { serializeSounds, serializeCostumes };
Updating asset serialization code to get data format from stored asset rather than vm state. Storage seems to be more up to date than vm in the case of editing the blank backdrop, which is stored as a png instead of an svg.
src/serialization/serialize-assets.js
Updating asset serialization code to get data format from stored asset rather than vm state. Storage seems to be more up to date than vm in the case of editing the blank backdrop, which is stored as a png instead of an svg.
<ide><path>rc/serialization/serialize-assets.js <ide> const currAsset = currAssets[j]; <ide> const assetId = currAsset.assetId; <ide> const storage = runtime.storage; <del> const asset = storage.get(assetId); <add> const storedAsset = storage.get(assetId); <ide> assetDescs.push({ <ide> fileName: assetType === 'sound' ? <del> currAsset.md5 : `${assetId}.${currAsset.dataFormat}`, <del> fileContent: asset.data}); <add> currAsset.md5 : `${assetId}.${storedAsset.dataFormat}`, <add> fileContent: storedAsset.data}); <ide> } <ide> } <ide> return assetDescs;
JavaScript
mit
c16d6f72bb42e4987e19afaf446da0e595210ff2
0
magnumjs/mag.js,magnumjs/mag.js
/* MagJS v0.28.9 http://github.com/magnumjs/mag.js (c) Michael Glazer License: MIT */ (function(mag, global) { 'use strict'; var prop = {}, _VALUE = '_value', MAGNUM = mag.MAGNUM; //TODO: make recursive and clean! var getParent = function(parts, parentElement) { for (var i = 1; i < parts.length; i++) { var key = parts[i]; var index = parts[i + 1]; var found = mag.fill.find(Array.isArray(parentElement) ? parentElement[0] : parentElement, key); if (index && !isNaN(Number(index))) { parentElement = found[index]; } else if (found && found.length && index && i + 2 < parts.length) { parentElement = mag.fill.find(found[0], index); } else if (found && found.length) { parentElement = found if (i + 2 == parts.length) { break; } } } return Array.isArray(parentElement) ? parentElement[0] : parentElement; }; var getElement = function(obj, k, i, parentElement) { // search within _key if there var parts = i.toString().split('.'), found; if (parts.length >= 3) { // recurse parentElement = getParent(parts, parentElement) found = mag.fill.find(parentElement, k); } else { var last = parseInt(parts.pop()), index = !isNaN(last) ? last : 0; found = mag.fill.find(parentElement[index] ? 
parentElement[index] : parentElement, k); } return syncUIWatcher(found, obj, k, parentElement); } function syncUIWatcher(found, obj, k, parentElement) { var items = [] for (var i in found) { var founder = found[i]; if (isDynaInput(founder)) { // add to return list items.push(founder); addEvent(founder, obj, k, parentElement); } } if (items.length) { if (items.length == 1) { return items[0] } return items; } return false; } function addEvent(founder, obj, k, parentElement) { founder[MAGNUM] = founder[MAGNUM] || {} if (!founder[MAGNUM].eventOnFocus) { var onit = function(parent, obj, k, event) { var check = ~['radio', 'checkbox'].indexOf(founder.type); if (!this[MAGNUM].dirty) { this[MAGNUM].dirty = 1 } if (founder.selectedOptions) { var vals = [].map.call(founder.selectedOptions, x => x.value) } if (check) { if ('_checked' in obj || _VALUE in obj) { obj['_checked'] = this.checked; } else if (this.checked) { obj[k] = this.value; } } else if (obj[_VALUE] !== undefined) { obj[_VALUE] = vals || this.value; } else if (obj._text !== undefined) { obj._text = vals || this.value; } mag.redraw(parent, mag.utils.items.getItem(parent.id)); }.bind(founder, parentElement, obj, k); founder.addEventListener("click", onit); founder.addEventListener("input", onit); founder.addEventListener("change", onit); founder.addEventListener("focus", onit); founder[MAGNUM].eventOnFocus = 1; } } function isDynaInput(item) { return item && ~['INPUT', 'SELECT', 'TEXTAREA'].indexOf(item.tagName); } function isInput(items) { for (var k in items) { if (isDynaInput(items[k])) { return items[k]; } } return false; } var attacher = function(i, k, obj, element) { var oval = obj[k]; // if k =='VALUE' use parent if (~[_VALUE, '_checked', '_text'].indexOf(k) && typeof i == 'string') { k = i.split('.').pop(); } // only for user input fields var found = mag.fill.find(element, k); var founder = isInput(found); if (typeof oval !== 'function' && founder) { var founderCall = getElement.bind({}, obj, k, i, 
element); founderCall(); Object.defineProperty(obj, k, { configurable: true, get: function() { var founder = founderCall(); // set on focus listener once if (founder && founder.value !== 'undefined' && (founder[MAGNUM] && founder[MAGNUM].dirty) && founder.value !== oval) { oval = founder.value; mag.redraw(element, i, 1) return founder.value; } return oval; }, set: function(newValue) { var founder = founderCall(); if (founder && founder.value !== 'undefined' && founder.value !== newValue && newValue !== oval) { founder.value = newValue; oval = newValue; } } }); } }; var attachToArgs = function(i, args, element) { for (var k in args) { if (args.hasOwnProperty(k)) { var value = args[k] if (k != _VALUE && typeof value === 'object' && !mag.utils.isHTMLEle(value)) { if (mag.utils.isObject(value) && mag.utils.isEmpty(value)) { value[_VALUE] = '' } // recurse attachToArgs(i + '.' + k, value, element); } else { attacher.bind({}, i, k, args, element)(); } } } }; prop.attachToArgs = attachToArgs mag.props = prop }(mag));
src/render.js
/* MagJS v0.28.9 http://github.com/magnumjs/mag.js (c) Michael Glazer License: MIT */ (function(mag, global) { 'use strict'; var prop = {}, _VALUE = '_value', MAGNUM = mag.MAGNUM; //TODO: make recursive and clean! var getParent = function(parts, parentElement) { for (var i = 1; i < parts.length; i++) { var key = parts[i]; var index = parts[i + 1]; var found = mag.fill.find(Array.isArray(parentElement) ? parentElement[0] : parentElement, key); if (index && !isNaN(Number(index))) { parentElement = found[index]; } else if (found && found.length && index && i + 2 < parts.length) { parentElement = mag.fill.find(found[0], index); } else if (found && found.length) { parentElement = found if (i + 2 == parts.length) { break; } } } return Array.isArray(parentElement) ? parentElement[0] : parentElement; }; var getElement = function(obj, k, i, parentElement) { // search within _key if there var parts = i.toString().split('.'), found; if (parts.length >= 3) { // recurse parentElement = getParent(parts, parentElement) found = mag.fill.find(parentElement, k); } else { var last = parseInt(parts.pop()), index = !isNaN(last) ? last : 0; found = mag.fill.find(parentElement[index] ? 
parentElement[index] : parentElement, k); } return syncUIWatcher(found, obj, k, parentElement); } function syncUIWatcher(found, obj, k, parentElement) { var items = [] for (var i in found) { var founder = found[i]; if (isDynaInput(founder)) { // add to return list items.push(founder); addEvent(founder, obj, k, parentElement); } } if (items.length) { if (items.length == 1) { return items[0] } return items; } return false; } function addEvent(founder, obj, k, parentElement) { founder[MAGNUM] = founder[MAGNUM] || {} if (!founder[MAGNUM].eventOnFocus) { var onit = function(parent, obj, k, event) { var check = ~['radio', 'checkbox'].indexOf(founder.type); if (!this[MAGNUM].dirty) { this[MAGNUM].dirty = 1 } if (founder.selectedOptions) { var vals = [].map.call(founder.selectedOptions, x => x.value) } if (check) { if ('_checked' in obj || _VALUE in obj) { obj['_checked'] = this.checked; } else if (this.checked) { obj[k] = this.value; } } else if (obj[_VALUE] !== undefined) { obj[_VALUE] = vals || this.value; } else if (obj._text !== undefined) { obj._text = vals || this.value; } mag.redraw(parent, mag.utils.items.getItem(parent.id)); }.bind(founder, parentElement, obj, k); founder.addEventListener("click", onit, false); founder.addEventListener("input", onit, false); founder.addEventListener("focus", onit, false); founder[MAGNUM].eventOnFocus = 1; } } function isDynaInput(item) { return item && ~['INPUT', 'SELECT', 'TEXTAREA'].indexOf(item.tagName); } function isInput(items) { for (var k in items) { if (isDynaInput(items[k])) { return items[k]; } } return false; } var attacher = function(i, k, obj, element) { var oval = obj[k]; // if k =='VALUE' use parent if (~[_VALUE, '_checked', '_text'].indexOf(k) && typeof i == 'string') { k = i.split('.').pop(); } // only for user input fields var found = mag.fill.find(element, k); var founder = isInput(found); if (typeof oval !== 'function' && founder) { var founderCall = getElement.bind({}, obj, k, i, element); founderCall(); 
Object.defineProperty(obj, k, { configurable: true, get: function() { var founder = founderCall(); // set on focus listener once if (founder && founder.value !== 'undefined' && (founder[MAGNUM] && founder[MAGNUM].dirty) && founder.value !== oval) { oval = founder.value; mag.redraw(element, i, 1) return founder.value; } return oval; }, set: function(newValue) { var founder = founderCall(); if (founder && founder.value !== 'undefined' && founder.value !== newValue && newValue !== oval) { founder.value = newValue; oval = newValue; } } }); } }; var attachToArgs = function(i, args, element) { for (var k in args) { if (args.hasOwnProperty(k)) { var value = args[k] if (k != _VALUE && typeof value === 'object' && !mag.utils.isHTMLEle(value)) { if (mag.utils.isObject(value) && mag.utils.isEmpty(value)) { value[_VALUE] = '' } // recurse attachToArgs(i + '.' + k, value, element); } else { attacher.bind({}, i, k, args, element)(); } } } }; prop.attachToArgs = attachToArgs mag.props = prop }(mag));
Update render.js Fix for render events, now both change and input
src/render.js
Update render.js
<ide><path>rc/render.js <ide> <ide> }.bind(founder, parentElement, obj, k); <ide> <del> founder.addEventListener("click", onit, false); <del> founder.addEventListener("input", onit, false); <del> founder.addEventListener("focus", onit, false); <add> founder.addEventListener("click", onit); <add> founder.addEventListener("input", onit); <add> founder.addEventListener("change", onit); <add> founder.addEventListener("focus", onit); <ide> <ide> founder[MAGNUM].eventOnFocus = 1; <ide> }
Java
apache-2.0
f41b8a778d620b03827b775706b9665f9a537623
0
rpudil/midpoint,Pardus-Engerek/engerek,arnost-starosta/midpoint,arnost-starosta/midpoint,sabriarabacioglu/engerek,PetrGasparik/midpoint,gureronder/midpoint,sabriarabacioglu/engerek,PetrGasparik/midpoint,rpudil/midpoint,gureronder/midpoint,PetrGasparik/midpoint,gureronder/midpoint,arnost-starosta/midpoint,Pardus-Engerek/engerek,rpudil/midpoint,rpudil/midpoint,Pardus-Engerek/engerek,sabriarabacioglu/engerek,Pardus-Engerek/engerek,arnost-starosta/midpoint,gureronder/midpoint,PetrGasparik/midpoint,arnost-starosta/midpoint
/* * Copyright (c) 2010-2013 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.web.util; import com.evolveum.midpoint.util.aspect.ProfilingDataLog; import com.evolveum.midpoint.util.aspect.ProfilingDataManager; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import javax.servlet.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.text.DecimalFormat; /** * //TODO - After upgrading to javax.servlet version API 3.0, add response status code logging * * In this filter, all incoming requests are captured and we measure server response times (using System.nanoTime() for now), * this may be later adjusted using Java SIMON API (but this API is based on System.nanoTime() as well). * * Right now, we are logging this request/response information * Requested URL * Request method (GET/POST) * Request session id * * Requests for .css or various image files are filtered and not recorded. 
* * @author lazyman * @author shood */ public class MidPointProfilingServletFilter implements Filter { /* Class Variables */ private static final Trace LOGGER = TraceManager.getTrace(MidPointProfilingServletFilter.class); private static DecimalFormat df = new DecimalFormat("0.00"); /* Attributes */ protected FilterConfig config; /* Behavior */ @Override public void destroy() { } @Override public void init(FilterConfig config) throws ServletException { this.config = config; } @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if(LOGGER.isTraceEnabled()){ long startTime = System.nanoTime(); chain.doFilter(request, response); long elapsedTime = System.nanoTime() - startTime; if(request instanceof HttpServletRequest){ String uri = ((HttpServletRequest)request).getRequestURI(); //String info = ((HttpServletRequest)request).getMethod(); //String sessionId = ((HttpServletRequest)request).getRequestedSessionId(); //if(uri.startsWith("/midpoint/admin")){ // LOGGER.trace(info + " " + uri + " " + sessionId + " " + df.format(((double)elapsedTime)/1000000) + " (ms)."); //} if(uri.startsWith("/midpoint/admin")){ prepareRequestProfilingEvent(request, elapsedTime, uri); } } } else { chain.doFilter(request, response); } } //doFilter /* * Prepares profiling event from captured servlet request * */ private void prepareRequestProfilingEvent(ServletRequest request, long elapsed, String uri){ String info = ((HttpServletRequest)request).getMethod(); String sessionId = ((HttpServletRequest)request).getRequestedSessionId(); ProfilingDataLog event = new ProfilingDataLog(info, uri, sessionId, elapsed, System.currentTimeMillis()); ProfilingDataManager.getInstance().prepareRequestProfilingEvent(event); } //prepareRequestProfilingEvent }
gui/admin-gui/src/main/java/com/evolveum/midpoint/web/util/MidPointProfilingServletFilter.java
/* * Copyright (c) 2010-2013 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.web.util; import com.evolveum.midpoint.util.aspect.ProfilingDataLog; import com.evolveum.midpoint.util.aspect.ProfilingDataManager; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import javax.servlet.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import java.io.IOException; import java.text.DecimalFormat; /** * //TODO - After upgrading to javax.servlet version API 3.0, add response status code logging * * In this filter, all incoming requests are captured and we measure server response times (using System.nanoTime() for now), * this may be later adjusted using Java SIMON API (but this API is based on System.nanoTime() as well). * * Right now, we are logging this request/response information * Requested URL * Request method (GET/POST) * Request session id * * Requests for .css or various image files are filtered and not recorded. 
* * @author lazyman * @author shood */ public class MidPointProfilingServletFilter implements Filter { /* Class Variables */ private static final Trace LOGGER = TraceManager.getTrace(MidPointProfilingServletFilter.class); private static DecimalFormat df = new DecimalFormat("0.00"); /* Attributes */ protected FilterConfig config; /* Behavior */ @Override public void destroy() { } @Override public void init(FilterConfig config) throws ServletException { this.config = config; } @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { if(LOGGER.isTraceEnabled()){ long startTime = System.nanoTime(); chain.doFilter(request, response); long elapsedTime = System.nanoTime() - startTime; if(request instanceof HttpServletRequest){ String uri = ((HttpServletRequest)request).getRequestURI(); //String info = ((HttpServletRequest)request).getMethod(); //String sessionId = ((HttpServletRequest)request).getRequestedSessionId(); //if(uri.startsWith("/midpoint/admin")){ // LOGGER.trace(info + " " + uri + " " + sessionId + " " + df.format(((double)elapsedTime)/1000000) + " (ms)."); //} if(uri.startsWith("/midpoint/admin")){ prepareRequestProfilingEvent(request, elapsedTime, uri); } } } } //doFilter /* * Prepares profiling event from captured servlet request * */ private void prepareRequestProfilingEvent(ServletRequest request, long elapsed, String uri){ String info = ((HttpServletRequest)request).getMethod(); String sessionId = ((HttpServletRequest)request).getRequestedSessionId(); ProfilingDataLog event = new ProfilingDataLog(info, uri, sessionId, elapsed, System.currentTimeMillis()); ProfilingDataManager.getInstance().prepareRequestProfilingEvent(event); } //prepareRequestProfilingEvent }
Fix for deployement problem.
gui/admin-gui/src/main/java/com/evolveum/midpoint/web/util/MidPointProfilingServletFilter.java
Fix for deployement problem.
<ide><path>ui/admin-gui/src/main/java/com/evolveum/midpoint/web/util/MidPointProfilingServletFilter.java <ide> prepareRequestProfilingEvent(request, elapsedTime, uri); <ide> } <ide> } <add> } else { <add> chain.doFilter(request, response); <ide> } <ide> } //doFilter <ide>
JavaScript
bsd-3-clause
044b71cc171845bc88c76ef7650f609864a5e456
0
radify/PathFinding.js,radify/PathFinding.js
/** * A base class for path-finders. * This class *SHOULD NOT* be directly instantiated, as it does not provide * any path-finding algorithms or methods and is intended to be extended * by all the other path-finder classes. * * *Note*: The constructor does *NOT* receive any arguments for instantiation. * All the parameters should be passed in in the {@code init} method. * Therefore, you only need to instantiate this class once, and call * the {@code init} method each time you changed the coordinates or grid * status for a new path-finding request. * * @constructor */ PF.BaseFinder = function() { this.startX = null; // avoids to be garbage collected this.startY = null; this.endX = null; this.endY = null; this.grid = null; this.gridHeight = null; this.gridWidth = null; }; /** * Initiate the path-finder by providing the coordinates and the grid. * @param {integer} startX The x coordinate of the start position. * @param {integer} startY The y coordinate of the start position. * @param {integer} endX The x coordinate of the end position. * @param {integer} endY The y coordinate of the end position. */ PF.BaseFinder.prototype.init = function(startX, startY, endX, endY, grid) { this.startX = startX; this.startY = startY; this.endX = endX; this.endY = endY; this.grid = grid; this.gridWidth = grid.numCols; this.gridHeight = grid.numRows; }; /** * Determine whether the given postition is inside the grid. * @param {integer} x The x coordinate of the position. * @param {integer} y The y coordinate of the position. * @return {boolean} Whether it is inside. */ PF.BaseFinder.prototype.isInsideGrid = function(x, y) { // delegates to grid. return this.grid.isInside(x, y); }; /** * Set the walkable attribute of the given position on the grid. * @param {integer} x The x coordinate of the position. * @param {integer} y The y coordinate of the position. */ PF.BaseFinder.prototype.setWalkable = function(x, y, walkable) { // delegates to grid. 
this.grid.setWalkable(x, y, walkable); }; /** * Determine whether the given position on the grid is walkable. * @param {integer} x The x coordinate of the position. * @param {integer} y The y coordinate of the position. * @return {boolean} Whether it is walkable. */ PF.BaseFinder.prototype.isWalkable = function(x, y) { // delegates to grid. return this.grid.isWalkable(x, y); }; /** * The constructor of each BaseFinder instance. */ PF.BaseFinder.prototype.constructor = PF.BaseFinder; /** * Starts the search for the path. * *NOTE*: This method is intended to be overriden by sub-classes. * @return {Array.<[integer, integer]>} The path, including both start and * end positions. */ PF.BaseFinder.prototype.findPath = function() { throw new Error('Not Implemented Error: ' + 'Sub-classes must implement this method'); };
src/core/BaseFinder.js
/** * A base class for path-finders. * This class *SHOULD NOT* be directly instantiated, as it does not provide * any path-finding algorithms or methods and is intended to be extended * by all the other path-finder classes. * * *Note*: The constructor does *NOT* receive any arguments for instantiation. * All the parameters should be passed in in the {@code init} method. * Therefore, you only need to instantiate this class once, and call * the {@code init} method each time you changed the coordinates or grid * status for a new path-finding request. * * @constructor */ PF.BaseFinder = function() { this.startX = null; // avoids to be garbage collected this.startY = null; this.endX = null; this.endY = null; this.grid = null; this.gridHeight = null; this.gridWidth = null; }; /** * Initiate the path-finder by providing the coordinates and the grid. * @param {integer} startX The x coordinate of the start position. * @param {integer} startY The y coordinate of the start position. * @param {integer} endX The x coordinate of the end position. * @param {integer} endY The y coordinate of the end position. */ PF.BaseFinder.prototype.init = function(startX, startY, endX, endY, grid) { this.startX = startX; this.startY = startY; this.endX = endX; this.endY = endY; this.grid = grid; this.gridWidth = grid.numCols; this.gridHeight = grid.numRows; }; /** * Determine whether the given postition is inside the grid. * @param {integer} x The x coordinate of the position. * @param {integer} y The y coordinate of the position. * @return {boolean} Whether it is inside. */ PF.BaseFinder.prototype.isInsideGrid = function(x, y) { // delegates to grid. return this.grid.isInside(x, y); }; /** * Set the walkable attribute of the given position on the grid. * @param {integer} x The x coordinate of the position. * @param {integer} y The y coordinate of the position. */ PF.BaseFinder.prototype.setWalkable = function(x, y, walkable) { // delegates to grid. 
this.grid.setWalkable(x, y, walkable); }; /** * Determine whether the given position on the grid is walkable. * @param {integer} x The x coordinate of the position. * @param {integer} y The y coordinate of the position. * @return {boolean} Whether it is walkable. */ PF.BaseFinder.prototype.isWalkable = function(x, y) { // delegates to grid. return this.grid.isWalkable(x, y); }; /** * The constructor of each BaseFinder instance. */ PF.BaseFinder.prototype.constructor = PF.BaseFinder;
add: abstract `findPath` method to core/BaseFinder.js
src/core/BaseFinder.js
add: abstract `findPath` method to core/BaseFinder.js
<ide><path>rc/core/BaseFinder.js <ide> * The constructor of each BaseFinder instance. <ide> */ <ide> PF.BaseFinder.prototype.constructor = PF.BaseFinder; <add> <add>/** <add> * Starts the search for the path. <add> * *NOTE*: This method is intended to be overriden by sub-classes. <add> * @return {Array.<[integer, integer]>} The path, including both start and <add> * end positions. <add> */ <add>PF.BaseFinder.prototype.findPath = function() { <add> throw new Error('Not Implemented Error: ' + <add> 'Sub-classes must implement this method'); <add>};
JavaScript
bsd-3-clause
456a15b429720fc9d4664b6f3b434af731bfcab4
0
chrismayer/geoext2,bentrm/geoext2,annarieger/geoext2,annarieger/geoext2,m-click/geoext2,chrismayer/geoext2,Sundsvallskommun/geoext2,bentrm/geoext2,geographika/geoext2,marcjansen/geoext2,geoext/geoext2,Sundsvallskommun/geoext2,Sundsvallskommun/geoext2,m-click/geoext2,geographika/geoext2,geoext/geoext2,marcjansen/geoext2
/* * Copyright (c) 2008-2012 The Open Source Geospatial Foundation * * Published under the BSD license. * See https://github.com/geoext/geoext2/blob/master/license.txt for the full text * of the license. */ /* * @include GeoExt/tree/LayerContainer.js */ /** * A layer node plugin that will collect all base layers of an OpenLayers * map. Only layers that have displayInLayerSwitcher set to true will be * included. The node's text defaults to 'Overlays'. * * To use this node plugin in a tree node config, configure a node like this: * {plugins: "gx_overlaylayercontainer", text: "My overlays"} */ Ext.define('GeoExt.tree.OverlayLayerContainer', { extend: 'GeoExt.tree.LayerContainer', alias: 'plugin.gx_overlaylayercontainer', /** * @private * The default text for the target node. */ defaultText: 'Overlays', /** * @private */ init: function(target) { var me = this; var loader = me.loader; me.loader = Ext.applyIf(loader || {}, { filter: function(record) { var layer = record.getLayer(); return !(layer.displayInLayerSwitcher === true && layer.isBaseLayer === true); } }); me.callParent(arguments); } });
src/GeoExt/tree/OverlayLayerContainer.js
/* * Copyright (c) 2008-2012 The Open Source Geospatial Foundation * * Published under the BSD license. * See https://github.com/geoext/geoext2/blob/master/license.txt for the full text * of the license. */ /* * @include GeoExt/tree/LayerContainer.js */ /** * A layer node plugin that will collect all base layers of an OpenLayers * map. Only layers that have displayInLayerSwitcher set to true will be * included. The childrens' iconCls defaults to * "gx-tree-baselayer-icon" and the node' text defaults to * "Base Layer". * * Children will be rendered with a radio button instead of a checkbox, * showing the user that only one base layer can be active at a time. * * To use this node plugin in a tree node config, configure a node like this: * {plugins: "gx_overlaylayercontainer", text: "My overlays"} */ Ext.define('GeoExt.tree.OverlayLayerContainer', { extend: 'GeoExt.tree.LayerContainer', alias: 'plugin.gx_overlaylayercontainer', /** * @private * The default text for the target node. */ defaultText: 'Overlays', /** * @private */ init: function(target) { var me = this; var loader = me.loader; me.loader = Ext.applyIf(loader || {}, { baseAttrs: Ext.applyIf((loader && loader.baseAttrs) || {}, { iconCls: 'gx-tree-baselayer-icon', checkedGroup: 'baselayer' }), filter: function(record) { var layer = record.getLayer(); return !(layer.displayInLayerSwitcher === true && layer.isBaseLayer === true); } }); me.callParent(arguments); } });
This is not a BaseLayerContainer.
src/GeoExt/tree/OverlayLayerContainer.js
This is not a BaseLayerContainer.
<ide><path>rc/GeoExt/tree/OverlayLayerContainer.js <ide> /** <ide> * A layer node plugin that will collect all base layers of an OpenLayers <ide> * map. Only layers that have displayInLayerSwitcher set to true will be <del> * included. The childrens' iconCls defaults to <del> * "gx-tree-baselayer-icon" and the node' text defaults to <del> * "Base Layer". <del> * <del> * Children will be rendered with a radio button instead of a checkbox, <del> * showing the user that only one base layer can be active at a time. <add> * included. The node's text defaults to 'Overlays'. <ide> * <ide> * To use this node plugin in a tree node config, configure a node like this: <ide> * {plugins: "gx_overlaylayercontainer", text: "My overlays"} <ide> var loader = me.loader; <ide> <ide> me.loader = Ext.applyIf(loader || {}, { <del> baseAttrs: Ext.applyIf((loader && loader.baseAttrs) || {}, { <del> iconCls: 'gx-tree-baselayer-icon', <del> checkedGroup: 'baselayer' <del> }), <ide> filter: function(record) { <ide> var layer = record.getLayer(); <ide> return !(layer.displayInLayerSwitcher === true &&
Java
apache-2.0
5007bf7aa041ce344ec042d9960b5e76afdcbaf6
0
diendt/elasticsearch,mkis-/elasticsearch,gfyoung/elasticsearch,polyfractal/elasticsearch,alexbrasetvik/elasticsearch,huanzhong/elasticsearch,Asimov4/elasticsearch,ouyangkongtong/elasticsearch,abibell/elasticsearch,PhaedrusTheGreek/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,Rygbee/elasticsearch,camilojd/elasticsearch,hydro2k/elasticsearch,milodky/elasticsearch,AndreKR/elasticsearch,masaruh/elasticsearch,hanst/elasticsearch,TonyChai24/ESSource,a2lin/elasticsearch,likaiwalkman/elasticsearch,mm0/elasticsearch,hanst/elasticsearch,zhaocloud/elasticsearch,adrianbk/elasticsearch,VukDukic/elasticsearch,obourgain/elasticsearch,andrestc/elasticsearch,mnylen/elasticsearch,sc0ttkclark/elasticsearch,mcku/elasticsearch,mnylen/elasticsearch,sreeramjayan/elasticsearch,F0lha/elasticsearch,Fsero/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,ajhalani/elasticsearch,myelin/elasticsearch,davidvgalbraith/elasticsearch,wimvds/elasticsearch,Kakakakakku/elasticsearch,kingaj/elasticsearch,EasonYi/elasticsearch,likaiwalkman/elasticsearch,mortonsykes/elasticsearch,tebriel/elasticsearch,lzo/elasticsearch-1,apepper/elasticsearch,kimimj/elasticsearch,tahaemin/elasticsearch,hafkensite/elasticsearch,sdauletau/elasticsearch,PhaedrusTheGreek/elasticsearch,pritishppai/elasticsearch,kenshin233/elasticsearch,Collaborne/elasticsearch,sarwarbhuiyan/elasticsearch,maddin2016/elasticsearch,TonyChai24/ESSource,kunallimaye/elasticsearch,wimvds/elasticsearch,jchampion/elasticsearch,masterweb121/elasticsearch,fooljohnny/elasticsearch,brwe/elasticsearch,mrorii/elasticsearch,jimhooker2002/elasticsearch,markwalkom/elasticsearch,mbrukman/elasticsearch,lchennup/elasticsearch,anti-social/elasticsearch,jsgao0/elasticsearch,jw0201/elastic,golubev/elasticsearch,elancom/elasticsearch,ouyangkongtong/elasticsearch,rlugojr/elasticsearch,MisterAndersen/elasticsearch,yynil/elasticsearch,djschny/elasticsearch,infusionsoft/elasticsearch,lzo/elasticsearch-1,yongminxia/elasticsearch,brandonkearby/elasticsearch,brandonke
arby/elasticsearch,iacdingping/elasticsearch,markllama/elasticsearch,tahaemin/elasticsearch,VukDukic/elasticsearch,njlawton/elasticsearch,Shekharrajak/elasticsearch,nrkkalyan/elasticsearch,caengcjd/elasticsearch,Siddartha07/elasticsearch,jchampion/elasticsearch,chrismwendt/elasticsearch,sc0ttkclark/elasticsearch,huanzhong/elasticsearch,MaineC/elasticsearch,mortonsykes/elasticsearch,AshishThakur/elasticsearch,jw0201/elastic,alexbrasetvik/elasticsearch,jaynblue/elasticsearch,MichaelLiZhou/elasticsearch,Asimov4/elasticsearch,nezirus/elasticsearch,huanzhong/elasticsearch,thecocce/elasticsearch,Chhunlong/elasticsearch,fforbeck/elasticsearch,clintongormley/elasticsearch,F0lha/elasticsearch,lks21c/elasticsearch,pranavraman/elasticsearch,iacdingping/elasticsearch,szroland/elasticsearch,davidvgalbraith/elasticsearch,mkis-/elasticsearch,sarwarbhuiyan/elasticsearch,fred84/elasticsearch,vietlq/elasticsearch,sdauletau/elasticsearch,marcuswr/elasticsearch-dateline,rento19962/elasticsearch,achow/elasticsearch,AndreKR/elasticsearch,heng4fun/elasticsearch,nilabhsagar/elasticsearch,skearns64/elasticsearch,StefanGor/elasticsearch,brandonkearby/elasticsearch,SergVro/elasticsearch,loconsolutions/elasticsearch,ivansun1010/elasticsearch,winstonewert/elasticsearch,mm0/elasticsearch,tcucchietti/elasticsearch,MjAbuz/elasticsearch,kcompher/elasticsearch,nezirus/elasticsearch,apepper/elasticsearch,sdauletau/elasticsearch,nomoa/elasticsearch,zeroctu/elasticsearch,wangtuo/elasticsearch,Brijeshrpatel9/elasticsearch,Ansh90/elasticsearch,karthikjaps/elasticsearch,pritishppai/elasticsearch,brwe/elasticsearch,Clairebi/ElasticsearchClone,sneivandt/elasticsearch,yongminxia/elasticsearch,markharwood/elasticsearch,ImpressTV/elasticsearch,kimimj/elasticsearch,jpountz/elasticsearch,myelin/elasticsearch,IanvsPoplicola/elasticsearch,wittyameta/elasticsearch,janmejay/elasticsearch,nazarewk/elasticsearch,slavau/elasticsearch,vingupta3/elasticsearch,LewayneNaidoo/elasticsearch,wbowling/elasticsearch,JSCooke/ela
sticsearch,lchennup/elasticsearch,mjhennig/elasticsearch,lchennup/elasticsearch,polyfractal/elasticsearch,ThiagoGarciaAlves/elasticsearch,LewayneNaidoo/elasticsearch,yongminxia/elasticsearch,truemped/elasticsearch,cnfire/elasticsearch-1,Rygbee/elasticsearch,kkirsche/elasticsearch,girirajsharma/elasticsearch,tsohil/elasticsearch,trangvh/elasticsearch,xpandan/elasticsearch,khiraiwa/elasticsearch,socialrank/elasticsearch,onegambler/elasticsearch,lmtwga/elasticsearch,andrejserafim/elasticsearch,kevinkluge/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,marcuswr/elasticsearch-dateline,F0lha/elasticsearch,kubum/elasticsearch,awislowski/elasticsearch,polyfractal/elasticsearch,AshishThakur/elasticsearch,mbrukman/elasticsearch,nellicus/elasticsearch,tcucchietti/elasticsearch,mapr/elasticsearch,kcompher/elasticsearch,MichaelLiZhou/elasticsearch,rento19962/elasticsearch,strapdata/elassandra-test,codebunt/elasticsearch,pablocastro/elasticsearch,sposam/elasticsearch,tcucchietti/elasticsearch,JackyMai/elasticsearch,wittyameta/elasticsearch,hafkensite/elasticsearch,MichaelLiZhou/elasticsearch,mm0/elasticsearch,dylan8902/elasticsearch,LeoYao/elasticsearch,i-am-Nathan/elasticsearch,anti-social/elasticsearch,tebriel/elasticsearch,umeshdangat/elasticsearch,mnylen/elasticsearch,thecocce/elasticsearch,LewayneNaidoo/elasticsearch,nknize/elasticsearch,golubev/elasticsearch,jango2015/elasticsearch,dantuffery/elasticsearch,hydro2k/elasticsearch,humandb/elasticsearch,trangvh/elasticsearch,kaneshin/elasticsearch,alexbrasetvik/elasticsearch,milodky/elasticsearch,NBSW/elasticsearch,hydro2k/elasticsearch,likaiwalkman/elasticsearch,adrianbk/elasticsearch,YosuaMichael/elasticsearch,Asimov4/elasticsearch,ThiagoGarciaAlves/elasticsearch,kunallimaye/elasticsearch,MjAbuz/elasticsearch,alexkuk/elasticsearch,kevinkluge/elasticsearch,mcku/elasticsearch,AshishThakur/elasticsearch,wangyuxue/elasticsearch,TonyChai24/ESSource,onegambler/elasticsearch,infusionsoft/elasticsearch,sscarduzio/elasticsearch,spr
ingning/elasticsearch,mortonsykes/elasticsearch,acchen97/elasticsearch,fekaputra/elasticsearch,sposam/elasticsearch,tkssharma/elasticsearch,rajanm/elasticsearch,schonfeld/elasticsearch,Flipkart/elasticsearch,drewr/elasticsearch,ThalaivaStars/OrgRepo1,masaruh/elasticsearch,jsgao0/elasticsearch,humandb/elasticsearch,mjhennig/elasticsearch,dylan8902/elasticsearch,Brijeshrpatel9/elasticsearch,rlugojr/elasticsearch,adrianbk/elasticsearch,khiraiwa/elasticsearch,karthikjaps/elasticsearch,AleksKochev/elasticsearch,zhiqinghuang/elasticsearch,schonfeld/elasticsearch,henakamaMSFT/elasticsearch,jchampion/elasticsearch,smflorentino/elasticsearch,ulkas/elasticsearch,djschny/elasticsearch,qwerty4030/elasticsearch,strapdata/elassandra5-rc,micpalmia/elasticsearch,brwe/elasticsearch,LeoYao/elasticsearch,huanzhong/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,alexshadow007/elasticsearch,marcuswr/elasticsearch-dateline,dataduke/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,clintongormley/elasticsearch,cnfire/elasticsearch-1,aglne/elasticsearch,gingerwizard/elasticsearch,yynil/elasticsearch,kunallimaye/elasticsearch,jango2015/elasticsearch,maddin2016/elasticsearch,huanzhong/elasticsearch,djschny/elasticsearch,truemped/elasticsearch,andrejserafim/elasticsearch,Collaborne/elasticsearch,golubev/elasticsearch,mrorii/elasticsearch,micpalmia/elasticsearch,C-Bish/elasticsearch,wittyameta/elasticsearch,mjhennig/elasticsearch,heng4fun/elasticsearch,liweinan0423/elasticsearch,lydonchandra/elasticsearch,xuzha/elasticsearch,jaynblue/elasticsearch,vrkansagara/elasticsearch,abibell/elasticsearch,gingerwizard/elasticsearch,springning/elasticsearch,camilojd/elasticsearch,EasonYi/elasticsearch,AleksKochev/elasticsearch,MetSystem/elasticsearch,MichaelLiZhou/elasticsearch,ImpressTV/elasticsearch,snikch/elasticsearch,mohit/elasticsearch,jsgao0/elasticsearch,wittyameta/elasticsearch,petabytedata/elasticsearch,alexbrasetvik/elasticsearch,brandonkearby/elasticsearch,Flipkart/elasticsearch,Siddartha0
7/elasticsearch,xingguang2013/elasticsearch,sjohnr/elasticsearch,hydro2k/elasticsearch,obourgain/elasticsearch,amit-shar/elasticsearch,dylan8902/elasticsearch,mbrukman/elasticsearch,caengcjd/elasticsearch,markharwood/elasticsearch,hafkensite/elasticsearch,Widen/elasticsearch,likaiwalkman/elasticsearch,andrejserafim/elasticsearch,geidies/elasticsearch,masterweb121/elasticsearch,smflorentino/elasticsearch,jchampion/elasticsearch,yongminxia/elasticsearch,Siddartha07/elasticsearch,dpursehouse/elasticsearch,artnowo/elasticsearch,YosuaMichael/elasticsearch,sauravmondallive/elasticsearch,alexkuk/elasticsearch,elasticdog/elasticsearch,yuy168/elasticsearch,martinstuga/elasticsearch,MetSystem/elasticsearch,micpalmia/elasticsearch,jpountz/elasticsearch,onegambler/elasticsearch,myelin/elasticsearch,dpursehouse/elasticsearch,kkirsche/elasticsearch,nilabhsagar/elasticsearch,iamjakob/elasticsearch,JackyMai/elasticsearch,petabytedata/elasticsearch,palecur/elasticsearch,linglaiyao1314/elasticsearch,hanst/elasticsearch,alexkuk/elasticsearch,ZTE-PaaS/elasticsearch,spiegela/elasticsearch,kalimatas/elasticsearch,cnfire/elasticsearch-1,ivansun1010/elasticsearch,alexkuk/elasticsearch,artnowo/elasticsearch,onegambler/elasticsearch,khiraiwa/elasticsearch,opendatasoft/elasticsearch,cwurm/elasticsearch,linglaiyao1314/elasticsearch,jeteve/elasticsearch,vvcephei/elasticsearch,jimczi/elasticsearch,beiske/elasticsearch,hafkensite/elasticsearch,kalburgimanjunath/elasticsearch,Rygbee/elasticsearch,rajanm/elasticsearch,Fsero/elasticsearch,umeshdangat/elasticsearch,qwerty4030/elasticsearch,jimczi/elasticsearch,alexkuk/elasticsearch,MaineC/elasticsearch,nazarewk/elasticsearch,javachengwc/elasticsearch,achow/elasticsearch,camilojd/elasticsearch,combinatorist/elasticsearch,robin13/elasticsearch,anti-social/elasticsearch,Charlesdong/elasticsearch,shreejay/elasticsearch,njlawton/elasticsearch,ulkas/elasticsearch,mrorii/elasticsearch,JSCooke/elasticsearch,petmit/elasticsearch,huanzhong/elasticsearch,dantuf
fery/elasticsearch,bestwpw/elasticsearch,tebriel/elasticsearch,franklanganke/elasticsearch,tkssharma/elasticsearch,sc0ttkclark/elasticsearch,AleksKochev/elasticsearch,dpursehouse/elasticsearch,acchen97/elasticsearch,yanjunh/elasticsearch,sneivandt/elasticsearch,djschny/elasticsearch,aglne/elasticsearch,mapr/elasticsearch,Uiho/elasticsearch,KimTaehee/elasticsearch,ThalaivaStars/OrgRepo1,linglaiyao1314/elasticsearch,zeroctu/elasticsearch,jango2015/elasticsearch,kalburgimanjunath/elasticsearch,MetSystem/elasticsearch,mm0/elasticsearch,mute/elasticsearch,fred84/elasticsearch,Liziyao/elasticsearch,zkidkid/elasticsearch,kalimatas/elasticsearch,sauravmondallive/elasticsearch,EasonYi/elasticsearch,iantruslove/elasticsearch,amaliujia/elasticsearch,heng4fun/elasticsearch,mgalushka/elasticsearch,GlenRSmith/elasticsearch,Widen/elasticsearch,xpandan/elasticsearch,Shepard1212/elasticsearch,petabytedata/elasticsearch,djschny/elasticsearch,salyh/elasticsearch,adrianbk/elasticsearch,koxa29/elasticsearch,maddin2016/elasticsearch,jprante/elasticsearch,uschindler/elasticsearch,liweinan0423/elasticsearch,koxa29/elasticsearch,yanjunh/elasticsearch,vietlq/elasticsearch,fekaputra/elasticsearch,jango2015/elasticsearch,Brijeshrpatel9/elasticsearch,MisterAndersen/elasticsearch,infusionsoft/elasticsearch,trangvh/elasticsearch,kubum/elasticsearch,aglne/elasticsearch,milodky/elasticsearch,Charlesdong/elasticsearch,clintongormley/elasticsearch,lightslife/elasticsearch,qwerty4030/elasticsearch,pranavraman/elasticsearch,18098924759/elasticsearch,episerver/elasticsearch,ImpressTV/elasticsearch,glefloch/elasticsearch,hafkensite/elasticsearch,rhoml/elasticsearch,masaruh/elasticsearch,wenpos/elasticsearch,s1monw/elasticsearch,vroyer/elasticassandra,vrkansagara/elasticsearch,artnowo/elasticsearch,codebunt/elasticsearch,franklanganke/elasticsearch,acchen97/elasticsearch,scottsom/elasticsearch,AshishThakur/elasticsearch,chirilo/elasticsearch,robin13/elasticsearch,boliza/elasticsearch,Shepard1212/elasticse
arch,ESamir/elasticsearch,geidies/elasticsearch,jimczi/elasticsearch,salyh/elasticsearch,springning/elasticsearch,abibell/elasticsearch,dylan8902/elasticsearch,avikurapati/elasticsearch,mute/elasticsearch,xuzha/elasticsearch,lchennup/elasticsearch,schonfeld/elasticsearch,kenshin233/elasticsearch,MetSystem/elasticsearch,jeteve/elasticsearch,yynil/elasticsearch,ydsakyclguozi/elasticsearch,zeroctu/elasticsearch,mcku/elasticsearch,adrianbk/elasticsearch,areek/elasticsearch,overcome/elasticsearch,onegambler/elasticsearch,tkssharma/elasticsearch,fekaputra/elasticsearch,davidvgalbraith/elasticsearch,codebunt/elasticsearch,Rygbee/elasticsearch,combinatorist/elasticsearch,robin13/elasticsearch,fernandozhu/elasticsearch,tcucchietti/elasticsearch,kubum/elasticsearch,lydonchandra/elasticsearch,hechunwen/elasticsearch,18098924759/elasticsearch,phani546/elasticsearch,humandb/elasticsearch,milodky/elasticsearch,TonyChai24/ESSource,socialrank/elasticsearch,loconsolutions/elasticsearch,cnfire/elasticsearch-1,weipinghe/elasticsearch,queirozfcom/elasticsearch,wbowling/elasticsearch,areek/elasticsearch,overcome/elasticsearch,nazarewk/elasticsearch,mcku/elasticsearch,girirajsharma/elasticsearch,ImpressTV/elasticsearch,nrkkalyan/elasticsearch,MetSystem/elasticsearch,opendatasoft/elasticsearch,artnowo/elasticsearch,weipinghe/elasticsearch,GlenRSmith/elasticsearch,jeteve/elasticsearch,gmarz/elasticsearch,sarwarbhuiyan/elasticsearch,lchennup/elasticsearch,mcku/elasticsearch,pritishppai/elasticsearch,amit-shar/elasticsearch,chirilo/elasticsearch,rento19962/elasticsearch,wuranbo/elasticsearch,Shekharrajak/elasticsearch,weipinghe/elasticsearch,feiqitian/elasticsearch,vietlq/elasticsearch,jeteve/elasticsearch,Microsoft/elasticsearch,KimTaehee/elasticsearch,wenpos/elasticsearch,girirajsharma/elasticsearch,mohit/elasticsearch,sauravmondallive/elasticsearch,gingerwizard/elasticsearch,ulkas/elasticsearch,palecur/elasticsearch,pozhidaevak/elasticsearch,SergVro/elasticsearch,iantruslove/elasticsearch
,winstonewert/elasticsearch,myelin/elasticsearch,gingerwizard/elasticsearch,himanshuag/elasticsearch,ajhalani/elasticsearch,obourgain/elasticsearch,rmuir/elasticsearch,zhiqinghuang/elasticsearch,mm0/elasticsearch,socialrank/elasticsearch,kcompher/elasticsearch,yuy168/elasticsearch,areek/elasticsearch,abhijitiitr/es,caengcjd/elasticsearch,likaiwalkman/elasticsearch,Stacey-Gammon/elasticsearch,kunallimaye/elasticsearch,nezirus/elasticsearch,HarishAtGitHub/elasticsearch,jango2015/elasticsearch,dantuffery/elasticsearch,chrismwendt/elasticsearch,YosuaMichael/elasticsearch,nezirus/elasticsearch,kubum/elasticsearch,xpandan/elasticsearch,uschindler/elasticsearch,SergVro/elasticsearch,drewr/elasticsearch,mmaracic/elasticsearch,diendt/elasticsearch,yynil/elasticsearch,schonfeld/elasticsearch,luiseduardohdbackup/elasticsearch,ESamir/elasticsearch,cnfire/elasticsearch-1,dongjoon-hyun/elasticsearch,slavau/elasticsearch,kenshin233/elasticsearch,kenshin233/elasticsearch,hanst/elasticsearch,Chhunlong/elasticsearch,socialrank/elasticsearch,chirilo/elasticsearch,hafkensite/elasticsearch,areek/elasticsearch,sreeramjayan/elasticsearch,btiernay/elasticsearch,i-am-Nathan/elasticsearch,spiegela/elasticsearch,EasonYi/elasticsearch,elancom/elasticsearch,Clairebi/ElasticsearchClone,karthikjaps/elasticsearch,mgalushka/elasticsearch,himanshuag/elasticsearch,Rygbee/elasticsearch,jeteve/elasticsearch,humandb/elasticsearch,beiske/elasticsearch,feiqitian/elasticsearch,boliza/elasticsearch,JackyMai/elasticsearch,vvcephei/elasticsearch,davidvgalbraith/elasticsearch,opendatasoft/elasticsearch,polyfractal/elasticsearch,fekaputra/elasticsearch,Kakakakakku/elasticsearch,pritishppai/elasticsearch,YosuaMichael/elasticsearch,springning/elasticsearch,zeroctu/elasticsearch,markharwood/elasticsearch,petmit/elasticsearch,wayeast/elasticsearch,martinstuga/elasticsearch,huypx1292/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,raishiv/elasticsearch,coding0011/elasticsearch,palecur/elasticsearch,vrkansagara/el
asticsearch,easonC/elasticsearch,khiraiwa/elasticsearch,schonfeld/elasticsearch,javachengwc/elasticsearch,kenshin233/elasticsearch,iacdingping/elasticsearch,strapdata/elassandra-test,TonyChai24/ESSource,lks21c/elasticsearch,kalimatas/elasticsearch,Kakakakakku/elasticsearch,thecocce/elasticsearch,JervyShi/elasticsearch,strapdata/elassandra-test,chirilo/elasticsearch,jbertouch/elasticsearch,anti-social/elasticsearch,kaneshin/elasticsearch,kevinkluge/elasticsearch,IanvsPoplicola/elasticsearch,tsohil/elasticsearch,mortonsykes/elasticsearch,hechunwen/elasticsearch,mjhennig/elasticsearch,ydsakyclguozi/elasticsearch,coding0011/elasticsearch,szroland/elasticsearch,thecocce/elasticsearch,salyh/elasticsearch,Ansh90/elasticsearch,mohit/elasticsearch,achow/elasticsearch,wenpos/elasticsearch,strapdata/elassandra-test,Fsero/elasticsearch,zhiqinghuang/elasticsearch,Microsoft/elasticsearch,mortonsykes/elasticsearch,camilojd/elasticsearch,winstonewert/elasticsearch,jbertouch/elasticsearch,pablocastro/elasticsearch,chrismwendt/elasticsearch,alexkuk/elasticsearch,skearns64/elasticsearch,Stacey-Gammon/elasticsearch,yynil/elasticsearch,episerver/elasticsearch,ulkas/elasticsearch,likaiwalkman/elasticsearch,loconsolutions/elasticsearch,masterweb121/elasticsearch,fooljohnny/elasticsearch,andrestc/elasticsearch,a2lin/elasticsearch,sneivandt/elasticsearch,nezirus/elasticsearch,nellicus/elasticsearch,janmejay/elasticsearch,lydonchandra/elasticsearch,gingerwizard/elasticsearch,Ansh90/elasticsearch,mnylen/elasticsearch,khiraiwa/elasticsearch,YosuaMichael/elasticsearch,hirdesh2008/elasticsearch,golubev/elasticsearch,ckclark/elasticsearch,gmarz/elasticsearch,wuranbo/elasticsearch,szroland/elasticsearch,elancom/elasticsearch,andrejserafim/elasticsearch,himanshuag/elasticsearch,raishiv/elasticsearch,andrejserafim/elasticsearch,vietlq/elasticsearch,combinatorist/elasticsearch,snikch/elasticsearch,wuranbo/elasticsearch,kingaj/elasticsearch,markharwood/elasticsearch,heng4fun/elasticsearch,himanshuag/e
lasticsearch,njlawton/elasticsearch,ThalaivaStars/OrgRepo1,luiseduardohdbackup/elasticsearch,nellicus/elasticsearch,jw0201/elastic,knight1128/elasticsearch,karthikjaps/elasticsearch,uschindler/elasticsearch,xpandan/elasticsearch,PhaedrusTheGreek/elasticsearch,wbowling/elasticsearch,mmaracic/elasticsearch,kalburgimanjunath/elasticsearch,jeteve/elasticsearch,jw0201/elastic,scorpionvicky/elasticsearch,luiseduardohdbackup/elasticsearch,ivansun1010/elasticsearch,peschlowp/elasticsearch,beiske/elasticsearch,jimhooker2002/elasticsearch,liweinan0423/elasticsearch,sarwarbhuiyan/elasticsearch,JervyShi/elasticsearch,raishiv/elasticsearch,rajanm/elasticsearch,huypx1292/elasticsearch,18098924759/elasticsearch,springning/elasticsearch,HonzaKral/elasticsearch,yanjunh/elasticsearch,kevinkluge/elasticsearch,vvcephei/elasticsearch,sscarduzio/elasticsearch,amit-shar/elasticsearch,linglaiyao1314/elasticsearch,mapr/elasticsearch,kubum/elasticsearch,fooljohnny/elasticsearch,jaynblue/elasticsearch,liweinan0423/elasticsearch,sposam/elasticsearch,fernandozhu/elasticsearch,lzo/elasticsearch-1,Liziyao/elasticsearch,sposam/elasticsearch,socialrank/elasticsearch,sc0ttkclark/elasticsearch,nazarewk/elasticsearch,kubum/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,Uiho/elasticsearch,fred84/elasticsearch,snikch/elasticsearch,VukDukic/elasticsearch,ricardocerq/elasticsearch,knight1128/elasticsearch,koxa29/elasticsearch,mjhennig/elasticsearch,milodky/elasticsearch,xingguang2013/elasticsearch,btiernay/elasticsearch,PhaedrusTheGreek/elasticsearch,naveenhooda2000/elasticsearch,ZTE-PaaS/elasticsearch,hanswang/elasticsearch,Helen-Zhao/elasticsearch,gmarz/elasticsearch,avikurapati/elasticsearch,ouyangkongtong/elasticsearch,davidvgalbraith/elasticsearch,ulkas/elasticsearch,girirajsharma/elasticsearch,ouyangkongtong/elasticsearch,JSCooke/elasticsearch,linglaiyao1314/elasticsearch,andrestc/elasticsearch,mkis-/elasticsearch,kimimj/elasticsearch,xingguang2013/elasticsearch,mrorii/elasticsearch,himanshuag/e
lasticsearch,F0lha/elasticsearch,Shekharrajak/elasticsearch,springning/elasticsearch,smflorentino/elasticsearch,pablocastro/elasticsearch,HarishAtGitHub/elasticsearch,LewayneNaidoo/elasticsearch,beiske/elasticsearch,gfyoung/elasticsearch,gmarz/elasticsearch,luiseduardohdbackup/elasticsearch,apepper/elasticsearch,tkssharma/elasticsearch,Stacey-Gammon/elasticsearch,queirozfcom/elasticsearch,zhaocloud/elasticsearch,petmit/elasticsearch,strapdata/elassandra,spiegela/elasticsearch,AshishThakur/elasticsearch,micpalmia/elasticsearch,MichaelLiZhou/elasticsearch,TonyChai24/ESSource,Collaborne/elasticsearch,gfyoung/elasticsearch,yuy168/elasticsearch,AleksKochev/elasticsearch,humandb/elasticsearch,combinatorist/elasticsearch,mbrukman/elasticsearch,jimhooker2002/elasticsearch,micpalmia/elasticsearch,tsohil/elasticsearch,Chhunlong/elasticsearch,pozhidaevak/elasticsearch,Brijeshrpatel9/elasticsearch,loconsolutions/elasticsearch,sneivandt/elasticsearch,btiernay/elasticsearch,ckclark/elasticsearch,aglne/elasticsearch,tsohil/elasticsearch,wangyuxue/elasticsearch,dataduke/elasticsearch,jprante/elasticsearch,kkirsche/elasticsearch,vingupta3/elasticsearch,opendatasoft/elasticsearch,iantruslove/elasticsearch,lzo/elasticsearch-1,coding0011/elasticsearch,feiqitian/elasticsearch,sjohnr/elasticsearch,naveenhooda2000/elasticsearch,drewr/elasticsearch,janmejay/elasticsearch,zeroctu/elasticsearch,dpursehouse/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,amaliujia/elasticsearch,Liziyao/elasticsearch,ricardocerq/elasticsearch,iamjakob/elasticsearch,scorpionvicky/elasticsearch,NBSW/elasticsearch,btiernay/elasticsearch,AndreKR/elasticsearch,KimTaehee/elasticsearch,winstonewert/elasticsearch,ImpressTV/elasticsearch,wayeast/elasticsearch,maddin2016/elasticsearch,yuy168/elasticsearch,IanvsPoplicola/elasticsearch,apepper/elasticsearch,vrkansagara/elasticsearch,Collaborne/elasticsearch,masaruh/elasticsearch,sposam/elasticsearch,opendatasoft/elasticsearch,tkssharma/elasticsearch,fforbeck/elasticsea
rch,JackyMai/elasticsearch,bestwpw/elasticsearch,kingaj/elasticsearch,mohit/elasticsearch,pozhidaevak/elasticsearch,fooljohnny/elasticsearch,HarishAtGitHub/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,ivansun1010/elasticsearch,Stacey-Gammon/elasticsearch,mikemccand/elasticsearch,dantuffery/elasticsearch,btiernay/elasticsearch,jimczi/elasticsearch,tebriel/elasticsearch,JervyShi/elasticsearch,mmaracic/elasticsearch,kingaj/elasticsearch,aglne/elasticsearch,ZTE-PaaS/elasticsearch,kimimj/elasticsearch,Widen/elasticsearch,mgalushka/elasticsearch,dylan8902/elasticsearch,masterweb121/elasticsearch,YosuaMichael/elasticsearch,s1monw/elasticsearch,Shepard1212/elasticsearch,rento19962/elasticsearch,lks21c/elasticsearch,nomoa/elasticsearch,scorpionvicky/elasticsearch,fforbeck/elasticsearch,pozhidaevak/elasticsearch,cwurm/elasticsearch,dongjoon-hyun/elasticsearch,alexshadow007/elasticsearch,awislowski/elasticsearch,Fsero/elasticsearch,mgalushka/elasticsearch,smflorentino/elasticsearch,acchen97/elasticsearch,MetSystem/elasticsearch,btiernay/elasticsearch,nazarewk/elasticsearch,Uiho/elasticsearch,drewr/elasticsearch,cnfire/elasticsearch-1,kubum/elasticsearch,vvcephei/elasticsearch,markllama/elasticsearch,masterweb121/elasticsearch,rento19962/elasticsearch,chrismwendt/elasticsearch,diendt/elasticsearch,alexbrasetvik/elasticsearch,lchennup/elasticsearch,maddin2016/elasticsearch,beiske/elasticsearch,vingupta3/elasticsearch,sarwarbhuiyan/elasticsearch,luiseduardohdbackup/elasticsearch,winstonewert/elasticsearch,salyh/elasticsearch,drewr/elasticsearch,jsgao0/elasticsearch,wbowling/elasticsearch,JackyMai/elasticsearch,Fsero/elasticsearch,wuranbo/elasticsearch,ckclark/elasticsearch,dataduke/elasticsearch,mnylen/elasticsearch,onegambler/elasticsearch,ivansun1010/elasticsearch,tcucchietti/elasticsearch,iamjakob/elasticsearch,ThiagoGarciaAlves/elasticsearch,i-am-Nathan/elasticsearch,hanswang/elasticsearch,lydonchandra/elasticsearch,beiske/elasticsearch,koxa29/elasticsearch,AshishThakur
/elasticsearch,shreejay/elasticsearch,strapdata/elassandra-test,awislowski/elasticsearch,sreeramjayan/elasticsearch,tebriel/elasticsearch,mgalushka/elasticsearch,MjAbuz/elasticsearch,kenshin233/elasticsearch,sdauletau/elasticsearch,nrkkalyan/elasticsearch,elasticdog/elasticsearch,easonC/elasticsearch,javachengwc/elasticsearch,caengcjd/elasticsearch,dataduke/elasticsearch,Helen-Zhao/elasticsearch,infusionsoft/elasticsearch,nomoa/elasticsearch,spiegela/elasticsearch,janmejay/elasticsearch,amit-shar/elasticsearch,yuy168/elasticsearch,janmejay/elasticsearch,umeshdangat/elasticsearch,jango2015/elasticsearch,rmuir/elasticsearch,wayeast/elasticsearch,apepper/elasticsearch,hirdesh2008/elasticsearch,mmaracic/elasticsearch,nomoa/elasticsearch,ouyangkongtong/elasticsearch,lightslife/elasticsearch,lmtwga/elasticsearch,hechunwen/elasticsearch,sjohnr/elasticsearch,kalburgimanjunath/elasticsearch,wbowling/elasticsearch,liweinan0423/elasticsearch,wimvds/elasticsearch,Kakakakakku/elasticsearch,abibell/elasticsearch,LeoYao/elasticsearch,strapdata/elassandra,mcku/elasticsearch,petabytedata/elasticsearch,nknize/elasticsearch,Liziyao/elasticsearch,lightslife/elasticsearch,phani546/elasticsearch,mcku/elasticsearch,sposam/elasticsearch,Chhunlong/elasticsearch,MetSystem/elasticsearch,tsohil/elasticsearch,alexshadow007/elasticsearch,StefanGor/elasticsearch,Uiho/elasticsearch,Asimov4/elasticsearch,rajanm/elasticsearch,geidies/elasticsearch,kalburgimanjunath/elasticsearch,wangtuo/elasticsearch,rlugojr/elasticsearch,weipinghe/elasticsearch,lks21c/elasticsearch,pritishppai/elasticsearch,ThiagoGarciaAlves/elasticsearch,EasonYi/elasticsearch,adrianbk/elasticsearch,xingguang2013/elasticsearch,ESamir/elasticsearch,koxa29/elasticsearch,jpountz/elasticsearch,SergVro/elasticsearch,tsohil/elasticsearch,pranavraman/elasticsearch,ThalaivaStars/OrgRepo1,jaynblue/elasticsearch,huypx1292/elasticsearch,sc0ttkclark/elasticsearch,glefloch/elasticsearch,MisterAndersen/elasticsearch,NBSW/elasticsearch,fekaputra/
elasticsearch,ZTE-PaaS/elasticsearch,zkidkid/elasticsearch,henakamaMSFT/elasticsearch,iacdingping/elasticsearch,robin13/elasticsearch,queirozfcom/elasticsearch,MjAbuz/elasticsearch,jbertouch/elasticsearch,nknize/elasticsearch,vroyer/elassandra,mrorii/elasticsearch,s1monw/elasticsearch,amaliujia/elasticsearch,btiernay/elasticsearch,Asimov4/elasticsearch,avikurapati/elasticsearch,Collaborne/elasticsearch,ThalaivaStars/OrgRepo1,jeteve/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,ckclark/elasticsearch,ZTE-PaaS/elasticsearch,thecocce/elasticsearch,pritishppai/elasticsearch,skearns64/elasticsearch,bawse/elasticsearch,coding0011/elasticsearch,strapdata/elassandra5-rc,awislowski/elasticsearch,Chhunlong/elasticsearch,acchen97/elasticsearch,franklanganke/elasticsearch,fekaputra/elasticsearch,ThalaivaStars/OrgRepo1,queirozfcom/elasticsearch,LeoYao/elasticsearch,lydonchandra/elasticsearch,amaliujia/elasticsearch,strapdata/elassandra5-rc,baishuo/elasticsearch_v2.1.0-baishuo,JervyShi/elasticsearch,wangtuo/elasticsearch,humandb/elasticsearch,henakamaMSFT/elasticsearch,tahaemin/elasticsearch,elancom/elasticsearch,sauravmondallive/elasticsearch,cwurm/elasticsearch,truemped/elasticsearch,Flipkart/elasticsearch,wimvds/elasticsearch,weipinghe/elasticsearch,janmejay/elasticsearch,Stacey-Gammon/elasticsearch,vroyer/elassandra,JSCooke/elasticsearch,MjAbuz/elasticsearch,phani546/elasticsearch,Helen-Zhao/elasticsearch,Collaborne/elasticsearch,sscarduzio/elasticsearch,tebriel/elasticsearch,phani546/elasticsearch,javachengwc/elasticsearch,dpursehouse/elasticsearch,lightslife/elasticsearch,Charlesdong/elasticsearch,cwurm/elasticsearch,hechunwen/elasticsearch,skearns64/elasticsearch,nellicus/elasticsearch,slavau/elasticsearch,jango2015/elasticsearch,ulkas/elasticsearch,strapdata/elassandra5-rc,pranavraman/elasticsearch,fforbeck/elasticsearch,andrestc/elasticsearch,Siddartha07/elasticsearch,markllama/elasticsearch,Kakakakakku/elasticsearch,dongjoon-hyun/elasticsearch,HarishAtGitHub/elastic
search,F0lha/elasticsearch,mikemccand/elasticsearch,kalburgimanjunath/elasticsearch,Shekharrajak/elasticsearch,AndreKR/elasticsearch,loconsolutions/elasticsearch,xpandan/elasticsearch,knight1128/elasticsearch,iantruslove/elasticsearch,snikch/elasticsearch,nrkkalyan/elasticsearch,TonyChai24/ESSource,snikch/elasticsearch,huypx1292/elasticsearch,luiseduardohdbackup/elasticsearch,avikurapati/elasticsearch,elancom/elasticsearch,anti-social/elasticsearch,wayeast/elasticsearch,Kakakakakku/elasticsearch,18098924759/elasticsearch,StefanGor/elasticsearch,fernandozhu/elasticsearch,feiqitian/elasticsearch,Fsero/elasticsearch,mmaracic/elasticsearch,rhoml/elasticsearch,Clairebi/ElasticsearchClone,szroland/elasticsearch,elasticdog/elasticsearch,sscarduzio/elasticsearch,myelin/elasticsearch,awislowski/elasticsearch,mnylen/elasticsearch,Charlesdong/elasticsearch,girirajsharma/elasticsearch,caengcjd/elasticsearch,bawse/elasticsearch,bestwpw/elasticsearch,iantruslove/elasticsearch,MichaelLiZhou/elasticsearch,ESamir/elasticsearch,MaineC/elasticsearch,iacdingping/elasticsearch,ricardocerq/elasticsearch,sdauletau/elasticsearch,zhaocloud/elasticsearch,henakamaMSFT/elasticsearch,wenpos/elasticsearch,areek/elasticsearch,bestwpw/elasticsearch,hydro2k/elasticsearch,peschlowp/elasticsearch,kimimj/elasticsearch,smflorentino/elasticsearch,18098924759/elasticsearch,golubev/elasticsearch,andrestc/elasticsearch,wayeast/elasticsearch,rento19962/elasticsearch,ulkas/elasticsearch,wangyuxue/elasticsearch,18098924759/elasticsearch,bawse/elasticsearch,lydonchandra/elasticsearch,fforbeck/elasticsearch,lydonchandra/elasticsearch,JSCooke/elasticsearch,alexbrasetvik/elasticsearch,humandb/elasticsearch,mgalushka/elasticsearch,easonC/elasticsearch,Liziyao/elasticsearch,jbertouch/elasticsearch,nrkkalyan/elasticsearch,umeshdangat/elasticsearch,slavau/elasticsearch,rmuir/elasticsearch,yanjunh/elasticsearch,rlugojr/elasticsearch,Brijeshrpatel9/elasticsearch,mkis-/elasticsearch,njlawton/elasticsearch,djschny/elasti
csearch,mohit/elasticsearch,milodky/elasticsearch,i-am-Nathan/elasticsearch,dongjoon-hyun/elasticsearch,bawse/elasticsearch,VukDukic/elasticsearch,mute/elasticsearch,EasonYi/elasticsearch,dataduke/elasticsearch,yongminxia/elasticsearch,javachengwc/elasticsearch,achow/elasticsearch,weipinghe/elasticsearch,MisterAndersen/elasticsearch,KimTaehee/elasticsearch,MjAbuz/elasticsearch,lightslife/elasticsearch,mute/elasticsearch,fooljohnny/elasticsearch,geidies/elasticsearch,infusionsoft/elasticsearch,overcome/elasticsearch,hafkensite/elasticsearch,palecur/elasticsearch,drewr/elasticsearch,bawse/elasticsearch,ESamir/elasticsearch,hydro2k/elasticsearch,ajhalani/elasticsearch,hechunwen/elasticsearch,wimvds/elasticsearch,zeroctu/elasticsearch,marcuswr/elasticsearch-dateline,peschlowp/elasticsearch,Ansh90/elasticsearch,lightslife/elasticsearch,clintongormley/elasticsearch,i-am-Nathan/elasticsearch,MjAbuz/elasticsearch,mikemccand/elasticsearch,StefanGor/elasticsearch,tahaemin/elasticsearch,shreejay/elasticsearch,javachengwc/elasticsearch,obourgain/elasticsearch,acchen97/elasticsearch,KimTaehee/elasticsearch,kcompher/elasticsearch,C-Bish/elasticsearch,NBSW/elasticsearch,nilabhsagar/elasticsearch,IanvsPoplicola/elasticsearch,yuy168/elasticsearch,rlugojr/elasticsearch,hanswang/elasticsearch,NBSW/elasticsearch,areek/elasticsearch,vingupta3/elasticsearch,jprante/elasticsearch,phani546/elasticsearch,diendt/elasticsearch,AndreKR/elasticsearch,NBSW/elasticsearch,Flipkart/elasticsearch,fernandozhu/elasticsearch,amit-shar/elasticsearch,naveenhooda2000/elasticsearch,xuzha/elasticsearch,s1monw/elasticsearch,Liziyao/elasticsearch,vingupta3/elasticsearch,dylan8902/elasticsearch,djschny/elasticsearch,kalburgimanjunath/elasticsearch,martinstuga/elasticsearch,smflorentino/elasticsearch,koxa29/elasticsearch,uschindler/elasticsearch,pozhidaevak/elasticsearch,overcome/elasticsearch,zhiqinghuang/elasticsearch,Widen/elasticsearch,sreeramjayan/elasticsearch,tahaemin/elasticsearch,mkis-/elasticsearch,Im
pressTV/elasticsearch,masterweb121/elasticsearch,kingaj/elasticsearch,mnylen/elasticsearch,markwalkom/elasticsearch,HarishAtGitHub/elasticsearch,yynil/elasticsearch,codebunt/elasticsearch,iamjakob/elasticsearch,SergVro/elasticsearch,kevinkluge/elasticsearch,strapdata/elassandra,coding0011/elasticsearch,wbowling/elasticsearch,zhaocloud/elasticsearch,abhijitiitr/es,queirozfcom/elasticsearch,mute/elasticsearch,achow/elasticsearch,rhoml/elasticsearch,KimTaehee/elasticsearch,abibell/elasticsearch,marcuswr/elasticsearch-dateline,hirdesh2008/elasticsearch,mjason3/elasticsearch,jaynblue/elasticsearch,hanst/elasticsearch,KimTaehee/elasticsearch,jpountz/elasticsearch,codebunt/elasticsearch,yongminxia/elasticsearch,scottsom/elasticsearch,boliza/elasticsearch,PhaedrusTheGreek/elasticsearch,hirdesh2008/elasticsearch,pablocastro/elasticsearch,lchennup/elasticsearch,nknize/elasticsearch,achow/elasticsearch,C-Bish/elasticsearch,sjohnr/elasticsearch,pritishppai/elasticsearch,kcompher/elasticsearch,Shekharrajak/elasticsearch,vroyer/elasticassandra,markllama/elasticsearch,tahaemin/elasticsearch,kingaj/elasticsearch,jprante/elasticsearch,qwerty4030/elasticsearch,geidies/elasticsearch,karthikjaps/elasticsearch,jimhooker2002/elasticsearch,MaineC/elasticsearch,lmtwga/elasticsearch,C-Bish/elasticsearch,glefloch/elasticsearch,iantruslove/elasticsearch,Widen/elasticsearch,Widen/elasticsearch,queirozfcom/elasticsearch,episerver/elasticsearch,zhiqinghuang/elasticsearch,chrismwendt/elasticsearch,gfyoung/elasticsearch,sarwarbhuiyan/elasticsearch,rmuir/elasticsearch,JervyShi/elasticsearch,JervyShi/elasticsearch,kunallimaye/elasticsearch,YosuaMichael/elasticsearch,IanvsPoplicola/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,kalimatas/elasticsearch,ivansun1010/elasticsearch,easonC/elasticsearch,cnfire/elasticsearch-1,clintongormley/elasticsearch,EasonYi/elasticsearch,Uiho/elasticsearch,mrorii/elasticsearch,hirdesh2008/elasticsearch,andrestc/elasticsearch,scottsom/elasticsearch,martinstuga/elas
ticsearch,mbrukman/elasticsearch,jimczi/elasticsearch,sauravmondallive/elasticsearch,umeshdangat/elasticsearch,wangtuo/elasticsearch,iamjakob/elasticsearch,wayeast/elasticsearch,feiqitian/elasticsearch,socialrank/elasticsearch,rhoml/elasticsearch,lks21c/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,hirdesh2008/elasticsearch,camilojd/elasticsearch,zhiqinghuang/elasticsearch,kenshin233/elasticsearch,rhoml/elasticsearch,sreeramjayan/elasticsearch,rajanm/elasticsearch,mjason3/elasticsearch,pranavraman/elasticsearch,hirdesh2008/elasticsearch,dataduke/elasticsearch,gmarz/elasticsearch,combinatorist/elasticsearch,kunallimaye/elasticsearch,rmuir/elasticsearch,AleksKochev/elasticsearch,sneivandt/elasticsearch,vingupta3/elasticsearch,khiraiwa/elasticsearch,lmtwga/elasticsearch,nrkkalyan/elasticsearch,Brijeshrpatel9/elasticsearch,yanjunh/elasticsearch,ouyangkongtong/elasticsearch,jpountz/elasticsearch,fred84/elasticsearch,strapdata/elassandra5-rc,girirajsharma/elasticsearch,amaliujia/elasticsearch,infusionsoft/elasticsearch,aglne/elasticsearch,mute/elasticsearch,kkirsche/elasticsearch,Charlesdong/elasticsearch,henakamaMSFT/elasticsearch,C-Bish/elasticsearch,lzo/elasticsearch-1,jchampion/elasticsearch,iacdingping/elasticsearch,easonC/elasticsearch,wangtuo/elasticsearch,pablocastro/elasticsearch,sc0ttkclark/elasticsearch,iantruslove/elasticsearch,kunallimaye/elasticsearch,vietlq/elasticsearch,Asimov4/elasticsearch,andrestc/elasticsearch,ckclark/elasticsearch,glefloch/elasticsearch,elasticdog/elasticsearch,Shekharrajak/elasticsearch,wuranbo/elasticsearch,peschlowp/elasticsearch,kcompher/elasticsearch,dataduke/elasticsearch,iamjakob/elasticsearch,adrianbk/elasticsearch,hanswang/elasticsearch,Collaborne/elasticsearch,beiske/elasticsearch,naveenhooda2000/elasticsearch,queirozfcom/elasticsearch,MisterAndersen/elasticsearch,boliza/elasticsearch,abibell/elasticsearch,dantuffery/elasticsearch,likaiwalkman/elasticsearch,scorpionvicky/elasticsearch,wayeast/elasticsearch,skearns64/ela
sticsearch,kimimj/elasticsearch,slavau/elasticsearch,tsohil/elasticsearch,lzo/elasticsearch-1,strapdata/elassandra-test,opendatasoft/elasticsearch,tkssharma/elasticsearch,ricardocerq/elasticsearch,fernandozhu/elasticsearch,xingguang2013/elasticsearch,rento19962/elasticsearch,kaneshin/elasticsearch,MichaelLiZhou/elasticsearch,xuzha/elasticsearch,shreejay/elasticsearch,jbertouch/elasticsearch,petmit/elasticsearch,knight1128/elasticsearch,sauravmondallive/elasticsearch,nilabhsagar/elasticsearch,weipinghe/elasticsearch,Clairebi/ElasticsearchClone,markllama/elasticsearch,raishiv/elasticsearch,s1monw/elasticsearch,martinstuga/elasticsearch,markllama/elasticsearch,ESamir/elasticsearch,obourgain/elasticsearch,karthikjaps/elasticsearch,ricardocerq/elasticsearch,slavau/elasticsearch,pranavraman/elasticsearch,Charlesdong/elasticsearch,Flipkart/elasticsearch,njlawton/elasticsearch,overcome/elasticsearch,abhijitiitr/es,karthikjaps/elasticsearch,jprante/elasticsearch,scottsom/elasticsearch,hechunwen/elasticsearch,hanswang/elasticsearch,polyfractal/elasticsearch,huanzhong/elasticsearch,mikemccand/elasticsearch,LeoYao/elasticsearch,LewayneNaidoo/elasticsearch,episerver/elasticsearch,cwurm/elasticsearch,linglaiyao1314/elasticsearch,linglaiyao1314/elasticsearch,vrkansagara/elasticsearch,alexshadow007/elasticsearch,vietlq/elasticsearch,avikurapati/elasticsearch,elasticdog/elasticsearch,boliza/elasticsearch,nellicus/elasticsearch,HonzaKral/elasticsearch,wbowling/elasticsearch,kaneshin/elasticsearch,petabytedata/elasticsearch,bestwpw/elasticsearch,jimhooker2002/elasticsearch,golubev/elasticsearch,kaneshin/elasticsearch,xingguang2013/elasticsearch,codebunt/elasticsearch,mmaracic/elasticsearch,polyfractal/elasticsearch,raishiv/elasticsearch,schonfeld/elasticsearch,abhijitiitr/es,mapr/elasticsearch,Helen-Zhao/elasticsearch,caengcjd/elasticsearch,kimimj/elasticsearch,Fsero/elasticsearch,qwerty4030/elasticsearch,a2lin/elasticsearch,wittyameta/elasticsearch,apepper/elasticsearch,sarwarbhuiyan
/elasticsearch,jbertouch/elasticsearch,NBSW/elasticsearch,wimvds/elasticsearch,yongminxia/elasticsearch,martinstuga/elasticsearch,kaneshin/elasticsearch,ydsakyclguozi/elasticsearch,lzo/elasticsearch-1,wittyameta/elasticsearch,feiqitian/elasticsearch,scottsom/elasticsearch,lightslife/elasticsearch,xuzha/elasticsearch,Ansh90/elasticsearch,rhoml/elasticsearch,mute/elasticsearch,infusionsoft/elasticsearch,GlenRSmith/elasticsearch,jpountz/elasticsearch,vroyer/elasticassandra,zkidkid/elasticsearch,PhaedrusTheGreek/elasticsearch,sc0ttkclark/elasticsearch,a2lin/elasticsearch,brandonkearby/elasticsearch,MaineC/elasticsearch,camilojd/elasticsearch,uschindler/elasticsearch,iamjakob/elasticsearch,easonC/elasticsearch,nomoa/elasticsearch,achow/elasticsearch,scorpionvicky/elasticsearch,pranavraman/elasticsearch,Widen/elasticsearch,strapdata/elassandra,iacdingping/elasticsearch,acchen97/elasticsearch,zeroctu/elasticsearch,zhiqinghuang/elasticsearch,kevinkluge/elasticsearch,kevinkluge/elasticsearch,slavau/elasticsearch,luiseduardohdbackup/elasticsearch,tahaemin/elasticsearch,lmtwga/elasticsearch,overcome/elasticsearch,robin13/elasticsearch,jsgao0/elasticsearch,heng4fun/elasticsearch,vietlq/elasticsearch,bestwpw/elasticsearch,abhijitiitr/es,SergVro/elasticsearch,mjhennig/elasticsearch,loconsolutions/elasticsearch,salyh/elasticsearch,Chhunlong/elasticsearch,franklanganke/elasticsearch,sreeramjayan/elasticsearch,rajanm/elasticsearch,pablocastro/elasticsearch,markwalkom/elasticsearch,petabytedata/elasticsearch,socialrank/elasticsearch,ThiagoGarciaAlves/elasticsearch,xingguang2013/elasticsearch,gfyoung/elasticsearch,sjohnr/elasticsearch,hanswang/elasticsearch,himanshuag/elasticsearch,kalimatas/elasticsearch,F0lha/elasticsearch,LeoYao/elasticsearch,markwalkom/elasticsearch,sdauletau/elasticsearch,rmuir/elasticsearch,gingerwizard/elasticsearch,naveenhooda2000/elasticsearch,shreejay/elasticsearch,HarishAtGitHub/elasticsearch,vrkansagara/elasticsearch,Charlesdong/elasticsearch,hydro2k/elast
icsearch,mikemccand/elasticsearch,Siddartha07/elasticsearch,markwalkom/elasticsearch,sjohnr/elasticsearch,zhaocloud/elasticsearch,Brijeshrpatel9/elasticsearch,kkirsche/elasticsearch,glefloch/elasticsearch,GlenRSmith/elasticsearch,masterweb121/elasticsearch,ydsakyclguozi/elasticsearch,spiegela/elasticsearch,jaynblue/elasticsearch,elancom/elasticsearch,franklanganke/elasticsearch,brwe/elasticsearch,markwalkom/elasticsearch,mm0/elasticsearch,jw0201/elastic,mbrukman/elasticsearch,lmtwga/elasticsearch,fooljohnny/elasticsearch,LeoYao/elasticsearch,Helen-Zhao/elasticsearch,onegambler/elasticsearch,markharwood/elasticsearch,Microsoft/elasticsearch,mkis-/elasticsearch,ouyangkongtong/elasticsearch,artnowo/elasticsearch,Uiho/elasticsearch,pablocastro/elasticsearch,trangvh/elasticsearch,peschlowp/elasticsearch,wimvds/elasticsearch,zkidkid/elasticsearch,vvcephei/elasticsearch,HonzaKral/elasticsearch,truemped/elasticsearch,mjason3/elasticsearch,bestwpw/elasticsearch,Rygbee/elasticsearch,PhaedrusTheGreek/elasticsearch,jimhooker2002/elasticsearch,zkidkid/elasticsearch,mapr/elasticsearch,trangvh/elasticsearch,brwe/elasticsearch,masaruh/elasticsearch,nrkkalyan/elasticsearch,Clairebi/ElasticsearchClone,tkssharma/elasticsearch,vroyer/elassandra,ckclark/elasticsearch,springning/elasticsearch,mjason3/elasticsearch,diendt/elasticsearch,VukDukic/elasticsearch,GlenRSmith/elasticsearch,Shepard1212/elasticsearch,xpandan/elasticsearch,hanswang/elasticsearch,truemped/elasticsearch,geidies/elasticsearch,HarishAtGitHub/elasticsearch,ckclark/elasticsearch,skearns64/elasticsearch,anti-social/elasticsearch,apepper/elasticsearch,Clairebi/ElasticsearchClone,palecur/elasticsearch,nilabhsagar/elasticsearch,sposam/elasticsearch,fekaputra/elasticsearch,Ansh90/elasticsearch,knight1128/elasticsearch,gingerwizard/elasticsearch,ydsakyclguozi/elasticsearch,amit-shar/elasticsearch,dylan8902/elasticsearch,truemped/elasticsearch,amit-shar/elasticsearch,wenpos/elasticsearch,ajhalani/elasticsearch,huypx1292/elastic
search,Microsoft/elasticsearch,Siddartha07/elasticsearch,ImpressTV/elasticsearch,huypx1292/elasticsearch,vingupta3/elasticsearch,chirilo/elasticsearch,dongjoon-hyun/elasticsearch,ThiagoGarciaAlves/elasticsearch,Flipkart/elasticsearch,mgalushka/elasticsearch,jchampion/elasticsearch,vvcephei/elasticsearch,knight1128/elasticsearch,Microsoft/elasticsearch,sscarduzio/elasticsearch,mm0/elasticsearch,schonfeld/elasticsearch,Shepard1212/elasticsearch,strapdata/elassandra-test,areek/elasticsearch,Liziyao/elasticsearch,diendt/elasticsearch,wittyameta/elasticsearch,mapr/elasticsearch,Shekharrajak/elasticsearch,jimhooker2002/elasticsearch,Ansh90/elasticsearch,amaliujia/elasticsearch,davidvgalbraith/elasticsearch,petmit/elasticsearch,nellicus/elasticsearch,ydsakyclguozi/elasticsearch,szroland/elasticsearch,hanst/elasticsearch,franklanganke/elasticsearch,markharwood/elasticsearch,phani546/elasticsearch,baishuo/elasticsearch_v2.1.0-baishuo,HonzaKral/elasticsearch,Chhunlong/elasticsearch,kkirsche/elasticsearch,snikch/elasticsearch,clintongormley/elasticsearch,caengcjd/elasticsearch,episerver/elasticsearch,kingaj/elasticsearch,kcompher/elasticsearch,elancom/elasticsearch,truemped/elasticsearch,Rygbee/elasticsearch,andrejserafim/elasticsearch,drewr/elasticsearch,knight1128/elasticsearch,strapdata/elassandra,mjhennig/elasticsearch,zhaocloud/elasticsearch,lmtwga/elasticsearch,markllama/elasticsearch,mjason3/elasticsearch,yuy168/elasticsearch,StefanGor/elasticsearch,abibell/elasticsearch,jw0201/elastic,alexshadow007/elasticsearch,mbrukman/elasticsearch,fred84/elasticsearch,sdauletau/elasticsearch,AndreKR/elasticsearch,jsgao0/elasticsearch,Siddartha07/elasticsearch,himanshuag/elasticsearch,ajhalani/elasticsearch,szroland/elasticsearch,franklanganke/elasticsearch,petabytedata/elasticsearch,xuzha/elasticsearch,Uiho/elasticsearch,a2lin/elasticsearch,thecocce/elasticsearch,nellicus/elasticsearch,18098924759/elasticsearch,chirilo/elasticsearch,nknize/elasticsearch
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices.warmer; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.google.common.collect.ImmutableList; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.segments.IndexSegments; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerResponse; import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersResponse; import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerResponse; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.mapper.FieldMapper.Loading; import 
org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.warmer.IndexWarmerMissingException; import org.elasticsearch.search.warmer.IndexWarmersMetaData; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.hamcrest.Matchers; import org.junit.Test; import java.util.Locale; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.*; public class SimpleIndicesWarmerTests extends ElasticsearchIntegrationTest { @Test public void simpleWarmerTests() { createIndex("test"); ensureGreen(); PutWarmerResponse putWarmerResponse = client().admin().indices().preparePutWarmer("warmer_1") .setSearchRequest(client().prepareSearch("test").setTypes("a1").setQuery(QueryBuilders.termQuery("field", "value1"))) .execute().actionGet(); assertThat(putWarmerResponse.isAcknowledged(), equalTo(true)); putWarmerResponse = client().admin().indices().preparePutWarmer("warmer_2") .setSearchRequest(client().prepareSearch("test").setTypes("a2").setQuery(QueryBuilders.termQuery("field", "value2"))) .execute().actionGet(); assertThat(putWarmerResponse.isAcknowledged(), equalTo(true)); client().prepareIndex("test", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource("field", "value2").setRefresh(true).execute().actionGet(); GetWarmersResponse getWarmersResponse = client().admin().indices().prepareGetWarmers("tes*") .execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").size(), equalTo(2)); assertThat(getWarmersResponse.getWarmers().get("test").get(0).name(), equalTo("warmer_1")); assertThat(getWarmersResponse.getWarmers().get("test").get(1).name(), equalTo("warmer_2")); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addWarmers("warmer_*") 
.execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").size(), equalTo(2)); assertThat(getWarmersResponse.getWarmers().get("test").get(0).name(), equalTo("warmer_1")); assertThat(getWarmersResponse.getWarmers().get("test").get(1).name(), equalTo("warmer_2")); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addWarmers("warmer_1") .execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").get(0).name(), equalTo("warmer_1")); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addWarmers("warmer_2") .execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").get(0).name(), equalTo("warmer_2")); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addTypes("a*").addWarmers("warmer_2") .execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").get(0).name(), equalTo("warmer_2")); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addTypes("a1").addWarmers("warmer_2") .execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(0)); } @Test public void templateWarmer() { client().admin().indices().preparePutTemplate("template_1") .setSource("{\n" + " \"template\" : \"*\",\n" + " \"warmers\" : {\n" + " \"warmer_1\" : {\n" + " \"types\" : [],\n" + " \"source\" : {\n" + " \"query\" : {\n" + " \"match_all\" : {}\n" + " }\n" + " }\n" + " }\n" + " }\n" + "}") .execute().actionGet(); createIndex("test"); 
ensureGreen(); ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); IndexWarmersMetaData warmersMetaData = clusterState.metaData().index("test").custom(IndexWarmersMetaData.TYPE); assertThat(warmersMetaData, Matchers.notNullValue()); assertThat(warmersMetaData.entries().size(), equalTo(1)); client().prepareIndex("test", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource("field", "value2").setRefresh(true).execute().actionGet(); } @Test public void createIndexWarmer() { assertAcked(prepareCreate("test") .setSource("{\n" + " \"warmers\" : {\n" + " \"warmer_1\" : {\n" + " \"types\" : [],\n" + " \"source\" : {\n" + " \"query\" : {\n" + " \"match_all\" : {}\n" + " }\n" + " }\n" + " }\n" + " }\n" + "}")); ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); IndexWarmersMetaData warmersMetaData = clusterState.metaData().index("test").custom(IndexWarmersMetaData.TYPE); assertThat(warmersMetaData, Matchers.notNullValue()); assertThat(warmersMetaData.entries().size(), equalTo(1)); client().prepareIndex("test", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource("field", "value2").setRefresh(true).execute().actionGet(); } @Test public void deleteNonExistentIndexWarmerTest() { createIndex("test"); try { client().admin().indices().prepareDeleteWarmer().setIndices("test").setNames("foo").execute().actionGet(); fail("warmer foo should not exist"); } catch (IndexWarmerMissingException ex) { assertThat(ex.names()[0], equalTo("foo")); } } @Test public void deleteIndexWarmerTest() { createIndex("test"); ensureGreen(); PutWarmerResponse putWarmerResponse = client().admin().indices().preparePutWarmer("custom_warmer") 
.setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery())) .get(); assertThat(putWarmerResponse.isAcknowledged(), equalTo(true)); GetWarmersResponse getWarmersResponse = client().admin().indices().prepareGetWarmers("test").get(); assertThat(getWarmersResponse.warmers().size(), equalTo(1)); ObjectObjectCursor<String, ImmutableList<IndexWarmersMetaData.Entry>> entry = getWarmersResponse.warmers().iterator().next(); assertThat(entry.key, equalTo("test")); assertThat(entry.value.size(), equalTo(1)); assertThat(entry.value.iterator().next().name(), equalTo("custom_warmer")); DeleteWarmerResponse deleteWarmerResponse = client().admin().indices().prepareDeleteWarmer().setIndices("test").setNames("custom_warmer").get(); assertThat(deleteWarmerResponse.isAcknowledged(), equalTo(true)); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").get(); assertThat(getWarmersResponse.warmers().size(), equalTo(0)); } @Test // issue 3246 public void ensureThatIndexWarmersCanBeChangedOnRuntime() throws Exception { createIndex("test"); ensureGreen(); PutWarmerResponse putWarmerResponse = client().admin().indices().preparePutWarmer("custom_warmer") .setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery())) .execute().actionGet(); assertThat(putWarmerResponse.isAcknowledged(), equalTo(true)); client().prepareIndex("test", "test", "1").setSource("foo", "bar").setRefresh(true).execute().actionGet(); logger.info("--> Disabling warmers execution"); client().admin().indices().prepareUpdateSettings("test").setSettings(ImmutableSettings.builder().put("index.warmer.enabled", false)).execute().actionGet(); long warmerRunsAfterDisabling = getWarmerRuns(); assertThat(warmerRunsAfterDisabling, greaterThanOrEqualTo(1L)); client().prepareIndex("test", "test", "2").setSource("foo2", "bar2").setRefresh(true).execute().actionGet(); assertThat(getWarmerRuns(), 
equalTo(warmerRunsAfterDisabling)); } @Test public void gettingAllWarmersUsingAllAndWildcardsShouldWork() throws Exception { createIndex("test"); ensureGreen(); PutWarmerResponse putWarmerResponse = client().admin().indices().preparePutWarmer("custom_warmer") .setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery())) .execute().actionGet(); assertThat(putWarmerResponse.isAcknowledged(), equalTo(true)); PutWarmerResponse anotherPutWarmerResponse = client().admin().indices().preparePutWarmer("second_custom_warmer") .setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery())) .execute().actionGet(); assertThat(anotherPutWarmerResponse.isAcknowledged(), equalTo(true)); GetWarmersResponse getWarmersResponse = client().admin().indices().prepareGetWarmers("*").addWarmers("*").get(); assertThat(getWarmersResponse.warmers().size(), is(1)); getWarmersResponse = client().admin().indices().prepareGetWarmers("_all").addWarmers("_all").get(); assertThat(getWarmersResponse.warmers().size(), is(1)); getWarmersResponse = client().admin().indices().prepareGetWarmers("t*").addWarmers("c*").get(); assertThat(getWarmersResponse.warmers().size(), is(1)); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addWarmers("custom_warmer", "second_custom_warmer").get(); assertThat(getWarmersResponse.warmers().size(), is(1)); } private long getWarmerRuns() { IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats("test").clear().setWarmer(true).execute().actionGet(); return indicesStatsResponse.getIndex("test").getPrimaries().warmer.total(); } private long getSegmentsMemoryUsage(String idx) { IndicesSegmentResponse response = client().admin().indices().segments(Requests.indicesSegmentsRequest(idx)).actionGet(); IndexSegments indicesSegments = response.getIndices().get(idx); long total = 0; for (IndexShardSegments indexShardSegments : indicesSegments) { 
for (ShardSegments shardSegments : indexShardSegments) { for (Segment segment : shardSegments) { logger.debug("+=" + segment.memoryInBytes + " " + indexShardSegments.getShardId() + " " + shardSegments.getIndex()); total += segment.memoryInBytes; } } } return total; } private enum LoadingMethod { LAZY { @Override CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) { return client().admin().indices().prepareCreate(indexName).setSettings(ImmutableSettings.builder().put(SINGLE_SHARD_NO_REPLICA).put(SearchService.NORMS_LOADING_KEY, Loading.LAZY_VALUE)); } }, EAGER { @Override CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) { return client().admin().indices().prepareCreate(indexName).setSettings(ImmutableSettings.builder().put(SINGLE_SHARD_NO_REPLICA).put(SearchService.NORMS_LOADING_KEY, Loading.EAGER_VALUE)); } @Override boolean isLazy() { return false; } }, EAGER_PER_FIELD { @Override CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) throws Exception { return client().admin().indices().prepareCreate(indexName).setSettings(ImmutableSettings.builder().put(SINGLE_SHARD_NO_REPLICA).put(SearchService.NORMS_LOADING_KEY, Loading.LAZY_VALUE)).addMapping(type, JsonXContent.contentBuilder() .startObject() .startObject(type) .startObject("properties") .startObject(fieldName) .field("type", "string") .startObject("norms") .field("loading", Loading.EAGER_VALUE) .endObject() .endObject() .endObject() .endObject() .endObject() ); } @Override boolean isLazy() { return false; } }; private static Settings SINGLE_SHARD_NO_REPLICA = ImmutableSettings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build(); abstract CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) throws Exception; boolean isLazy() { return true; } } public void testEagerLoading() throws Exception { for (LoadingMethod method : LoadingMethod.values()) { 
logger.debug("METHOD " + method); String indexName = method.name().toLowerCase(Locale.ROOT); assertAcked(method.createIndex(indexName, "t", "foo")); client().prepareIndex(indexName, "t", "1").setSource("foo", "bar").setRefresh(true).execute().actionGet(); ensureGreen(indexName); long memoryUsage0 = getSegmentsMemoryUsage(indexName); // queries load norms if they were not loaded before client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("foo", "bar")).execute().actionGet(); long memoryUsage1 = getSegmentsMemoryUsage(indexName); if (method.isLazy()) { assertThat(memoryUsage1, greaterThan(memoryUsage0)); } else { assertThat(memoryUsage1, equalTo(memoryUsage0)); } } } }
src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerTests.java
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.indices.warmer; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.google.common.collect.ImmutableList; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.segments.IndexSegments; import org.elasticsearch.action.admin.indices.segments.IndexShardSegments; import org.elasticsearch.action.admin.indices.segments.IndicesSegmentResponse; import org.elasticsearch.action.admin.indices.segments.ShardSegments; import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.admin.indices.warmer.delete.DeleteWarmerResponse; import org.elasticsearch.action.admin.indices.warmer.get.GetWarmersResponse; import org.elasticsearch.action.admin.indices.warmer.put.PutWarmerResponse; import org.elasticsearch.client.Requests; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.settings.ImmutableSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.engine.Segment; import org.elasticsearch.index.mapper.FieldMapper.Loading; import 
org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.warmer.IndexWarmerMissingException; import org.elasticsearch.search.warmer.IndexWarmersMetaData; import org.elasticsearch.test.ElasticsearchIntegrationTest; import org.hamcrest.Matchers; import org.junit.Test; import java.util.Locale; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.*; public class SimpleIndicesWarmerTests extends ElasticsearchIntegrationTest { @Test public void simpleWarmerTests() { createIndex("test"); ensureGreen(); PutWarmerResponse putWarmerResponse = client().admin().indices().preparePutWarmer("warmer_1") .setSearchRequest(client().prepareSearch("test").setTypes("a1").setQuery(QueryBuilders.termQuery("field", "value1"))) .execute().actionGet(); assertThat(putWarmerResponse.isAcknowledged(), equalTo(true)); putWarmerResponse = client().admin().indices().preparePutWarmer("warmer_2") .setSearchRequest(client().prepareSearch("test").setTypes("a2").setQuery(QueryBuilders.termQuery("field", "value2"))) .execute().actionGet(); assertThat(putWarmerResponse.isAcknowledged(), equalTo(true)); client().prepareIndex("test", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource("field", "value2").setRefresh(true).execute().actionGet(); GetWarmersResponse getWarmersResponse = client().admin().indices().prepareGetWarmers("tes*") .execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").size(), equalTo(2)); assertThat(getWarmersResponse.getWarmers().get("test").get(0).name(), equalTo("warmer_1")); assertThat(getWarmersResponse.getWarmers().get("test").get(1).name(), equalTo("warmer_2")); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addWarmers("warmer_*") 
.execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").size(), equalTo(2)); assertThat(getWarmersResponse.getWarmers().get("test").get(0).name(), equalTo("warmer_1")); assertThat(getWarmersResponse.getWarmers().get("test").get(1).name(), equalTo("warmer_2")); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addWarmers("warmer_1") .execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").get(0).name(), equalTo("warmer_1")); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addWarmers("warmer_2") .execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").get(0).name(), equalTo("warmer_2")); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addTypes("a*").addWarmers("warmer_2") .execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").size(), equalTo(1)); assertThat(getWarmersResponse.getWarmers().get("test").get(0).name(), equalTo("warmer_2")); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addTypes("a1").addWarmers("warmer_2") .execute().actionGet(); assertThat(getWarmersResponse.getWarmers().size(), equalTo(0)); } @Test public void templateWarmer() { client().admin().indices().preparePutTemplate("template_1") .setSource("{\n" + " \"template\" : \"*\",\n" + " \"warmers\" : {\n" + " \"warmer_1\" : {\n" + " \"types\" : [],\n" + " \"source\" : {\n" + " \"query\" : {\n" + " \"match_all\" : {}\n" + " }\n" + " }\n" + " }\n" + " }\n" + "}") .execute().actionGet(); createIndex("test"); 
ensureGreen(); ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); IndexWarmersMetaData warmersMetaData = clusterState.metaData().index("test").custom(IndexWarmersMetaData.TYPE); assertThat(warmersMetaData, Matchers.notNullValue()); assertThat(warmersMetaData.entries().size(), equalTo(1)); client().prepareIndex("test", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource("field", "value2").setRefresh(true).execute().actionGet(); } @Test public void createIndexWarmer() { assertAcked(prepareCreate("test") .setSource("{\n" + " \"warmers\" : {\n" + " \"warmer_1\" : {\n" + " \"types\" : [],\n" + " \"source\" : {\n" + " \"query\" : {\n" + " \"match_all\" : {}\n" + " }\n" + " }\n" + " }\n" + " }\n" + "}")); ClusterState clusterState = client().admin().cluster().prepareState().execute().actionGet().getState(); IndexWarmersMetaData warmersMetaData = clusterState.metaData().index("test").custom(IndexWarmersMetaData.TYPE); assertThat(warmersMetaData, Matchers.notNullValue()); assertThat(warmersMetaData.entries().size(), equalTo(1)); client().prepareIndex("test", "type1", "1").setSource("field", "value1").setRefresh(true).execute().actionGet(); client().prepareIndex("test", "type1", "2").setSource("field", "value2").setRefresh(true).execute().actionGet(); } @Test public void deleteNonExistentIndexWarmerTest() { createIndex("test"); try { client().admin().indices().prepareDeleteWarmer().setIndices("test").setNames("foo").execute().actionGet(1000); fail("warmer foo should not exist"); } catch (IndexWarmerMissingException ex) { assertThat(ex.names()[0], equalTo("foo")); } } @Test public void deleteIndexWarmerTest() { createIndex("test"); ensureGreen(); PutWarmerResponse putWarmerResponse = client().admin().indices().preparePutWarmer("custom_warmer") 
.setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery())) .get(); assertThat(putWarmerResponse.isAcknowledged(), equalTo(true)); GetWarmersResponse getWarmersResponse = client().admin().indices().prepareGetWarmers("test").get(); assertThat(getWarmersResponse.warmers().size(), equalTo(1)); ObjectObjectCursor<String, ImmutableList<IndexWarmersMetaData.Entry>> entry = getWarmersResponse.warmers().iterator().next(); assertThat(entry.key, equalTo("test")); assertThat(entry.value.size(), equalTo(1)); assertThat(entry.value.iterator().next().name(), equalTo("custom_warmer")); DeleteWarmerResponse deleteWarmerResponse = client().admin().indices().prepareDeleteWarmer().setIndices("test").setNames("custom_warmer").get(); assertThat(deleteWarmerResponse.isAcknowledged(), equalTo(true)); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").get(); assertThat(getWarmersResponse.warmers().size(), equalTo(0)); } @Test // issue 3246 public void ensureThatIndexWarmersCanBeChangedOnRuntime() throws Exception { createIndex("test"); ensureGreen(); PutWarmerResponse putWarmerResponse = client().admin().indices().preparePutWarmer("custom_warmer") .setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery())) .execute().actionGet(); assertThat(putWarmerResponse.isAcknowledged(), equalTo(true)); client().prepareIndex("test", "test", "1").setSource("foo", "bar").setRefresh(true).execute().actionGet(); logger.info("--> Disabling warmers execution"); client().admin().indices().prepareUpdateSettings("test").setSettings(ImmutableSettings.builder().put("index.warmer.enabled", false)).execute().actionGet(); long warmerRunsAfterDisabling = getWarmerRuns(); assertThat(warmerRunsAfterDisabling, greaterThanOrEqualTo(1L)); client().prepareIndex("test", "test", "2").setSource("foo2", "bar2").setRefresh(true).execute().actionGet(); assertThat(getWarmerRuns(), 
equalTo(warmerRunsAfterDisabling)); } @Test public void gettingAllWarmersUsingAllAndWildcardsShouldWork() throws Exception { createIndex("test"); ensureGreen(); PutWarmerResponse putWarmerResponse = client().admin().indices().preparePutWarmer("custom_warmer") .setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery())) .execute().actionGet(); assertThat(putWarmerResponse.isAcknowledged(), equalTo(true)); PutWarmerResponse anotherPutWarmerResponse = client().admin().indices().preparePutWarmer("second_custom_warmer") .setSearchRequest(client().prepareSearch("test").setTypes("test").setQuery(QueryBuilders.matchAllQuery())) .execute().actionGet(); assertThat(anotherPutWarmerResponse.isAcknowledged(), equalTo(true)); GetWarmersResponse getWarmersResponse = client().admin().indices().prepareGetWarmers("*").addWarmers("*").get(); assertThat(getWarmersResponse.warmers().size(), is(1)); getWarmersResponse = client().admin().indices().prepareGetWarmers("_all").addWarmers("_all").get(); assertThat(getWarmersResponse.warmers().size(), is(1)); getWarmersResponse = client().admin().indices().prepareGetWarmers("t*").addWarmers("c*").get(); assertThat(getWarmersResponse.warmers().size(), is(1)); getWarmersResponse = client().admin().indices().prepareGetWarmers("test").addWarmers("custom_warmer", "second_custom_warmer").get(); assertThat(getWarmersResponse.warmers().size(), is(1)); } private long getWarmerRuns() { IndicesStatsResponse indicesStatsResponse = client().admin().indices().prepareStats("test").clear().setWarmer(true).execute().actionGet(); return indicesStatsResponse.getIndex("test").getPrimaries().warmer.total(); } private long getSegmentsMemoryUsage(String idx) { IndicesSegmentResponse response = client().admin().indices().segments(Requests.indicesSegmentsRequest(idx)).actionGet(); IndexSegments indicesSegments = response.getIndices().get(idx); long total = 0; for (IndexShardSegments indexShardSegments : indicesSegments) { 
for (ShardSegments shardSegments : indexShardSegments) { for (Segment segment : shardSegments) { logger.debug("+=" + segment.memoryInBytes + " " + indexShardSegments.getShardId() + " " + shardSegments.getIndex()); total += segment.memoryInBytes; } } } return total; } private enum LoadingMethod { LAZY { @Override CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) { return client().admin().indices().prepareCreate(indexName).setSettings(ImmutableSettings.builder().put(SINGLE_SHARD_NO_REPLICA).put(SearchService.NORMS_LOADING_KEY, Loading.LAZY_VALUE)); } }, EAGER { @Override CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) { return client().admin().indices().prepareCreate(indexName).setSettings(ImmutableSettings.builder().put(SINGLE_SHARD_NO_REPLICA).put(SearchService.NORMS_LOADING_KEY, Loading.EAGER_VALUE)); } @Override boolean isLazy() { return false; } }, EAGER_PER_FIELD { @Override CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) throws Exception { return client().admin().indices().prepareCreate(indexName).setSettings(ImmutableSettings.builder().put(SINGLE_SHARD_NO_REPLICA).put(SearchService.NORMS_LOADING_KEY, Loading.LAZY_VALUE)).addMapping(type, JsonXContent.contentBuilder() .startObject() .startObject(type) .startObject("properties") .startObject(fieldName) .field("type", "string") .startObject("norms") .field("loading", Loading.EAGER_VALUE) .endObject() .endObject() .endObject() .endObject() .endObject() ); } @Override boolean isLazy() { return false; } }; private static Settings SINGLE_SHARD_NO_REPLICA = ImmutableSettings.builder().put("number_of_shards", 1).put("number_of_replicas", 0).build(); abstract CreateIndexRequestBuilder createIndex(String indexName, String type, String fieldName) throws Exception; boolean isLazy() { return true; } } public void testEagerLoading() throws Exception { for (LoadingMethod method : LoadingMethod.values()) { 
logger.debug("METHOD " + method); String indexName = method.name().toLowerCase(Locale.ROOT); assertAcked(method.createIndex(indexName, "t", "foo")); client().prepareIndex(indexName, "t", "1").setSource("foo", "bar").setRefresh(true).execute().actionGet(); ensureGreen(indexName); long memoryUsage0 = getSegmentsMemoryUsage(indexName); // queries load norms if they were not loaded before client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("foo", "bar")).execute().actionGet(); long memoryUsage1 = getSegmentsMemoryUsage(indexName); if (method.isLazy()) { assertThat(memoryUsage1, greaterThan(memoryUsage0)); } else { assertThat(memoryUsage1, equalTo(memoryUsage0)); } } } }
[Test] remove timeout from deleteWarmer call with many shards that might just take a while
src/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerTests.java
[Test] remove timeout from deleteWarmer call with many shards that might just take a while
<ide><path>rc/test/java/org/elasticsearch/indices/warmer/SimpleIndicesWarmerTests.java <ide> @Test <ide> public void deleteNonExistentIndexWarmerTest() { <ide> createIndex("test"); <del> <ide> try { <del> client().admin().indices().prepareDeleteWarmer().setIndices("test").setNames("foo").execute().actionGet(1000); <add> client().admin().indices().prepareDeleteWarmer().setIndices("test").setNames("foo").execute().actionGet(); <ide> fail("warmer foo should not exist"); <ide> } catch (IndexWarmerMissingException ex) { <ide> assertThat(ex.names()[0], equalTo("foo"));
Java
lgpl-2.1
8799c71ff34e743ce850639bcacffc3028d402ca
0
maxbiostat/beast-mcmc,maxbiostat/beast-mcmc,codeaudit/beast-mcmc,adamallo/beast-mcmc,4ment/beast-mcmc,codeaudit/beast-mcmc,adamallo/beast-mcmc,beast-dev/beast-mcmc,beast-dev/beast-mcmc,maxbiostat/beast-mcmc,beast-dev/beast-mcmc,maxbiostat/beast-mcmc,4ment/beast-mcmc,maxbiostat/beast-mcmc,codeaudit/beast-mcmc,adamallo/beast-mcmc,codeaudit/beast-mcmc,beast-dev/beast-mcmc,maxbiostat/beast-mcmc,4ment/beast-mcmc,adamallo/beast-mcmc,adamallo/beast-mcmc,beast-dev/beast-mcmc,codeaudit/beast-mcmc,4ment/beast-mcmc,4ment/beast-mcmc,4ment/beast-mcmc,codeaudit/beast-mcmc,beast-dev/beast-mcmc,adamallo/beast-mcmc
/* * ConstExponential.java * * Copyright (C) 2002-2006 Alexei Drummond and Andrew Rambaut * * This file is part of BEAST. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership and licensing. * * BEAST is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * BEAST is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with BEAST; if not, write to the * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, * Boston, MA 02110-1301 USA */ package dr.evolution.coalescent; /** * This class models exponential growth from an initial population size which * then transitions back to a constant population size. 
* * @author Andrew Rambaut * @author Alexei Drummond * * @version $ID$ * */ public class ConstExpConst extends ConstExponential { /** * Construct demographic model with default settings */ public ConstExpConst(Type units) { super(units); } public double getTime1() { return time1; } public void setTime1(double time1) { this.time1 = time1; } public double getTime2() { return time1 + (-Math.log(getN1()/getN0())/getGrowthRate()); } public void setProportion(double p) { this.setN1(getN0() * p); } // Implementation of abstract methods public double getDemographic(double t) { double N0 = getN0(); double N1 = getN1(); double r = getGrowthRate(); double t1 = getTime1(); if (t < t1) { return N0; } double t2 = getTime2(); if (t >= t2) { return N1; } return N0 * Math.exp(-r*(t - t1)); } /** * Returns value of demographic intensity function at time t * (= integral 1/N(x) dx from 0 to t). */ public double getIntensity(double t) { double time1 = getTime1(); double time2 = getTime2(); double oneOverN0 = 1.0 / getN0(); if (t < time1) { return (t * oneOverN0); } double oneOverNt = 1.0 / getDemographic(t); if (t > time1 && t < time2) { return (t * oneOverN0) + ( 0.5 * (t-time1) * (oneOverNt-oneOverN0) ); } double oneOverN1 = 1.0 / getN1(); if (t >= time2) { return (t * oneOverN0) + ( 0.5 * (time2-time1) * (oneOverN1-oneOverN0) ) + (oneOverN1 * (t-time2)); } throw new RuntimeException("Not implemented!"); } public double getInverseIntensity(double x) { throw new RuntimeException("Not implemented!"); } public int getNumArguments() { return 4; } public String getArgumentName(int n) { switch (n) { case 0: return "N0"; case 1: return "r"; case 2: return "N1"; case 3: return "time1"; } throw new IllegalArgumentException("Argument " + n + " does not exist"); } public double getArgument(int n) { switch (n) { case 0: return getN0(); case 1: return getGrowthRate(); case 2: return getN1(); case 3: return getTime1(); } throw new IllegalArgumentException("Argument " + n + " does not exist"); } 
public void setArgument(int n, double value) { switch (n) { case 0: setN0(value); break; case 1: setGrowthRate(value); break; case 2: setN1(value); break; case 3: setTime1(value); break; default: throw new IllegalArgumentException("Argument " + n + " does not exist"); } } public double getLowerBound(int n) { return 0.0; } public double getUpperBound(int n) { return Double.POSITIVE_INFINITY; } // // private stuff // private double time1 = 0.0; }
src/dr/evolution/coalescent/ConstExpConst.java
/* * ConstExponential.java * * Copyright (C) 2002-2006 Alexei Drummond and Andrew Rambaut * * This file is part of BEAST. * See the NOTICE file distributed with this work for additional * information regarding copyright ownership and licensing. * * BEAST is free software; you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * BEAST is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with BEAST; if not, write to the * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, * Boston, MA 02110-1301 USA */ package dr.evolution.coalescent; /** * This class models exponential growth from an initial population size which * then transitions back to a constant population size. 
* * @author Andrew Rambaut * @author Alexei Drummond * * @version $ID$ * */ public class ConstExpConst extends ConstExponential { /** * Construct demographic model with default settings */ public ConstExpConst(Type units) { super(units); } public double getTime1() { return time1; } public void setTime1(double time1) { this.time1 = time1; } public double getTime2() { return time1 + (-Math.log(getN1()/getN0())/getGrowthRate()); } public void setProportion(double p) { this.setN1(getN0() * p); } // Implementation of abstract methods public double getDemographic(double t) { double N0 = getN0(); double N1 = getN1(); double r = getGrowthRate(); double t1 = getTime1(); if (t < t1) { return N0; } double t2 = getTime2(); if (t >= t2) { return N1; } return N0 * Math.exp(-r*(t - t1)); } /** * Returns value of demographic intensity function at time t * (= integral 1/N(x) dx from 0 to t). */ public double getIntensity(double t) { throw new RuntimeException("Not implemented!"); } public double getInverseIntensity(double x) { throw new RuntimeException("Not implemented!"); } public int getNumArguments() { return 4; } public String getArgumentName(int n) { switch (n) { case 0: return "N0"; case 1: return "r"; case 2: return "N1"; case 3: return "time1"; } throw new IllegalArgumentException("Argument " + n + " does not exist"); } public double getArgument(int n) { switch (n) { case 0: return getN0(); case 1: return getGrowthRate(); case 2: return getN1(); case 3: return getTime1(); } throw new IllegalArgumentException("Argument " + n + " does not exist"); } public void setArgument(int n, double value) { switch (n) { case 0: setN0(value); break; case 1: setGrowthRate(value); break; case 2: setN1(value); break; case 3: setTime1(value); break; default: throw new IllegalArgumentException("Argument " + n + " does not exist"); } } public double getLowerBound(int n) { return 0.0; } public double getUpperBound(int n) { return Double.POSITIVE_INFINITY; } // // private stuff // private double 
time1 = 0.0; }
Implemented getIntensity in ConstExpConst
src/dr/evolution/coalescent/ConstExpConst.java
Implemented getIntensity in ConstExpConst
<ide><path>rc/dr/evolution/coalescent/ConstExpConst.java <ide> * Returns value of demographic intensity function at time t <ide> * (= integral 1/N(x) dx from 0 to t). <ide> */ <del> public double getIntensity(double t) { <add> public double getIntensity(double t) { <add> double time1 = getTime1(); <add> double time2 = getTime2(); <add> double oneOverN0 = 1.0 / getN0(); <ide> <del> throw new RuntimeException("Not implemented!"); <del> } <add> if (t < time1) { <add> return (t * oneOverN0); <add> } <add> double oneOverNt = 1.0 / getDemographic(t); <add> if (t > time1 && t < time2) { <add> return (t * oneOverN0) + ( 0.5 * (t-time1) * (oneOverNt-oneOverN0) ); <add> } <add> double oneOverN1 = 1.0 / getN1(); <add> if (t >= time2) { <add> return (t * oneOverN0) + ( 0.5 * (time2-time1) * (oneOverN1-oneOverN0) ) + (oneOverN1 * (t-time2)); <add> } <add> throw new RuntimeException("Not implemented!"); <add> } <ide> <ide> public double getInverseIntensity(double x) { <ide>
Java
epl-1.0
dfeeb22b31ee4b6ae740635c1176d4e2e9f47a1c
0
wdliu/egit,wdliu/egit,blizzy78/egit,SmithAndr/egit,paulvi/egit,SmithAndr/egit,collaborative-modeling/egit,collaborative-modeling/egit,paulvi/egit
/******************************************************************************* * Copyright (c) 2010-2013 SAP AG and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Mathias Kinzler (SAP AG) - initial implementation * Stefan Lay (SAP AG) - improvements *******************************************************************************/ package org.eclipse.egit.ui.internal.clone; import java.io.File; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IProjectDescription; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.IWorkspaceRunnable; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.core.runtime.Path; import org.eclipse.egit.core.op.ConnectProviderOperation; import org.eclipse.egit.ui.Activator; import org.eclipse.egit.ui.internal.UIIcons; import org.eclipse.egit.ui.internal.UIText; import org.eclipse.egit.ui.internal.clone.GitCloneSourceProviderExtension.CloneSourceProvider; import org.eclipse.egit.ui.internal.provisional.wizards.GitRepositoryInfo; import org.eclipse.egit.ui.internal.provisional.wizards.IRepositorySearchResult; import org.eclipse.egit.ui.internal.provisional.wizards.NoRepositoryInfoException; import org.eclipse.jface.dialogs.IDialogSettings; import org.eclipse.jface.operation.IRunnableWithProgress; import org.eclipse.jface.viewers.IStructuredSelection; import 
org.eclipse.jface.wizard.IWizardPage; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.Repository; import org.eclipse.ui.IImportWizard; import org.eclipse.ui.IWorkbench; import org.eclipse.ui.IWorkingSet; import org.eclipse.ui.PlatformUI; import org.eclipse.ui.actions.NewProjectAction; /** * A wizard which allows to optionally clone a repository and to import projects from a repository. */ public class GitImportWizard extends AbstractGitCloneWizard implements IImportWizard { private static final String GIT_IMPORT_SECTION = "GitImportWizard"; //$NON-NLS-1$ private GitSelectRepositoryPage selectRepoPage = new GitSelectRepositoryPage(); private GitSelectWizardPage importWithDirectoriesPage = new GitSelectWizardPage(){ public void setVisible(boolean visible) { if (existingRepo == null && visible && (cloneDestination.cloneSettingsChanged())) { setCallerRunsCloneOperation(true); try { final GitRepositoryInfo repositoryInfo = currentSearchResult.getGitRepositoryInfo(); performClone(repositoryInfo); importWithDirectoriesPage.getControl().getDisplay().asyncExec(new Runnable() { public void run() { runCloneOperation(getContainer(), repositoryInfo); cloneDestination.saveSettingsForClonedRepo(); }}); } catch (URISyntaxException e) { Activator.error(UIText.GitImportWizard_errorParsingURI, e); } catch (NoRepositoryInfoException e) { Activator.error(UIText.GitImportWizard_noRepositoryInfo, e); } catch (Exception e) { Activator.error(e.getMessage(), e); } } super.setVisible(visible); } }; private GitProjectsImportPage projectsImportPage = new GitProjectsImportPage() { public void setVisible(boolean visible) { if (visible) setProjectsList(importWithDirectoriesPage.getPath()); super.setVisible(visible); } }; private GitCreateGeneralProjectPage createGeneralProjectPage = new GitCreateGeneralProjectPage() { public void setVisible(boolean visible) { if (visible) setPath(importWithDirectoriesPage.getPath()); super.setVisible(visible); } }; private Repository 
existingRepo; /** * The default constructor */ public GitImportWizard() { this(null); } /** * Construct the import wizard based on given repository search result. The * wizard skips the repository location page in this case. * * @param searchResult * the search result to initialize the import wizard with. */ public GitImportWizard(IRepositorySearchResult searchResult) { super(searchResult); setWindowTitle(UIText.GitImportWizard_WizardTitle); setDefaultPageImageDescriptor(UIIcons.WIZBAN_IMPORT_REPO); setDialogSettings(getImportWizardDialogSettings()); } @Override protected void addPreClonePages() { if (!hasSearchResult()) addPage(selectRepoPage); } @Override protected void addPostClonePages() { addPage(importWithDirectoriesPage); addPage(projectsImportPage); addPage(createGeneralProjectPage); } @Override protected List<CloneSourceProvider> getCloneSourceProviders() { List<CloneSourceProvider> cloneSourceProvider = super.getCloneSourceProviders(); cloneSourceProvider.add(0, CloneSourceProvider.LOCAL); return cloneSourceProvider; } public void init(IWorkbench workbench, IStructuredSelection selection) { // nothing to do } @Override public IWizardPage getNextPage(IWizardPage page) { if (page == selectRepoPage) { existingRepo = selectRepoPage.getRepository(); importWithDirectoriesPage.setRepository(selectRepoPage .getRepository()); return importWithDirectoriesPage; } else if (page == cloneDestination) { existingRepo = null; importWithDirectoriesPage.setRepository(getTargetRepository()); return importWithDirectoriesPage; } else if (page == importWithDirectoriesPage) switch (importWithDirectoriesPage.getWizardSelection()) { case GitSelectWizardPage.EXISTING_PROJECTS_WIZARD: return projectsImportPage; case GitSelectWizardPage.NEW_WIZARD: return null; case GitSelectWizardPage.GENERAL_WIZARD: return createGeneralProjectPage; } else if (page == createGeneralProjectPage || page == projectsImportPage) return null; return super.getNextPage(page); } private Repository 
getTargetRepository() { if (existingRepo != null) return existingRepo; else try { return org.eclipse.egit.core.Activator .getDefault() .getRepositoryCache() .lookupRepository( new File(cloneDestination.getDestinationFile(), Constants.DOT_GIT)); } catch (IOException e) { Activator .error("Error looking up repository at " + cloneDestination.getDestinationFile(), e); //$NON-NLS-1$ return null; } } @Override public boolean performFinish() { try { getContainer().run(true, true, new IRunnableWithProgress() { public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException { importProjects(monitor); } }); } catch (InvocationTargetException e) { Activator .handleError(e.getCause().getMessage(), e.getCause(), true); return false; } catch (InterruptedException e) { Activator.handleError( UIText.GitCreateProjectViaWizardWizard_AbortedMessage, e, true); return false; } return true; } @Override public boolean canFinish() { switch (importWithDirectoriesPage.getWizardSelection()) { case GitSelectWizardPage.EXISTING_PROJECTS_WIZARD: return projectsImportPage.isPageComplete(); case GitSelectWizardPage.NEW_WIZARD: return true; case GitSelectWizardPage.GENERAL_WIZARD: return createGeneralProjectPage.isPageComplete(); } return super.canFinish(); } private void importProjects(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException { switch (importWithDirectoriesPage.getWizardSelection()) { case GitSelectWizardPage.EXISTING_PROJECTS_WIZARD: { final Set<ProjectRecord> projectsToCreate = new HashSet<ProjectRecord>(); final List<IWorkingSet> workingSets = new ArrayList<IWorkingSet>(); final Repository[] repository = new Repository[1]; // get the data from the pages in the UI thread PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() { public void run() { projectsToCreate.addAll(projectsImportPage .getCheckedProjects()); IWorkingSet[] workingSetArray = projectsImportPage .getSelectedWorkingSets(); 
workingSets.addAll(Arrays.asList(workingSetArray)); repository[0] = getTargetRepository(); projectsImportPage.saveWidgetValues(); } }); ProjectUtils.createProjects(projectsToCreate, repository[0], workingSets.toArray(new IWorkingSet[workingSets.size()]), monitor); break; } case GitSelectWizardPage.NEW_WIZARD: { final File[] repoDir = new File[1]; PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() { public void run() { repoDir[0] = getTargetRepository().getDirectory(); } }); final List<IProject> previousProjects = Arrays .asList(ResourcesPlugin.getWorkspace().getRoot() .getProjects()); PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() { public void run() { new NewProjectAction(PlatformUI.getWorkbench() .getActiveWorkbenchWindow()).run(); } }); IWorkspaceRunnable wsr = new IWorkspaceRunnable() { public void run(IProgressMonitor actMonitor) throws CoreException { IProject[] currentProjects = ResourcesPlugin.getWorkspace() .getRoot().getProjects(); for (IProject current : currentProjects) if (!previousProjects.contains(current)) { ConnectProviderOperation cpo = new ConnectProviderOperation( current, repoDir[0]); cpo.execute(actMonitor); } } }; try { ResourcesPlugin.getWorkspace().run(wsr, monitor); } catch (CoreException e) { throw new InvocationTargetException(e); } break; } case GitSelectWizardPage.GENERAL_WIZARD: { final String[] projectName = new String[1]; final boolean[] defaultLocation = new boolean[1]; final String[] path = new String[1]; final File[] repoDir = new File[1]; // get the data from the page in the UI thread PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() { public void run() { projectName[0] = createGeneralProjectPage.getProjectName(); defaultLocation[0] = createGeneralProjectPage .isDefaultLocation(); path[0] = importWithDirectoriesPage.getPath(); repoDir[0] = getTargetRepository().getDirectory(); } }); try { IWorkspaceRunnable wsr = new IWorkspaceRunnable() { public void run(IProgressMonitor actMonitor) 
throws CoreException { final IProjectDescription desc = ResourcesPlugin .getWorkspace().newProjectDescription( projectName[0]); desc.setLocation(new Path(path[0])); IProject prj = ResourcesPlugin.getWorkspace().getRoot() .getProject(desc.getName()); prj.create(desc, actMonitor); prj.open(actMonitor); ConnectProviderOperation cpo = new ConnectProviderOperation( prj, repoDir[0]); cpo.execute(new NullProgressMonitor()); ResourcesPlugin.getWorkspace().getRoot().refreshLocal( IResource.DEPTH_ONE, actMonitor); } }; ResourcesPlugin.getWorkspace().run(wsr, monitor); } catch (CoreException e) { throw new InvocationTargetException(e); } break; } } } static IDialogSettings getImportWizardDialogSettings() { IDialogSettings settings = Activator.getDefault().getDialogSettings(); IDialogSettings wizardSettings = settings .getSection(GitImportWizard.GIT_IMPORT_SECTION); if (wizardSettings == null) { wizardSettings = settings .addNewSection(GitImportWizard.GIT_IMPORT_SECTION); } return wizardSettings; } }
org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/clone/GitImportWizard.java
/******************************************************************************* * Copyright (c) 2010-2013 SAP AG and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * Mathias Kinzler (SAP AG) - initial implementation * Stefan Lay (SAP AG) - improvements *******************************************************************************/ package org.eclipse.egit.ui.internal.clone; import java.io.File; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import org.eclipse.core.resources.IProject; import org.eclipse.core.resources.IProjectDescription; import org.eclipse.core.resources.IResource; import org.eclipse.core.resources.IWorkspaceRunnable; import org.eclipse.core.resources.ResourcesPlugin; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.NullProgressMonitor; import org.eclipse.core.runtime.Path; import org.eclipse.egit.core.op.ConnectProviderOperation; import org.eclipse.egit.ui.Activator; import org.eclipse.egit.ui.internal.UIIcons; import org.eclipse.egit.ui.internal.UIText; import org.eclipse.egit.ui.internal.clone.GitCloneSourceProviderExtension.CloneSourceProvider; import org.eclipse.egit.ui.internal.provisional.wizards.GitRepositoryInfo; import org.eclipse.egit.ui.internal.provisional.wizards.IRepositorySearchResult; import org.eclipse.egit.ui.internal.provisional.wizards.NoRepositoryInfoException; import org.eclipse.jface.dialogs.IDialogSettings; import org.eclipse.jface.operation.IRunnableWithProgress; import org.eclipse.jface.viewers.IStructuredSelection; import 
org.eclipse.jface.wizard.IWizardPage; import org.eclipse.jgit.lib.Constants; import org.eclipse.jgit.lib.Repository; import org.eclipse.ui.IImportWizard; import org.eclipse.ui.IWorkbench; import org.eclipse.ui.IWorkingSet; import org.eclipse.ui.PlatformUI; import org.eclipse.ui.actions.NewProjectAction; /** * A wizard which allows to optionally clone a repository and to import projects from a repository. */ public class GitImportWizard extends AbstractGitCloneWizard implements IImportWizard { private static final String GIT_IMPORT_SECTION = "GitImportWizard"; //$NON-NLS-1$ private GitSelectRepositoryPage selectRepoPage = new GitSelectRepositoryPage(); private GitSelectWizardPage importWithDirectoriesPage = new GitSelectWizardPage(){ public void setVisible(boolean visible) { if (existingRepo == null && visible && (cloneDestination.cloneSettingsChanged())) { setCallerRunsCloneOperation(true); try { final GitRepositoryInfo repositoryInfo = currentSearchResult.getGitRepositoryInfo(); performClone(repositoryInfo); importWithDirectoriesPage.getControl().getDisplay().asyncExec(new Runnable() { public void run() { runCloneOperation(getContainer(), repositoryInfo); cloneDestination.saveSettingsForClonedRepo(); }}); } catch (URISyntaxException e) { Activator.error(UIText.GitImportWizard_errorParsingURI, e); } catch (NoRepositoryInfoException e) { Activator.error(UIText.GitImportWizard_noRepositoryInfo, e); } catch (Exception e) { Activator.error(e.getMessage(), e); } } super.setVisible(visible); } }; private GitProjectsImportPage projectsImportPage = new GitProjectsImportPage() ; private GitCreateGeneralProjectPage createGeneralProjectPage = new GitCreateGeneralProjectPage(); private Repository existingRepo; /** * The default constructor */ public GitImportWizard() { this(null); } /** * Construct the import wizard based on given repository search result. The * wizard skips the repository location page in this case. 
* * @param searchResult * the search result to initialize the import wizard with. */ public GitImportWizard(IRepositorySearchResult searchResult) { super(searchResult); setWindowTitle(UIText.GitImportWizard_WizardTitle); setDefaultPageImageDescriptor(UIIcons.WIZBAN_IMPORT_REPO); setDialogSettings(getImportWizardDialogSettings()); } @Override protected void addPreClonePages() { if (!hasSearchResult()) addPage(selectRepoPage); } @Override protected void addPostClonePages() { addPage(importWithDirectoriesPage); addPage(projectsImportPage); addPage(createGeneralProjectPage); } @Override protected List<CloneSourceProvider> getCloneSourceProviders() { List<CloneSourceProvider> cloneSourceProvider = super.getCloneSourceProviders(); cloneSourceProvider.add(0, CloneSourceProvider.LOCAL); return cloneSourceProvider; } public void init(IWorkbench workbench, IStructuredSelection selection) { // nothing to do } @Override public IWizardPage getNextPage(IWizardPage page) { if (page == selectRepoPage) { existingRepo = selectRepoPage.getRepository(); importWithDirectoriesPage.setRepository(selectRepoPage .getRepository()); return importWithDirectoriesPage; } else if (page == cloneDestination) { existingRepo = null; importWithDirectoriesPage.setRepository(getTargetRepository()); return importWithDirectoriesPage; } else if (page == importWithDirectoriesPage) switch (importWithDirectoriesPage.getWizardSelection()) { case GitSelectWizardPage.EXISTING_PROJECTS_WIZARD: projectsImportPage.setProjectsList(importWithDirectoriesPage .getPath()); return projectsImportPage; case GitSelectWizardPage.NEW_WIZARD: return null; case GitSelectWizardPage.GENERAL_WIZARD: createGeneralProjectPage.setPath(importWithDirectoriesPage .getPath()); return createGeneralProjectPage; } else if (page == createGeneralProjectPage || page == projectsImportPage) return null; return super.getNextPage(page); } private Repository getTargetRepository() { if (existingRepo != null) return existingRepo; else try { return 
org.eclipse.egit.core.Activator .getDefault() .getRepositoryCache() .lookupRepository( new File(cloneDestination.getDestinationFile(), Constants.DOT_GIT)); } catch (IOException e) { Activator .error("Error looking up repository at " + cloneDestination.getDestinationFile(), e); //$NON-NLS-1$ return null; } } @Override public boolean performFinish() { try { getContainer().run(true, true, new IRunnableWithProgress() { public void run(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException { importProjects(monitor); } }); } catch (InvocationTargetException e) { Activator .handleError(e.getCause().getMessage(), e.getCause(), true); return false; } catch (InterruptedException e) { Activator.handleError( UIText.GitCreateProjectViaWizardWizard_AbortedMessage, e, true); return false; } return true; } @Override public boolean canFinish() { switch (importWithDirectoriesPage.getWizardSelection()) { case GitSelectWizardPage.EXISTING_PROJECTS_WIZARD: return projectsImportPage.isPageComplete(); case GitSelectWizardPage.NEW_WIZARD: return true; case GitSelectWizardPage.GENERAL_WIZARD: return createGeneralProjectPage.isPageComplete(); } return super.canFinish(); } private void importProjects(IProgressMonitor monitor) throws InvocationTargetException, InterruptedException { switch (importWithDirectoriesPage.getWizardSelection()) { case GitSelectWizardPage.EXISTING_PROJECTS_WIZARD: { final Set<ProjectRecord> projectsToCreate = new HashSet<ProjectRecord>(); final List<IWorkingSet> workingSets = new ArrayList<IWorkingSet>(); final Repository[] repository = new Repository[1]; // get the data from the pages in the UI thread PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() { public void run() { projectsToCreate.addAll(projectsImportPage .getCheckedProjects()); IWorkingSet[] workingSetArray = projectsImportPage .getSelectedWorkingSets(); workingSets.addAll(Arrays.asList(workingSetArray)); repository[0] = getTargetRepository(); 
projectsImportPage.saveWidgetValues(); } }); ProjectUtils.createProjects(projectsToCreate, repository[0], workingSets.toArray(new IWorkingSet[workingSets.size()]), monitor); break; } case GitSelectWizardPage.NEW_WIZARD: { final File[] repoDir = new File[1]; PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() { public void run() { repoDir[0] = getTargetRepository().getDirectory(); } }); final List<IProject> previousProjects = Arrays .asList(ResourcesPlugin.getWorkspace().getRoot() .getProjects()); PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() { public void run() { new NewProjectAction(PlatformUI.getWorkbench() .getActiveWorkbenchWindow()).run(); } }); IWorkspaceRunnable wsr = new IWorkspaceRunnable() { public void run(IProgressMonitor actMonitor) throws CoreException { IProject[] currentProjects = ResourcesPlugin.getWorkspace() .getRoot().getProjects(); for (IProject current : currentProjects) if (!previousProjects.contains(current)) { ConnectProviderOperation cpo = new ConnectProviderOperation( current, repoDir[0]); cpo.execute(actMonitor); } } }; try { ResourcesPlugin.getWorkspace().run(wsr, monitor); } catch (CoreException e) { throw new InvocationTargetException(e); } break; } case GitSelectWizardPage.GENERAL_WIZARD: { final String[] projectName = new String[1]; final boolean[] defaultLocation = new boolean[1]; final String[] path = new String[1]; final File[] repoDir = new File[1]; // get the data from the page in the UI thread PlatformUI.getWorkbench().getDisplay().syncExec(new Runnable() { public void run() { projectName[0] = createGeneralProjectPage.getProjectName(); defaultLocation[0] = createGeneralProjectPage .isDefaultLocation(); path[0] = importWithDirectoriesPage.getPath(); repoDir[0] = getTargetRepository().getDirectory(); } }); try { IWorkspaceRunnable wsr = new IWorkspaceRunnable() { public void run(IProgressMonitor actMonitor) throws CoreException { final IProjectDescription desc = ResourcesPlugin 
.getWorkspace().newProjectDescription( projectName[0]); desc.setLocation(new Path(path[0])); IProject prj = ResourcesPlugin.getWorkspace().getRoot() .getProject(desc.getName()); prj.create(desc, actMonitor); prj.open(actMonitor); ConnectProviderOperation cpo = new ConnectProviderOperation( prj, repoDir[0]); cpo.execute(new NullProgressMonitor()); ResourcesPlugin.getWorkspace().getRoot().refreshLocal( IResource.DEPTH_ONE, actMonitor); } }; ResourcesPlugin.getWorkspace().run(wsr, monitor); } catch (CoreException e) { throw new InvocationTargetException(e); } break; } } } static IDialogSettings getImportWizardDialogSettings() { IDialogSettings settings = Activator.getDefault().getDialogSettings(); IDialogSettings wizardSettings = settings .getSection(GitImportWizard.GIT_IMPORT_SECTION); if (wizardSettings == null) { wizardSettings = settings .addNewSection(GitImportWizard.GIT_IMPORT_SECTION); } return wizardSettings; } }
Import wizard: Move page init code from getNextPage to setVisible getNextPage can be called multiple times (and even before the page is really shown), so getNextPage should not be doing any work for the page. setVisible seems to be the right place for this, see other wizards. With this, the logic is only called once and it seems to fix the problem described in the comments of I23939def6d1a3b88a0812b53fee98a403a664c23. Change-Id: I5411af8279455921fe9544039d2a94dc8e6f99bf Signed-off-by: Robin Stocker <[email protected]>
org.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/clone/GitImportWizard.java
Import wizard: Move page init code from getNextPage to setVisible
<ide><path>rg.eclipse.egit.ui/src/org/eclipse/egit/ui/internal/clone/GitImportWizard.java <ide> } <ide> }; <ide> <del> private GitProjectsImportPage projectsImportPage = new GitProjectsImportPage() ; <del> <del> private GitCreateGeneralProjectPage createGeneralProjectPage = new GitCreateGeneralProjectPage(); <add> private GitProjectsImportPage projectsImportPage = new GitProjectsImportPage() { <add> public void setVisible(boolean visible) { <add> if (visible) <add> setProjectsList(importWithDirectoriesPage.getPath()); <add> super.setVisible(visible); <add> } <add> }; <add> <add> private GitCreateGeneralProjectPage createGeneralProjectPage = new GitCreateGeneralProjectPage() { <add> public void setVisible(boolean visible) { <add> if (visible) <add> setPath(importWithDirectoriesPage.getPath()); <add> super.setVisible(visible); <add> } <add> }; <ide> <ide> private Repository existingRepo; <ide> <ide> } else if (page == importWithDirectoriesPage) <ide> switch (importWithDirectoriesPage.getWizardSelection()) { <ide> case GitSelectWizardPage.EXISTING_PROJECTS_WIZARD: <del> projectsImportPage.setProjectsList(importWithDirectoriesPage <del> .getPath()); <ide> return projectsImportPage; <ide> case GitSelectWizardPage.NEW_WIZARD: <ide> return null; <ide> case GitSelectWizardPage.GENERAL_WIZARD: <del> createGeneralProjectPage.setPath(importWithDirectoriesPage <del> .getPath()); <ide> return createGeneralProjectPage; <del> <ide> } <ide> else if (page == createGeneralProjectPage <ide> || page == projectsImportPage)
JavaScript
mit
9cbacd89650a3faff58a6f0aa7c93b97ba39940c
0
TallerWebSolutions/choko,TallerWebSolutions/choko
'use strict'; /** * @file Form extension controllers. */ angular.module('choko') .controller('FormController', ['$scope', function ($scope) { $scope.form.id = $scope.form.id || 'form-' + $scope.form.name; }]) .controller('ElementController', ['$scope', function ($scope) { var elementName = !$scope.element.isSubform ? $scope.element.name : $scope.subform.name + '-' + $scope.element.name; $scope.element.template = $scope.element.template || '/templates/' + $scope.element.type + '.html'; $scope.element.id = $scope.element.id || 'element-' + $scope.form.name + '-' + elementName; }]) .controller('FileElementController', ['$scope', '$controller', '$upload', function ($scope, $controller, $upload) { // Inherit ElementController. $controller('ElementController', { $scope: $scope }); $scope.progress = 0; // Initialize files container. // @todo support multiple files. if (!$scope.subform) { var file = $scope.data[$scope.element.name] || null; $scope.data[$scope.element.name] = file instanceof Object ? $scope.data[$scope.element.name].id : null; } else { var file = $scope.data[$scope.subform.name][$scope.element.name] || null; $scope.data[$scope.subform.name][$scope.element.name] = file instanceof Object ? file.id : null; }; $scope.onFileSelect = function($files) { for (var i = 0; i < $files.length; i++) { var file = $files[i]; $scope.upload = $upload.upload({ url: '/file', file: file }) .progress(function(evt) { $scope.progress = parseInt(100.0 * evt.loaded / evt.total); }) .success(function(data, status, headers, config) { if (!$scope.subform) { $scope.data[$scope.element.name] = data.data.id; } else{ $scope.data[$scope.subform.name][$scope.element.name] = data.data.id; }; }); } }; }]) .controller('ButtonController', ['$scope', '$controller', function ($scope, $controller) { // Inherit ElementController. 
$controller('ElementController', { $scope: $scope }); $scope.call = function(func, args) { $scope[func].apply(this, args); }; }]) .controller('WYSIWYGController', ['$scope', function ($scope) { $scope.options = { height: $scope.element.height || 300, toolbar: [ ['style', ['style']], ['style', ['bold', 'italic', 'underline', 'clear']], ['para', ['ul', 'ol', 'paragraph']], ['insert', ['picture', 'video', 'link']], ['table', ['table']] ] }; }]) .controller('ReferenceElementController', ['$scope', '$controller', 'Choko', 'Params', function ($scope, $controller, Choko, Params) { // Inherit ElementController. $controller('ElementController', { $scope: $scope }); // Parse query params. Object.keys($scope.element.reference.query || {}).forEach(function(param) { $scope.element.reference.query[param] = Params.parse($scope.element.reference.query[param], $scope); }); // Parse reference params. Object.keys($scope.element.reference.params || {}).forEach(function(param) { $scope.element.reference.params[param] = Params.parse($scope.element.reference.params[param], $scope); }); var query = { type: $scope.element.reference.type }; // Add element defined query. if ($scope.element.reference.query) { angular.extend(query, $scope.element.reference.query); } // Get reference items to make a options list. $scope.element.options = Choko.get(query); $scope.element.options.$promise.then(function(response) { $scope.element.options = response; // Use radios if less then 5 options. $scope.fewOptions = ($scope.element.options && Object.keys($scope.element.options).length <= 5); }); // Initialize data container if needed. $scope.data[$scope.element.name] = $scope.data[$scope.element.name] || []; // Toggle selection for a given option by name. $scope.toggleSelection = function(option) { var index = $scope.data[$scope.element.name].indexOf(option); // Is currently selected. if (index > -1) { $scope.data[$scope.element.name].splice(index, 1); } // Is newly selected. 
else { $scope.data[$scope.element.name].push(option); } }; }]) .controller('InlineReferenceElementController', ['$scope', '$controller', 'Choko', function ($scope, $controller, Choko) { // Inherit ElementController. $controller('ElementController', { $scope: $scope }); var multiple = $scope.element.reference.multiple; // Subform errors are handled separately. $scope.errors = null; if (multiple) { // Initialize items container. if ($scope.data[$scope.element.name]) { $scope.items = $scope.data[$scope.element.name]; } else { $scope.items = $scope.data[$scope.element.name] = []; } // Initilize local data container. $scope.data = {}; $scope.saveItem = function(key) { // @todo: validate item. // Add item and cleanup data container and items. if (key != undefined) { $scope.items[key] = $scope.data; } else { $scope.items.push($scope.data); } $scope.data = {}; // Reset form to original state. delete $scope.element.subform; }; $scope.removeItem = function(index) { $scope.items.splice(index, 1); }; } else { if (!$scope.data[$scope.element.name]) { $scope.data[$scope.element.name] = {}; } } $scope.setSubForm = function(type, sub, data, key) { // Start by destroying the subform and its data. // @todo: eventually we may want to add a confirmation, if form is "dirty". delete $scope.element.subform; $scope.subform = {}; // Get the new subform from the REST server. Choko.get({type: 'form', key: 'type-' + type}, function(response) { var subform = $scope.element.subform = response; $scope.subform.name = $scope.element.name; // We are editing a item, store data. if (data) { $scope.editing = true; // Make a copy of original data for restoring on cancel. 
$scope.data = angular.copy(data); } else { $scope.editing = false; } if (multiple) { subform.elements.push({ name: 'add', title: 'Save', type: 'button', click: 'saveItem', arguments: [key], classes: ['btn-default'], weight: 15 }); subform.elements.push({ name: 'cancel', title: 'Cancel', type: 'button', click: 'cancel', classes: ['btn-link'], weight: 20 }); } if (sub) { // Set subform element type to subform short name. $scope.data.type = subform.shortName; } }); }; if (multiple) { if ($scope.element.reference.subtypes && $scope.element.reference.subtypes.length == 1) { $scope.setSubForm($scope.element.reference.subtypes[0]); } $scope.cancel = function() { delete $scope.element.subform; $scope.data = {}; }; } else { $scope.setSubForm($scope.element.reference.type); } }]) .controller('InlineReferenceElementItemController', ['$scope', function ($scope) { $scope.editItem = function() { $scope.setSubForm($scope.typeName(), !!$scope.element.reference.subtypes, $scope.item, $scope.key); }; $scope.typeName = function() { var typeName = $scope.element.reference.type; // If it has subtypes, i.e. it's a polymorphic type, get the actual type // being added to load the correct form. if ($scope.element.reference.subtypes) { $scope.element.reference.subtypes.forEach(function(subtype) { if (subtype.shortName == $scope.item.type) { typeName = subtype.name; } }); } return typeName; }; }]) .controller('TagElementController', ['$scope', '$controller', 'Choko', function($scope, $controller, Choko){ // Inherit ElementController. 
$controller('ReferenceElementController', { $scope: $scope }); $scope.tags = [] $scope.filter = {}; $scope.element.options.$promise.then(function(options) { delete options.$promise; delete options.$resolved; if(options) { Object.keys(options).forEach(function (name) { if (options[name].isTag) delete options[name].isTag; $scope.tags.push(options[name]); }); } var selectedTags = $scope.data[$scope.element.name] || []; $scope.data[$scope.element.name] = []; selectedTags.forEach(function(selectedTag) { $scope.data[$scope.element.name].push(options[selectedTag]); }); }); $scope.tagTransform = function (newTag) { var item = {}; item[$scope.element.reference.titleField] = newTag; angular.extend(item, $scope.element.reference.params); return item; }; }])
applications/default/extensions/form/public/js/form.controllers.js
'use strict'; /** * @file Form extension controllers. */ angular.module('choko') .controller('FormController', ['$scope', function ($scope) { $scope.form.id = $scope.form.id || 'form-' + $scope.form.name; }]) .controller('ElementController', ['$scope', function ($scope) { var elementName = !$scope.element.isSubform ? $scope.element.name : $scope.subform.name + '-' + $scope.element.name; $scope.element.template = $scope.element.template || '/templates/' + $scope.element.type + '.html'; $scope.element.id = $scope.element.id || 'element-' + $scope.form.name + '-' + elementName; }]) .controller('FileElementController', ['$scope', '$controller', '$upload', function ($scope, $controller, $upload) { // Inherit ElementController. $controller('ElementController', { $scope: $scope }); $scope.progress = 0; // Initialize files container. // @todo support multiple files. if (!$scope.subform) { var file = $scope.data[$scope.element.name] || null; $scope.data[$scope.element.name] = file instanceof Object ? $scope.data[$scope.element.name].id : null; } else { var file = $scope.data[$scope.subform.name][$scope.element.name] || null; $scope.data[$scope.subform.name][$scope.element.name] = file instanceof Object ? file.id : null; }; $scope.onFileSelect = function($files) { for (var i = 0; i < $files.length; i++) { var file = $files[i]; $scope.upload = $upload.upload({ url: '/file', file: file }) .progress(function(evt) { $scope.progress = parseInt(100.0 * evt.loaded / evt.total); }) .success(function(data, status, headers, config) { if (!$scope.subform) { $scope.data[$scope.element.name] = data.data.id; } else{ $scope.data[$scope.subform.name][$scope.element.name] = data.data.id; }; }); } }; }]) .controller('ButtonController', ['$scope', '$controller', function ($scope, $controller) { // Inherit ElementController. 
$controller('ElementController', { $scope: $scope }); $scope.call = function(func, args) { $scope[func].apply(this, args); }; }]) .controller('WYSIWYGController', ['$scope', function ($scope) { $scope.options = { height: $scope.element.height || 300, toolbar: [ ['style', ['style']], ['style', ['bold', 'italic', 'underline', 'clear']], ['para', ['ul', 'ol', 'paragraph']], ['insert', ['picture', 'video', 'link']], ['table', ['table']] ] }; }]) .controller('ReferenceElementController', ['$scope', '$controller', 'Choko', 'Params', function ($scope, $controller, Choko, Params) { // Inherit ElementController. $controller('ElementController', { $scope: $scope }); // Parse query params. Object.keys($scope.element.reference.query || {}).forEach(function(param) { $scope.element.reference.query[param] = Params.parse($scope.element.reference.query[param], $scope); }); // Parse reference params. Object.keys($scope.element.reference.params || {}).forEach(function(param) { $scope.element.reference.params[param] = Params.parse($scope.element.reference.params[param], $scope); }); var query = { type: $scope.element.reference.type }; // Add element defined query. if ($scope.element.reference.query) { angular.extend(query, $scope.element.reference.query); } // Get reference items to make a options list. $scope.element.options = Choko.get(query); $scope.element.options.$promise.then(function(response) { $scope.element.options = response; // Use radios if less then 5 options. $scope.fewOptions = ($scope.element.options && Object.keys($scope.element.options).length <= 5); }); // Initialize data container if needed. $scope.data[$scope.element.name] = $scope.data[$scope.element.name] || []; // Toggle selection for a given option by name. $scope.toggleSelection = function(option) { var index = $scope.data[$scope.element.name].indexOf(option); // Is currently selected. if (index > -1) { $scope.data[$scope.element.name].splice(index, 1); } // Is newly selected. 
else { $scope.data[$scope.element.name].push(option); } }; }]) .controller('InlineReferenceElementController', ['$scope', '$controller', 'Choko', function ($scope, $controller, Choko) { // Inherit ElementController. $controller('ElementController', { $scope: $scope }); var multiple = $scope.element.reference.multiple; // Subform errors are handled separately. $scope.errors = null; if (multiple) { // Initialize items container. if ($scope.data[$scope.element.name]) { $scope.items = $scope.data[$scope.element.name]; } else { $scope.items = $scope.data[$scope.element.name] = []; } // Initilize local data container. $scope.data = {}; $scope.saveItem = function(key) { // @todo: validate item. // Add item and cleanup data container and items. if (key != undefined) { $scope.items[key] = $scope.data; } else { $scope.items.push($scope.data); } $scope.data = {}; // Reset form to original state. delete $scope.element.subform; }; $scope.removeItem = function(index) { $scope.items.splice(index, 1); }; } else { if (!$scope.data[$scope.element.name]) { $scope.data[$scope.element.name] = {}; } } $scope.setSubForm = function(type, sub, data, key) { // Start by destroying the subform and its data. // @todo: eventually we may want to add a confirmation, if form is "dirty". delete $scope.element.subform; $scope.subform = {}; // Get the new subform from the REST server. Choko.get({type: 'form', key: 'type-' + type}, function(response) { var subform = $scope.element.subform = response; $scope.subform.name = $scope.element.name; // We are editing a item, store data. if (data) { $scope.editing = true; // Make a copy of original data for restoring on cancel. 
$scope.data = angular.copy(data); } else { $scope.editing = false; } if (multiple) { subform.elements.push({ name: 'add', title: 'Save', type: 'button', click: 'saveItem', arguments: [key], classes: ['btn-default'], weight: 15 }); subform.elements.push({ name: 'cancel', title: 'Cancel', type: 'button', click: 'cancel', classes: ['btn-link'], weight: 20 }); } if (sub) { // Set subform element type to subform short name. $scope.data.type = subform.shortName; } }); }; if (multiple) { if ($scope.element.reference.subtypes && $scope.element.reference.subtypes.length == 1) { $scope.setSubForm($scope.element.reference.subtypes[0]); } $scope.cancel = function() { delete $scope.element.subform; $scope.data = {}; }; } else { $scope.setSubForm($scope.element.reference.type); } }]) .controller('InlineReferenceElementItemController', ['$scope', function ($scope) { $scope.editItem = function() { $scope.setSubForm($scope.typeName(), !!$scope.element.reference.subtypes, $scope.item, $scope.key); }; $scope.typeName = function() { var typeName = $scope.element.reference.type; // If it has subtypes, i.e. it's a polymorphic type, get the actual type // being added to load the correct form. if ($scope.element.reference.subtypes) { $scope.element.reference.subtypes.forEach(function(subtype) { if (subtype.shortName == $scope.item.type) { typeName = subtype.name; } }); } return typeName; }; }]) .controller('TagElementController', ['$scope', '$controller', 'Choko', function($scope, $controller, Choko){ // Inherit ElementController. 
$controller('ReferenceElementController', { $scope: $scope }); $scope.tags = [] $scope.filter = {}; $scope.element.options.$promise.then(function(options) { delete options.$promise; delete options.$resolved; if(options) { Object.keys(options).forEach(function (name) { $scope.tags.push(options[name]); }); } var selectedTags = $scope.data[$scope.element.name] || []; $scope.data[$scope.element.name] = []; selectedTags.forEach(function(selectedTag) { $scope.data[$scope.element.name].push(options[selectedTag]); }); }); $scope.tagTransform = function (newTag) { var item = {}; item[$scope.element.reference.titleField] = newTag; angular.extend(item, $scope.element.reference.params); return item; }; }])
Removed isTag attribute from tags elements.
applications/default/extensions/form/public/js/form.controllers.js
Removed isTag attribute from tags elements.
<ide><path>pplications/default/extensions/form/public/js/form.controllers.js <ide> <ide> if(options) { <ide> Object.keys(options).forEach(function (name) { <add> if (options[name].isTag) delete options[name].isTag; <ide> $scope.tags.push(options[name]); <ide> }); <ide> }
Java
apache-2.0
54f86e1da3546ea25f152b0335049f5b96fef4c2
0
dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android,dimagi/commcare-android
package org.commcare.dalvik.activities; import java.math.BigInteger; import java.security.MessageDigest; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import org.commcare.android.database.SqlStorage; import org.commcare.android.database.app.models.UserKeyRecord; import org.commcare.android.database.global.models.ApplicationRecord; import org.commcare.android.framework.CommCareActivity; import org.commcare.android.framework.ManagedUi; import org.commcare.android.framework.UiElement; import org.commcare.android.javarosa.AndroidLogger; import org.commcare.android.models.notifications.MessageTag; import org.commcare.android.models.notifications.NotificationMessage; import org.commcare.android.models.notifications.NotificationMessageFactory; import org.commcare.android.models.notifications.NotificationMessageFactory.StockMessages; import org.commcare.android.tasks.DataPullTask; import org.commcare.android.tasks.ManageKeyRecordListener; import org.commcare.android.tasks.ManageKeyRecordTask; import org.commcare.android.tasks.templates.HttpCalloutTask.HttpCalloutOutcomes; import org.commcare.android.util.DemoUserUtil; import org.commcare.android.util.SessionUnavailableException; import org.commcare.android.view.ViewUtil; import org.commcare.dalvik.BuildConfig; import org.commcare.dalvik.R; import org.commcare.dalvik.activities.CommCareHomeActivity; import org.commcare.dalvik.application.CommCareApp; import org.commcare.dalvik.application.CommCareApplication; import org.commcare.dalvik.dialogs.CustomProgressDialog; import org.commcare.dalvik.preferences.CommCarePreferences; import org.javarosa.core.services.Logger; import org.javarosa.core.services.locale.Localization; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.graphics.Bitmap; import android.os.Bundle; import android.preference.PreferenceManager; import android.text.Editable; import android.text.InputType; import 
android.text.TextWatcher; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewTreeObserver.OnGlobalLayoutListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemSelectedListener; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.EditText; import android.widget.ImageView; import android.widget.Spinner; import android.widget.TextView; import android.widget.Toast; /** * @author ctsims */ @ManagedUi(R.layout.screen_login) public class LoginActivity extends CommCareActivity<LoginActivity> implements OnItemSelectedListener { private static final String TAG = LoginActivity.class.getSimpleName(); public final static int MENU_DEMO = Menu.FIRST; public final static String NOTIFICATION_MESSAGE_LOGIN = "login_message"; public final static String ALREADY_LOGGED_IN = "la_loggedin"; private final static String KEY_LAST_APP = "id_of_last_selected"; @UiElement(value=R.id.login_button, locale="login.button") Button login; @UiElement(value = R.id.screen_login_bad_password, locale = "login.bad.password") TextView errorBox; @UiElement(R.id.edit_username) EditText username; @UiElement(R.id.edit_password) EditText password; @UiElement(R.id.screen_login_banner_pane) View banner; @UiElement(R.id.str_version) TextView versionDisplay; @UiElement(R.id.login_button) Button loginButton; public static final int TASK_KEY_EXCHANGE = 1; SqlStorage<UserKeyRecord> storage; private Map<String,ApplicationRecord> namesToRecords = new HashMap<>(); private int editTextColor; private View.OnKeyListener l; private final TextWatcher textWatcher = new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void onTextChanged(CharSequence s, int start, int before, int count) { } @Override public void afterTextChanged(Editable s) { setStyleDefault(); } }; 
public void setStyleDefault() { LoginBoxesStatus.Normal.setStatus(this); username.setCompoundDrawablesWithIntrinsicBounds(getResources().getDrawable(R.drawable.icon_user_neutral50), null, null, null); password.setCompoundDrawablesWithIntrinsicBounds(getResources().getDrawable(R.drawable.icon_lock_neutral50), null, null, null); loginButton.setBackgroundColor(getResources().getColor(R.color.cc_brand_color)); loginButton.setTextColor(getResources().getColor(R.color.cc_neutral_bg)); } public enum LoginBoxesStatus { Normal(R.color.login_edit_text_color), Error(R.color.login_edit_text_color_error); private final int colorAttr; LoginBoxesStatus(int colorAttr){ this.colorAttr = colorAttr; } public int getColor(Context ctx){ int color = ctx.getResources().getColor(colorAttr); if (BuildConfig.DEBUG) { Log.d("LoginBoxesStatus", "Color for status " + this.toString() + " is: " + color); } return color; } public void setStatus(LoginActivity lact){ lact.setLoginBoxesColor(this.getColor(lact)); } } /* * (non-Javadoc) * @see org.commcare.android.framework.CommCareActivity#onCreate(android.os.Bundle) */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); username.setInputType(InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS | InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD); LoginBoxesStatus.Normal.setStatus(this); final SharedPreferences prefs = CommCareApplication._().getCurrentApp().getAppPreferences(); //Only on the initial creation if(savedInstanceState == null) { String lastUser = prefs.getString(CommCarePreferences.LAST_LOGGED_IN_USER, null); if(lastUser != null) { username.setText(lastUser); password.requestFocus(); } } login.setOnClickListener(new OnClickListener() { public void onClick(View arg0) { errorBox.setVisibility(View.GONE); ViewUtil.hideVirtualKeyboard(LoginActivity.this); //Try logging in locally if(tryLocalLogin(false)) { return; } startOta(); } }); username.addTextChangedListener(textWatcher); 
password.addTextChangedListener(textWatcher); versionDisplay.setText(CommCareApplication._().getCurrentVersionString()); username.setHint(Localization.get("login.username")); password.setHint(Localization.get("login.password")); final View activityRootView = findViewById(R.id.screen_login_main); activityRootView.getViewTreeObserver().addOnGlobalLayoutListener(new OnGlobalLayoutListener() { /* * (non-Javadoc) * @see android.view.ViewTreeObserver.OnGlobalLayoutListener#onGlobalLayout() */ @Override public void onGlobalLayout() { int hideAll = LoginActivity.this.getResources().getInteger(R.integer.login_screen_hide_all_cuttoff); int hideBanner = LoginActivity.this.getResources().getInteger(R.integer.login_screen_hide_banner_cuttoff); int height = activityRootView.getHeight(); if(height < hideAll) { versionDisplay.setVisibility(View.GONE); banner.setVisibility(View.GONE); } else if(height < hideBanner) { banner.setVisibility(View.GONE); } else { // Override default CommCare banner if requested String customBannerURI = prefs.getString(CommCarePreferences.BRAND_BANNER_LOGIN, ""); if (!"".equals(customBannerURI)) { Bitmap bitmap = ViewUtil.inflateDisplayImage(LoginActivity.this, customBannerURI); if (bitmap != null) { ImageView bannerView = (ImageView) banner.findViewById(R.id.screen_login_top_banner); bannerView.setImageBitmap(bitmap); } } banner.setVisibility(View.VISIBLE); } } }); } public String getActivityTitle() { //TODO: "Login"? return null; } private void startOta() { // We should go digest auth this user on the server and see whether to // pull them down. SharedPreferences prefs = CommCareApplication._().getCurrentApp().getAppPreferences(); // TODO Auto-generated method stub // TODO: we don't actually always want to do this. We need to have an // alternate route where we log in locally and sync (with unsent form // submissions) more centrally. 
DataPullTask<LoginActivity> dataPuller = new DataPullTask<LoginActivity>(getUsername(), password.getText().toString(), prefs.getString("ota-restore-url", LoginActivity.this.getString(R.string.ota_restore_url)), prefs.getString("key_server", LoginActivity.this.getString(R.string.key_server)), LoginActivity.this) { @Override protected void deliverResult( LoginActivity receiver, Integer result) { if (result == null) { // The task crashed unexpectedly receiver.raiseLoginMessage(StockMessages.Restore_Unknown, true); return; } switch(result) { case DataPullTask.AUTH_FAILED: receiver.raiseLoginMessage(StockMessages.Auth_BadCredentials, false); break; case DataPullTask.BAD_DATA: receiver.raiseLoginMessage(StockMessages.Remote_BadRestore, true); break; case DataPullTask.DOWNLOAD_SUCCESS: if(!tryLocalLogin(true)) { receiver.raiseLoginMessage(StockMessages.Auth_CredentialMismatch, true); } else { break; } case DataPullTask.UNREACHABLE_HOST: receiver.raiseLoginMessage(StockMessages.Remote_NoNetwork, true); break; case DataPullTask.CONNECTION_TIMEOUT: receiver.raiseLoginMessage(StockMessages.Remote_Timeout, true); break; case DataPullTask.SERVER_ERROR: receiver.raiseLoginMessage(StockMessages.Remote_ServerError, true); break; case DataPullTask.UNKNOWN_FAILURE: receiver.raiseLoginMessage(StockMessages.Restore_Unknown, true); break; } } /* * (non-Javadoc) * @see org.commcare.android.tasks.templates.CommCareTask#deliverUpdate(java.lang.Object, java.lang.Object[]) */ @Override protected void deliverUpdate(LoginActivity receiver, Integer... 
update) { if(update[0] == DataPullTask.PROGRESS_STARTED) { receiver.updateProgress(Localization.get("sync.progress.purge"), DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_CLEANED) { receiver.updateProgress(Localization.get("sync.progress.authing"), DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_AUTHED) { receiver.updateProgress(Localization.get("sync.progress.downloading"), DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_DOWNLOADING) { receiver.updateProgress(Localization.get("sync.process.downloading.progress", new String[] {String.valueOf(update[1])}), DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_PROCESSING) { receiver.updateProgress(Localization.get("sync.process.processing", new String[] {String.valueOf(update[1]), String.valueOf(update[2])}), DataPullTask.DATA_PULL_TASK_ID); receiver.updateProgressBar(update[1], update[2], DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_RECOVERY_NEEDED) { receiver.updateProgress(Localization.get("sync.recover.needed"), DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_RECOVERY_STARTED) { receiver.updateProgress(Localization.get("sync.recover.started"), DataPullTask.DATA_PULL_TASK_ID); } } /* * (non-Javadoc) * @see org.commcare.android.tasks.templates.CommCareTask#deliverError(java.lang.Object, java.lang.Exception) */ @Override protected void deliverError( LoginActivity receiver, Exception e) { receiver.raiseLoginMessage(StockMessages.Restore_Unknown, true); } }; dataPuller.connect(this); dataPuller.execute(); } /* * (non-Javadoc) * * @see android.app.Activity#onResume() */ @Override protected void onResume() { super.onResume(); try { //TODO: there is a weird circumstance where we're logging in somewhere else and this gets locked. 
if (CommCareApplication._().getSession().isActive() && CommCareApplication._().getSession().getLoggedInUser() != null) { Intent i = new Intent(); i.putExtra(ALREADY_LOGGED_IN, true); setResult(RESULT_OK, i); CommCareApplication._().clearNotifications(NOTIFICATION_MESSAGE_LOGIN); finish(); return; } } catch (SessionUnavailableException sue) { // Nothing, we're logging in here anyway } //If we arrived at LoginActivity from clicking the regular app icon, and there //are no longer any available apps, we want to redirect to CCHomeActivity if (CommCareApplication._().getReadyAppRecords().size() == 0) { Intent i = new Intent(this, CommCareHomeActivity.class); startActivity(i); } // Otherwise, update the login screen refreshView(); } private String getUsername() { return username.getText().toString().toLowerCase().trim(); } private boolean tryLocalLogin(final boolean warnMultipleAccounts) { //TODO: check username/password for emptiness return tryLocalLogin(getUsername(), password.getText().toString(), warnMultipleAccounts); } private boolean tryLocalLogin(final String username, String password, final boolean warnMultipleAccounts) { try{ // TODO: We don't actually even use this anymore other than for hte // local login count, which seems super silly. 
UserKeyRecord matchingRecord = null; int count = 0; for(UserKeyRecord record : storage()) { if(!record.getUsername().equals(username)) { continue; } count++; String hash = record.getPasswordHash(); if(hash.contains("$")) { String alg = "sha1"; String salt = hash.split("\\$")[1]; String check = hash.split("\\$")[2]; MessageDigest md = MessageDigest.getInstance("SHA-1"); BigInteger number = new BigInteger(1, md.digest((salt+password).getBytes())); String hashed = number.toString(16); while (hashed.length() < check.length()) { hashed = "0" + hashed; } if (hash.equals(alg + "$" + salt + "$" + hashed)) { matchingRecord = record; } } } final boolean triggerTooManyUsers = count > 1 && warnMultipleAccounts; ManageKeyRecordTask<LoginActivity> task = new ManageKeyRecordTask<LoginActivity>(this, TASK_KEY_EXCHANGE, username, password, CommCareApplication._().getCurrentApp(), new ManageKeyRecordListener<LoginActivity>() { @Override public void keysLoginComplete(LoginActivity r) { if(triggerTooManyUsers) { // We've successfully pulled down new user data. // Should see if the user already has a sandbox and let // them know that their old data doesn't transition r.raiseMessage(NotificationMessageFactory.message(StockMessages.Auth_RemoteCredentialsChanged), true); Logger.log(AndroidLogger.TYPE_USER, "User " + username + " has logged in for the first time with a new password. They may have unsent data in their other sandbox"); } r.done(); } @Override public void keysReadyForSync(LoginActivity r) { // TODO: we only wanna do this on the _first_ try. 
Not // subsequent ones (IE: On return from startOta) r.startOta(); } @Override public void keysDoneOther(LoginActivity r, HttpCalloutOutcomes outcome) { switch(outcome) { case AuthFailed: Logger.log(AndroidLogger.TYPE_USER, "auth failed"); r.raiseLoginMessage(StockMessages.Auth_BadCredentials, false); break; case BadResponse: Logger.log(AndroidLogger.TYPE_USER, "bad response"); r.raiseLoginMessage(StockMessages.Remote_BadRestore, true); break; case NetworkFailure: Logger.log(AndroidLogger.TYPE_USER, "bad network"); r.raiseLoginMessage(StockMessages.Remote_NoNetwork, false); break; case NetworkFailureBadPassword: Logger.log(AndroidLogger.TYPE_USER, "bad network"); r.raiseLoginMessage(StockMessages.Remote_NoNetwork_BadPass, true); break; case BadCertificate: Logger.log(AndroidLogger.TYPE_USER, "bad certificate"); r.raiseLoginMessage(StockMessages.BadSSLCertificate, false); break; case UnkownError: Logger.log(AndroidLogger.TYPE_USER, "unknown"); r.raiseLoginMessage(StockMessages.Restore_Unknown, true); break; default: return; } } }) { @Override protected void deliverUpdate(LoginActivity receiver, String... 
update) { receiver.updateProgress(update[0], TASK_KEY_EXCHANGE); } }; task.connect(this); task.execute(); return true; }catch (Exception e) { e.printStackTrace(); return false; } } private void done() { Intent i = new Intent(); setResult(RESULT_OK, i); CommCareApplication._().clearNotifications(NOTIFICATION_MESSAGE_LOGIN); finish(); } private SqlStorage<UserKeyRecord> storage() throws SessionUnavailableException{ if(storage == null) { storage = CommCareApplication._().getAppStorage(UserKeyRecord.class); } return storage; } public void finished(int status) { } /* (non-Javadoc) * @see android.app.Activity#onCreateOptionsMenu(android.view.Menu) */ @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); menu.add(0, MENU_DEMO, 0, Localization.get("login.menu.demo")).setIcon(android.R.drawable.ic_menu_preferences); return true; } /* (non-Javadoc) * @see android.app.Activity#onOptionsItemSelected(android.view.MenuItem) */ @Override public boolean onOptionsItemSelected(MenuItem item) { boolean otherResult = super.onOptionsItemSelected(item); switch(item.getItemId()) { case MENU_DEMO: //Make sure we have a demo user DemoUserUtil.checkOrCreateDemoUser(this, CommCareApplication._().getCurrentApp()); //Now try to log in as the demo user tryLocalLogin(DemoUserUtil.DEMO_USER, DemoUserUtil.DEMO_USER, false); return true; default: return otherResult; } } private void raiseLoginMessage(MessageTag messageTag, boolean showTop) { NotificationMessage message = NotificationMessageFactory.message(messageTag, NOTIFICATION_MESSAGE_LOGIN); raiseMessage(message, showTop); } private void raiseMessage(NotificationMessage message, boolean showTop) { String toastText = message.getTitle(); if (showTop) { CommCareApplication._().reportNotificationMessage(message); toastText = Localization.get("notification.for.details.wrapper", new String[] {toastText}); } //either way LoginBoxesStatus.Error.setStatus(this); 
username.setCompoundDrawablesWithIntrinsicBounds(getResources().getDrawable(R.drawable.icon_user_attnneg), null, null, null); password.setCompoundDrawablesWithIntrinsicBounds(getResources().getDrawable(R.drawable.icon_lock_attnneg), null, null, null); loginButton.setBackgroundColor(getResources().getColor(R.color.cc_attention_negative_bg)); loginButton.setTextColor(getResources().getColor(R.color.cc_attention_negative_text)); errorBox.setVisibility(View.VISIBLE); errorBox.setText(toastText); Toast.makeText(this, toastText, Toast.LENGTH_LONG).show(); } /** * Sets the login boxes (user/pass) to the given color. * @param color Color code */ private void setLoginBoxesColor(int color) { username.setTextColor(color); password.setTextColor(color); } /* * (non-Javadoc) * @see org.commcare.android.framework.CommCareActivity#generateProgressDialog(int) * * Implementation of generateProgressDialog() for DialogController -- other methods * handled entirely in CommCareActivity */ @Override public CustomProgressDialog generateProgressDialog(int taskId) { CustomProgressDialog dialog; switch (taskId) { case TASK_KEY_EXCHANGE: dialog = CustomProgressDialog.newInstance(Localization.get("key.manage.title"), Localization.get("key.manage.start"), taskId); break; case DataPullTask.DATA_PULL_TASK_ID: dialog = CustomProgressDialog.newInstance(Localization.get("sync.progress.title"), Localization.get("sync.progress.starting"), taskId); dialog.addCancelButton(); dialog.addProgressBar(); break; default: Log.w(TAG, "taskId passed to generateProgressDialog does not match " + "any valid possibilities in LoginActivity"); return null; } return dialog; } /* * (non-Javadoc) * @see org.commcare.android.framework.CommCareActivity#isBackEnabled() */ @Override public boolean isBackEnabled() { return false; } private void refreshView() { Spinner spinner = (Spinner) findViewById(R.id.app_selection_spinner); TextView message = (TextView) findViewById(R.id.welcome_msg); ArrayList<ApplicationRecord> 
readyApps = CommCareApplication._().getReadyAppRecords(); if (readyApps.size() <= 1) { spinner.setVisibility(View.GONE); message.setText(R.string.login_welcome_single); return; } message.setText(R.string.login_welcome_multiple); ArrayList<String> appNames = new ArrayList<>(); ArrayList<String> appIds = new ArrayList<>(); for (ApplicationRecord r : readyApps) { String name = r.getDisplayName(); appNames.add(name); appIds.add(r.getUniqueId()); namesToRecords.put(name, r); } ArrayAdapter<String> adapter = new ArrayAdapter<>(this, R.layout.spinner_text_view, appNames); adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); spinner.setAdapter(adapter); spinner.setOnItemSelectedListener(this); SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this); // The unique of id of the last app that was selected in the drop-down menu String lastApp = prefs.getString(KEY_LAST_APP,""); int position = 0; // If there is a last app, set the spinner selection to be that app if (!"".equals(lastApp)) { position = appIds.indexOf(lastApp); // If this last app has since been deleted, set the position to 0 if (position == -1) { position = 0; } } spinner.setSelection(position); spinner.setVisibility(View.VISIBLE); } @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this); String selected = (String) parent.getItemAtPosition(position); ApplicationRecord r = namesToRecords.get(selected); // Set the id of the last selected app prefs.edit().putString(KEY_LAST_APP, r.getUniqueId()).commit(); CommCareApplication._().initializeAppResources(new CommCareApp(r)); // Refresh UI for potential new language loadFields(false); } @Override public void onNothingSelected(AdapterView<?> parent) { return; } }
app/src/org/commcare/dalvik/activities/LoginActivity.java
package org.commcare.dalvik.activities; import java.math.BigInteger; import java.security.MessageDigest; import java.util.ArrayList; import java.util.HashMap; import java.util.Map; import org.commcare.android.database.SqlStorage; import org.commcare.android.database.app.models.UserKeyRecord; import org.commcare.android.database.global.models.ApplicationRecord; import org.commcare.android.framework.CommCareActivity; import org.commcare.android.framework.ManagedUi; import org.commcare.android.framework.UiElement; import org.commcare.android.javarosa.AndroidLogger; import org.commcare.android.models.notifications.MessageTag; import org.commcare.android.models.notifications.NotificationMessage; import org.commcare.android.models.notifications.NotificationMessageFactory; import org.commcare.android.models.notifications.NotificationMessageFactory.StockMessages; import org.commcare.android.tasks.DataPullTask; import org.commcare.android.tasks.ManageKeyRecordListener; import org.commcare.android.tasks.ManageKeyRecordTask; import org.commcare.android.tasks.templates.HttpCalloutTask.HttpCalloutOutcomes; import org.commcare.android.util.DemoUserUtil; import org.commcare.android.util.SessionUnavailableException; import org.commcare.android.view.ViewUtil; import org.commcare.dalvik.BuildConfig; import org.commcare.dalvik.R; import org.commcare.dalvik.activities.CommCareHomeActivity; import org.commcare.dalvik.application.CommCareApp; import org.commcare.dalvik.application.CommCareApplication; import org.commcare.dalvik.dialogs.CustomProgressDialog; import org.commcare.dalvik.preferences.CommCarePreferences; import org.javarosa.core.services.Logger; import org.javarosa.core.services.locale.Localization; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.graphics.Bitmap; import android.os.Bundle; import android.preference.PreferenceManager; import android.text.Editable; import android.text.InputType; import 
android.text.TextWatcher; import android.util.Log; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.view.ViewTreeObserver.OnGlobalLayoutListener; import android.widget.AdapterView; import android.widget.AdapterView.OnItemSelectedListener; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.EditText; import android.widget.ImageView; import android.widget.Spinner; import android.widget.TextView; import android.widget.Toast; /** * @author ctsims */ @ManagedUi(R.layout.screen_login) public class LoginActivity extends CommCareActivity<LoginActivity> implements OnItemSelectedListener { private static final String TAG = LoginActivity.class.getSimpleName(); public final static int MENU_DEMO = Menu.FIRST; public final static String NOTIFICATION_MESSAGE_LOGIN = "login_message"; public final static String ALREADY_LOGGED_IN = "la_loggedin"; private final static String KEY_LAST_APP = "id_of_last_selected"; @UiElement(value=R.id.login_button, locale="login.button") Button login; @UiElement(value = R.id.screen_login_bad_password, locale = "login.bad.password") TextView errorBox; @UiElement(R.id.edit_username) EditText username; @UiElement(R.id.edit_password) EditText password; @UiElement(R.id.screen_login_banner_pane) View banner; @UiElement(R.id.str_version) TextView versionDisplay; @UiElement(R.id.login_button) Button loginButton; public static final int TASK_KEY_EXCHANGE = 1; SqlStorage<UserKeyRecord> storage; private Map<String,ApplicationRecord> namesToRecords = new HashMap<>(); private int editTextColor; private View.OnKeyListener l; private final TextWatcher textWatcher = new TextWatcher() { @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void onTextChanged(CharSequence s, int start, int before, int count) { } @Override public void afterTextChanged(Editable s) { setStyleDefault(); } }; 
public void setStyleDefault() { LoginBoxesStatus.Normal.setStatus(this); username.setCompoundDrawablesWithIntrinsicBounds(getResources().getDrawable(R.drawable.icon_user_neutral50), null, null, null); password.setCompoundDrawablesWithIntrinsicBounds(getResources().getDrawable(R.drawable.icon_lock_neutral50), null, null, null); loginButton.setBackgroundColor(getResources().getColor(R.color.cc_brand_color)); loginButton.setTextColor(getResources().getColor(R.color.cc_neutral_bg)); } public enum LoginBoxesStatus { Normal(R.color.login_edit_text_color), Error(R.color.login_edit_text_color_error); private final int colorAttr; LoginBoxesStatus(int colorAttr){ this.colorAttr = colorAttr; } public int getColor(Context ctx){ int color = ctx.getResources().getColor(colorAttr); if (BuildConfig.DEBUG) { Log.d("LoginBoxesStatus", "Color for status " + this.toString() + " is: " + color); } return color; } public void setStatus(LoginActivity lact){ lact.setLoginBoxesColor(this.getColor(lact)); } } /* * (non-Javadoc) * @see org.commcare.android.framework.CommCareActivity#onCreate(android.os.Bundle) */ @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); username.setInputType(InputType.TYPE_TEXT_FLAG_NO_SUGGESTIONS | InputType.TYPE_TEXT_VARIATION_VISIBLE_PASSWORD); LoginBoxesStatus.Normal.setStatus(this); final SharedPreferences prefs = CommCareApplication._().getCurrentApp().getAppPreferences(); //Only on the initial creation if(savedInstanceState == null) { String lastUser = prefs.getString(CommCarePreferences.LAST_LOGGED_IN_USER, null); if(lastUser != null) { username.setText(lastUser); password.requestFocus(); } } login.setOnClickListener(new OnClickListener() { public void onClick(View arg0) { errorBox.setVisibility(View.GONE); ViewUtil.hideVirtualKeyboard(LoginActivity.this); //Try logging in locally if(tryLocalLogin(false)) { return; } startOta(); } }); username.addTextChangedListener(textWatcher); 
password.addTextChangedListener(textWatcher); versionDisplay.setText(CommCareApplication._().getCurrentVersionString()); username.setHint(Localization.get("login.username")); password.setHint(Localization.get("login.password")); final View activityRootView = findViewById(R.id.screen_login_main); activityRootView.getViewTreeObserver().addOnGlobalLayoutListener(new OnGlobalLayoutListener() { /* * (non-Javadoc) * @see android.view.ViewTreeObserver.OnGlobalLayoutListener#onGlobalLayout() */ @Override public void onGlobalLayout() { int hideAll = LoginActivity.this.getResources().getInteger(R.integer.login_screen_hide_all_cuttoff); int hideBanner = LoginActivity.this.getResources().getInteger(R.integer.login_screen_hide_banner_cuttoff); int height = activityRootView.getHeight(); if(height < hideAll) { versionDisplay.setVisibility(View.GONE); banner.setVisibility(View.GONE); } else if(height < hideBanner) { banner.setVisibility(View.GONE); } else { // Override default CommCare banner if requested String customBannerURI = prefs.getString(CommCarePreferences.BRAND_BANNER_LOGIN, ""); if (!"".equals(customBannerURI)) { Bitmap bitmap = ViewUtil.inflateDisplayImage(LoginActivity.this, customBannerURI); if (bitmap != null) { ImageView bannerView = (ImageView) banner.findViewById(R.id.screen_login_top_banner); bannerView.setImageBitmap(bitmap); } } banner.setVisibility(View.VISIBLE); } } }); } public String getActivityTitle() { //TODO: "Login"? return null; } private void startOta() { // We should go digest auth this user on the server and see whether to // pull them down. SharedPreferences prefs = CommCareApplication._().getCurrentApp().getAppPreferences(); // TODO Auto-generated method stub // TODO: we don't actually always want to do this. We need to have an // alternate route where we log in locally and sync (with unsent form // submissions) more centrally. 
DataPullTask<LoginActivity> dataPuller = new DataPullTask<LoginActivity>(getUsername(), password.getText().toString(), prefs.getString("ota-restore-url", LoginActivity.this.getString(R.string.ota_restore_url)), prefs.getString("key_server", LoginActivity.this.getString(R.string.key_server)), LoginActivity.this) { @Override protected void deliverResult( LoginActivity receiver, Integer result) { if (result == null) { // The task crashed unexpectedly receiver.raiseLoginMessage(StockMessages.Restore_Unknown, true); return; } switch(result) { case DataPullTask.AUTH_FAILED: receiver.raiseLoginMessage(StockMessages.Auth_BadCredentials, false); break; case DataPullTask.BAD_DATA: receiver.raiseLoginMessage(StockMessages.Remote_BadRestore, true); break; case DataPullTask.DOWNLOAD_SUCCESS: if(!tryLocalLogin(true)) { receiver.raiseLoginMessage(StockMessages.Auth_CredentialMismatch, true); } else { break; } case DataPullTask.UNREACHABLE_HOST: receiver.raiseLoginMessage(StockMessages.Remote_NoNetwork, true); break; case DataPullTask.CONNECTION_TIMEOUT: receiver.raiseLoginMessage(StockMessages.Remote_Timeout, true); break; case DataPullTask.SERVER_ERROR: receiver.raiseLoginMessage(StockMessages.Remote_ServerError, true); break; case DataPullTask.UNKNOWN_FAILURE: receiver.raiseLoginMessage(StockMessages.Restore_Unknown, true); break; } } /* * (non-Javadoc) * @see org.commcare.android.tasks.templates.CommCareTask#deliverUpdate(java.lang.Object, java.lang.Object[]) */ @Override protected void deliverUpdate(LoginActivity receiver, Integer... 
update) { if(update[0] == DataPullTask.PROGRESS_STARTED) { receiver.updateProgress(Localization.get("sync.progress.purge"), DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_CLEANED) { receiver.updateProgress(Localization.get("sync.progress.authing"), DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_AUTHED) { receiver.updateProgress(Localization.get("sync.progress.downloading"), DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_DOWNLOADING) { receiver.updateProgress(Localization.get("sync.process.downloading.progress", new String[] {String.valueOf(update[1])}), DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_PROCESSING) { receiver.updateProgress(Localization.get("sync.process.processing", new String[] {String.valueOf(update[1]), String.valueOf(update[2])}), DataPullTask.DATA_PULL_TASK_ID); receiver.updateProgressBar(update[1], update[2], DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_RECOVERY_NEEDED) { receiver.updateProgress(Localization.get("sync.recover.needed"), DataPullTask.DATA_PULL_TASK_ID); } else if(update[0] == DataPullTask.PROGRESS_RECOVERY_STARTED) { receiver.updateProgress(Localization.get("sync.recover.started"), DataPullTask.DATA_PULL_TASK_ID); } } /* * (non-Javadoc) * @see org.commcare.android.tasks.templates.CommCareTask#deliverError(java.lang.Object, java.lang.Exception) */ @Override protected void deliverError( LoginActivity receiver, Exception e) { receiver.raiseLoginMessage(StockMessages.Restore_Unknown, true); } }; dataPuller.connect(this); dataPuller.execute(); } /* * (non-Javadoc) * * @see android.app.Activity#onResume() */ @Override protected void onResume() { super.onResume(); try { //TODO: there is a weird circumstance where we're logging in somewhere else and this gets locked. 
if (CommCareApplication._().getSession().isActive() && CommCareApplication._().getSession().getLoggedInUser() != null) { Intent i = new Intent(); i.putExtra(ALREADY_LOGGED_IN, true); setResult(RESULT_OK, i); CommCareApplication._().clearNotifications(NOTIFICATION_MESSAGE_LOGIN); finish(); return; } } catch (SessionUnavailableException sue) { // Nothing, we're logging in here anyway } //If we arrived at LoginActivity from clicking the regular app icon, and there //are no longer any available apps, we want to redirect to CCHomeActivity if (CommCareApplication._().getReadyAppRecords().size() == 0) { Intent i = new Intent(this, CommCareHomeActivity.class); startActivity(i); } // Otherwise, update the login screen refreshView(); } private String getUsername() { return username.getText().toString().toLowerCase().trim(); } private boolean tryLocalLogin(final boolean warnMultipleAccounts) { //TODO: check username/password for emptiness return tryLocalLogin(getUsername(), password.getText().toString(), warnMultipleAccounts); } private boolean tryLocalLogin(final String username, String password, final boolean warnMultipleAccounts) { try{ // TODO: We don't actually even use this anymore other than for hte // local login count, which seems super silly. 
UserKeyRecord matchingRecord = null; int count = 0; for(UserKeyRecord record : storage()) { if(!record.getUsername().equals(username)) { continue; } count++; String hash = record.getPasswordHash(); if(hash.contains("$")) { String alg = "sha1"; String salt = hash.split("\\$")[1]; String check = hash.split("\\$")[2]; MessageDigest md = MessageDigest.getInstance("SHA-1"); BigInteger number = new BigInteger(1, md.digest((salt+password).getBytes())); String hashed = number.toString(16); while (hashed.length() < check.length()) { hashed = "0" + hashed; } if (hash.equals(alg + "$" + salt + "$" + hashed)) { matchingRecord = record; } } } final boolean triggerTooManyUsers = count > 1 && warnMultipleAccounts; ManageKeyRecordTask<LoginActivity> task = new ManageKeyRecordTask<LoginActivity>(this, TASK_KEY_EXCHANGE, username, password, CommCareApplication._().getCurrentApp(), new ManageKeyRecordListener<LoginActivity>() { @Override public void keysLoginComplete(LoginActivity r) { if(triggerTooManyUsers) { // We've successfully pulled down new user data. // Should see if the user already has a sandbox and let // them know that their old data doesn't transition r.raiseMessage(NotificationMessageFactory.message(StockMessages.Auth_RemoteCredentialsChanged), true); Logger.log(AndroidLogger.TYPE_USER, "User " + username + " has logged in for the first time with a new password. They may have unsent data in their other sandbox"); } r.done(); } @Override public void keysReadyForSync(LoginActivity r) { // TODO: we only wanna do this on the _first_ try. 
Not // subsequent ones (IE: On return from startOta) r.startOta(); } @Override public void keysDoneOther(LoginActivity r, HttpCalloutOutcomes outcome) { switch(outcome) { case AuthFailed: Logger.log(AndroidLogger.TYPE_USER, "auth failed"); r.raiseLoginMessage(StockMessages.Auth_BadCredentials, false); break; case BadResponse: Logger.log(AndroidLogger.TYPE_USER, "bad response"); r.raiseLoginMessage(StockMessages.Remote_BadRestore, true); break; case NetworkFailure: Logger.log(AndroidLogger.TYPE_USER, "bad network"); r.raiseLoginMessage(StockMessages.Remote_NoNetwork, false); break; case NetworkFailureBadPassword: Logger.log(AndroidLogger.TYPE_USER, "bad network"); r.raiseLoginMessage(StockMessages.Remote_NoNetwork_BadPass, true); break; case BadCertificate: Logger.log(AndroidLogger.TYPE_USER, "bad certificate"); r.raiseLoginMessage(StockMessages.BadSSLCertificate, false); break; case UnkownError: Logger.log(AndroidLogger.TYPE_USER, "unknown"); r.raiseLoginMessage(StockMessages.Restore_Unknown, true); break; default: return; } } }) { @Override protected void deliverUpdate(LoginActivity receiver, String... 
update) { receiver.updateProgress(update[0], TASK_KEY_EXCHANGE); } }; task.connect(this); task.execute(); return true; }catch (Exception e) { e.printStackTrace(); return false; } } private void done() { Intent i = new Intent(); setResult(RESULT_OK, i); CommCareApplication._().clearNotifications(NOTIFICATION_MESSAGE_LOGIN); finish(); } private SqlStorage<UserKeyRecord> storage() throws SessionUnavailableException{ if(storage == null) { storage = CommCareApplication._().getAppStorage(UserKeyRecord.class); } return storage; } public void finished(int status) { } /* (non-Javadoc) * @see android.app.Activity#onCreateOptionsMenu(android.view.Menu) */ @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); menu.add(0, MENU_DEMO, 0, Localization.get("login.menu.demo")).setIcon(android.R.drawable.ic_menu_preferences); return true; } /* (non-Javadoc) * @see android.app.Activity#onOptionsItemSelected(android.view.MenuItem) */ @Override public boolean onOptionsItemSelected(MenuItem item) { boolean otherResult = super.onOptionsItemSelected(item); switch(item.getItemId()) { case MENU_DEMO: //Make sure we have a demo user DemoUserUtil.checkOrCreateDemoUser(this, CommCareApplication._().getCurrentApp()); //Now try to log in as the demo user tryLocalLogin(DemoUserUtil.DEMO_USER, DemoUserUtil.DEMO_USER, false); return true; default: return otherResult; } } private void raiseLoginMessage(MessageTag messageTag, boolean showTop) { NotificationMessage message = NotificationMessageFactory.message(messageTag, NOTIFICATION_MESSAGE_LOGIN); raiseMessage(message, showTop); } private void raiseMessage(NotificationMessage message, boolean showTop) { String toastText = message.getTitle(); if (showTop) { CommCareApplication._().reportNotificationMessage(message); toastText = Localization.get("notification.for.details.wrapper", new String[] {toastText}); } //either way LoginBoxesStatus.Error.setStatus(this); 
username.setCompoundDrawablesWithIntrinsicBounds(getResources().getDrawable(R.drawable.icon_user_attnneg), null, null, null); password.setCompoundDrawablesWithIntrinsicBounds(getResources().getDrawable(R.drawable.icon_lock_attnneg), null, null, null); loginButton.setBackgroundColor(getResources().getColor(R.color.cc_attention_negative_bg)); loginButton.setTextColor(getResources().getColor(R.color.cc_attention_negative_text)); errorBox.setVisibility(View.VISIBLE); errorBox.setText(toastText); Toast.makeText(this, toastText, Toast.LENGTH_LONG).show(); } /** * Sets the login boxes (user/pass) to the given color. * @param color Color code */ private void setLoginBoxesColor(int color) { username.setTextColor(color); password.setTextColor(color); } /* * (non-Javadoc) * @see org.commcare.android.framework.CommCareActivity#generateProgressDialog(int) * * Implementation of generateProgressDialog() for DialogController -- other methods * handled entirely in CommCareActivity */ @Override public CustomProgressDialog generateProgressDialog(int taskId) { CustomProgressDialog dialog; switch (taskId) { case TASK_KEY_EXCHANGE: dialog = CustomProgressDialog.newInstance(Localization.get("key.manage.title"), Localization.get("key.manage.start"), taskId); break; case DataPullTask.DATA_PULL_TASK_ID: dialog = CustomProgressDialog.newInstance(Localization.get("sync.progress.title"), Localization.get("sync.progress.starting"), taskId); dialog.addCancelButton(); dialog.addProgressBar(); break; default: Log.w(TAG, "taskId passed to generateProgressDialog does not match " + "any valid possibilities in LoginActivity"); return null; } return dialog; } /* * (non-Javadoc) * @see org.commcare.android.framework.CommCareActivity#isBackEnabled() */ @Override public boolean isBackEnabled() { return false; } private void refreshView() { Spinner spinner = (Spinner) findViewById(R.id.app_selection_spinner); TextView message = (TextView) findViewById(R.id.welcome_msg); ArrayList<ApplicationRecord> 
readyApps = CommCareApplication._().getReadyAppRecords(); if (readyApps.size() <= 1) { spinner.setVisibility(View.GONE); message.setText(R.string.login_welcome_single); return; } message.setText(R.string.login_welcome_multiple); ArrayList<String> appNames = new ArrayList<>(); ArrayList<String> appIds = new ArrayList<>(); for (ApplicationRecord r : readyApps) { String name = r.getDisplayName(); appNames.add(name); appIds.add(r.getUniqueId()); namesToRecords.put(name, r); } ArrayAdapter<String> adapter = new ArrayAdapter<>(this, R.layout.spinner_text_view, appNames); adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item); spinner.setAdapter(adapter); spinner.setOnItemSelectedListener(this); SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this); // The unique of id of the last app that was selected in the drop-down menu String lastApp = prefs.getString(KEY_LAST_APP,""); int position = 0; // If there is a last app, set the spinner selection to be that app if (!"".equals(lastApp)) { position = appIds.indexOf(lastApp); // If this last app has since been deleted, set the position to 0 if (position == -1) { position = 0; } } spinner.setSelection(position); spinner.setVisibility(View.VISIBLE); } @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this); String selected = (String) parent.getItemAtPosition(position); ApplicationRecord r = namesToRecords.get(selected); // Set the id of the last selected app prefs.edit().putString(KEY_LAST_APP, r.getUniqueId()).commit(); CommCareApplication._().initializeAppResources(new CommCareApp(r)); // Refresh UI for potential new language loadFields(false); } @Override public void onNothingSelected(AdapterView<?> parent) { // TODO Auto-generated method stub } }
remove TODO comment
app/src/org/commcare/dalvik/activities/LoginActivity.java
remove TODO comment
<ide><path>pp/src/org/commcare/dalvik/activities/LoginActivity.java <ide> <ide> @Override <ide> public void onNothingSelected(AdapterView<?> parent) { <del> // TODO Auto-generated method stub <add> return; <ide> } <ide> <ide> }
Java
apache-2.0
082c80ec60205b55f7113d2af38b8b594d8d4571
0
flipkart-incubator/Poseidon
/* * Copyright 2016 Flipkart Internet, pvt ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.flipkart.poseidon.api; import org.eclipse.jetty.servlet.FilterMapping; import javax.servlet.DispatcherType; import javax.servlet.Filter; import java.util.*; /** * Created by shrey.garg on 21/05/16. */ public class JettyFilterConfiguration { private Filter filter; private List<String> mappings; private Map<String, String> initParameters = new HashMap<>(); private EnumSet<DispatcherType> dispatcherTypes; public JettyFilterConfiguration(Filter filter) { if (filter == null) { throw new IllegalArgumentException("Filter configurations cannot be empty"); } this.filter = filter; this.mappings = Collections.singletonList("/*"); this.dispatcherTypes = EnumSet.of(DispatcherType.REQUEST); } public JettyFilterConfiguration(Filter filter, List<String> mappings) { if (filter == null || mappings == null) { throw new IllegalArgumentException("Filter configurations cannot be empty"); } this.filter = filter; this.mappings = mappings; this.dispatcherTypes = EnumSet.of(DispatcherType.REQUEST); } public JettyFilterConfiguration(Filter filter, List<String> mappings, EnumSet<DispatcherType> dispatcherTypes) { if (filter == null || mappings == null || dispatcherTypes == null) { throw new IllegalArgumentException("Filter configurations cannot be empty"); } this.filter = filter; this.mappings = mappings; this.dispatcherTypes = dispatcherTypes; } public Filter getFilter() { return filter; } public 
List<String> getMappings() { return mappings; } public Map<String, String> getInitParameters() { return initParameters; } public EnumSet<DispatcherType> getDispatcherTypes() { return dispatcherTypes; } public void setMappings(List<String> mappings) { if (mappings == null) { throw new IllegalArgumentException("Filter configurations cannot be empty"); } this.mappings = mappings; } public void setDispatcherTypes(EnumSet<DispatcherType> dispatcherTypes) { if (dispatcherTypes == null) { throw new IllegalArgumentException("Filter configurations cannot be empty"); } this.dispatcherTypes = dispatcherTypes; } public void setInitParameters(Map<String, String> initParameters) { if (initParameters == null) { throw new IllegalArgumentException("Filter configurations cannot be empty"); } this.initParameters = initParameters; } }
container/src/main/java/com/flipkart/poseidon/api/JettyFilterConfiguration.java
package com.flipkart.poseidon.api; import org.eclipse.jetty.servlet.FilterMapping; import javax.servlet.DispatcherType; import javax.servlet.Filter; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Created by shrey.garg on 21/05/16. */ public class JettyFilterConfiguration { private Filter filter; private List<String> mappings; private Map<String, String> initParameters = new HashMap<>(); private EnumSet<DispatcherType> dispatcherTypes; public JettyFilterConfiguration(Filter filter, List<String> mappings, EnumSet<DispatcherType> dispatcherTypes) { if (filter == null || mappings == null || dispatcherTypes == null) { throw new IllegalArgumentException("Filter configurations cannot be empty"); } this.filter = filter; this.mappings = mappings; this.dispatcherTypes = dispatcherTypes; } public Filter getFilter() { return filter; } public List<String> getMappings() { return mappings; } public Map<String, String> getInitParameters() { return initParameters; } public void setInitParameters(Map<String, String> initParameters) { if (initParameters == null) { throw new IllegalArgumentException("Filter configurations cannot be empty"); } this.initParameters = initParameters; } public EnumSet<DispatcherType> getDispatcherTypes() { return dispatcherTypes; } }
more options for creating FilterConfigurations
container/src/main/java/com/flipkart/poseidon/api/JettyFilterConfiguration.java
more options for creating FilterConfigurations
<ide><path>ontainer/src/main/java/com/flipkart/poseidon/api/JettyFilterConfiguration.java <add>/* <add> * Copyright 2016 Flipkart Internet, pvt ltd. <add> * <add> * Licensed under the Apache License, Version 2.0 (the "License"); <add> * you may not use this file except in compliance with the License. <add> * You may obtain a copy of the License at <add> * <add> * http://www.apache.org/licenses/LICENSE-2.0 <add> * <add> * Unless required by applicable law or agreed to in writing, software <add> * distributed under the License is distributed on an "AS IS" BASIS, <add> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <add> * See the License for the specific language governing permissions and <add> * limitations under the License. <add> */ <add> <ide> package com.flipkart.poseidon.api; <ide> <ide> import org.eclipse.jetty.servlet.FilterMapping; <ide> <ide> import javax.servlet.DispatcherType; <ide> import javax.servlet.Filter; <del>import java.util.EnumSet; <del>import java.util.HashMap; <del>import java.util.List; <del>import java.util.Map; <add>import java.util.*; <ide> <ide> /** <ide> * Created by shrey.garg on 21/05/16. 
<ide> private List<String> mappings; <ide> private Map<String, String> initParameters = new HashMap<>(); <ide> private EnumSet<DispatcherType> dispatcherTypes; <add> <add> public JettyFilterConfiguration(Filter filter) { <add> if (filter == null) { <add> throw new IllegalArgumentException("Filter configurations cannot be empty"); <add> } <add> <add> this.filter = filter; <add> this.mappings = Collections.singletonList("/*"); <add> this.dispatcherTypes = EnumSet.of(DispatcherType.REQUEST); <add> } <add> <add> public JettyFilterConfiguration(Filter filter, List<String> mappings) { <add> if (filter == null || mappings == null) { <add> throw new IllegalArgumentException("Filter configurations cannot be empty"); <add> } <add> <add> this.filter = filter; <add> this.mappings = mappings; <add> this.dispatcherTypes = EnumSet.of(DispatcherType.REQUEST); <add> } <ide> <ide> public JettyFilterConfiguration(Filter filter, List<String> mappings, EnumSet<DispatcherType> dispatcherTypes) { <ide> if (filter == null || mappings == null || dispatcherTypes == null) { <ide> return initParameters; <ide> } <ide> <add> public EnumSet<DispatcherType> getDispatcherTypes() { <add> return dispatcherTypes; <add> } <add> <add> public void setMappings(List<String> mappings) { <add> if (mappings == null) { <add> throw new IllegalArgumentException("Filter configurations cannot be empty"); <add> } <add> this.mappings = mappings; <add> } <add> <add> public void setDispatcherTypes(EnumSet<DispatcherType> dispatcherTypes) { <add> if (dispatcherTypes == null) { <add> throw new IllegalArgumentException("Filter configurations cannot be empty"); <add> } <add> this.dispatcherTypes = dispatcherTypes; <add> } <add> <ide> public void setInitParameters(Map<String, String> initParameters) { <ide> if (initParameters == null) { <ide> throw new IllegalArgumentException("Filter configurations cannot be empty"); <ide> <ide> this.initParameters = initParameters; <ide> } <del> <del> public EnumSet<DispatcherType> 
getDispatcherTypes() { <del> return dispatcherTypes; <del> } <ide> }
Java
apache-2.0
545f2d33ece78e75996f2fbd74526ab9a7ecb81f
0
apache/pdfbox,kalaspuffar/pdfbox,apache/pdfbox,kalaspuffar/pdfbox
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.cos; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.pdfbox.filter.DecodeOptions; import org.apache.pdfbox.filter.Filter; import org.apache.pdfbox.filter.FilterFactory; import org.apache.pdfbox.io.IOUtils; import org.apache.pdfbox.io.RandomAccess; import org.apache.pdfbox.io.RandomAccessInputStream; import org.apache.pdfbox.io.RandomAccessOutputStream; import org.apache.pdfbox.io.RandomAccessRead; import org.apache.pdfbox.io.RandomAccessReadBuffer; import org.apache.pdfbox.io.RandomAccessReadView; import org.apache.pdfbox.io.ScratchFile; /** * This class represents a stream object in a PDF document. 
* * @author Ben Litchfield */ public class COSStream extends COSDictionary implements Closeable { // backing store, in-memory or on-disk private RandomAccess randomAccess; // used as a temp buffer when creating a new stream private ScratchFile scratchFile; // indicates if the scratchfile was created within this COSStream instance private boolean closeScratchFile = false; // true if there's an open OutputStream private boolean isWriting; // random access view to be read from private RandomAccessReadView randomAccessReadView; private static final Log LOG = LogFactory.getLog(COSStream.class); /** * Creates a new stream with an empty dictionary. * <p> * Try to avoid using this constructor because it creates a new scratch file in memory. Instead, * use {@link COSDocument#createCOSStream() document.getDocument().createCOSStream()} which will * use the existing scratch file (in memory or in temp file) of the document. * </p> */ public COSStream() { this(null); } /** * Creates a new stream with an empty dictionary. Data is stored in the given scratch file. * * @param scratchFile Scratch file for writing stream data. */ public COSStream(ScratchFile scratchFile) { setInt(COSName.LENGTH, 0); this.scratchFile = scratchFile; } /** * Creates a new stream with an empty dictionary. Data is read from the given random accessview. Written data is stored * in the given scratch file. * * @param scratchFile Scratch file for writing stream data. * @throws IOException if the length of the random access view isn't available */ public COSStream(ScratchFile scratchFile, RandomAccessReadView randomAccessReadView) throws IOException { this(scratchFile); this.randomAccessReadView = randomAccessReadView; setInt(COSName.LENGTH, (int) randomAccessReadView.length()); } /** * Throws if the random access backing store has been closed. Helpful for catching cases where * a user tries to use a COSStream which has outlived its COSDocument. 
*/ private void checkClosed() throws IOException { if (randomAccess != null && randomAccess.isClosed()) { throw new IOException("COSStream has been closed and cannot be read. " + "Perhaps its enclosing PDDocument has been closed?"); // Tip for debugging: look at the destination file with an editor, you'll see an // incomplete stream at the bottom. } } private ScratchFile getScratchFile() { if (scratchFile == null) { scratchFile = ScratchFile.getMainMemoryOnlyInstance(); closeScratchFile = true; } return scratchFile; } /** * Returns a new InputStream which reads the encoded PDF stream data. Experts only! * * @return InputStream containing raw, encoded PDF stream data. * @throws IOException If the stream could not be read. */ public InputStream createRawInputStream() throws IOException { checkClosed(); if (isWriting) { throw new IllegalStateException("Cannot read while there is an open stream writer"); } if (randomAccess == null) { if (randomAccessReadView != null) { randomAccessReadView.seek(0); return new RandomAccessInputStream(randomAccessReadView); } else { throw new IOException( "Create InputStream called without data being written before to stream."); } } else { return new RandomAccessInputStream(randomAccess); } } /** * Returns a new InputStream which reads the decoded stream data. * * @return InputStream containing decoded stream data. * @throws IOException If the stream could not be read. */ public COSInputStream createInputStream() throws IOException { return createInputStream(DecodeOptions.DEFAULT); } public COSInputStream createInputStream(DecodeOptions options) throws IOException { InputStream input = createRawInputStream(); return COSInputStream.create(getFilterList(), this, input, options); } /** * Returns a new RandomAccessRead which reads the decoded stream data. * * @return RandomAccessRead containing decoded stream data. * @throws IOException If the stream could not be read. 
*/ public RandomAccessRead createView() throws IOException { List<Filter> filterList = getFilterList(); if (filterList.isEmpty()) { if (randomAccess == null && randomAccessReadView != null) { return new RandomAccessReadView(randomAccessReadView, 0, randomAccessReadView.length()); } else { return new RandomAccessReadBuffer(createRawInputStream()); } } else { Set<Filter> filterSet = new HashSet<>(filterList); if (filterSet.size() != filterList.size()) { throw new IOException("Duplicate"); } InputStream input = createRawInputStream(); ByteArrayOutputStream output = new ByteArrayOutputStream(); // apply filters for (int i = 0; i < filterList.size(); i++) { if (i > 0) { input = new ByteArrayInputStream(output.toByteArray()); output.reset(); } try { filterList.get(i).decode(input, output, this, i, DecodeOptions.DEFAULT); } finally { IOUtils.closeQuietly(input); } } return new RandomAccessReadBuffer(output.toByteArray()); } } /** * Returns a new OutputStream for writing stream data, using the current filters. * * @return OutputStream for un-encoded stream data. * @throws IOException If the output stream could not be created. */ public OutputStream createOutputStream() throws IOException { return createOutputStream(null); } /** * Returns a new OutputStream for writing stream data, using and the given filters. * * @param filters COSArray or COSName of filters to be used. * @return OutputStream for un-encoded stream data. * @throws IOException If the output stream could not be created. 
*/ public OutputStream createOutputStream(COSBase filters) throws IOException { checkClosed(); if (isWriting) { throw new IllegalStateException("Cannot have more than one open stream writer."); } // apply filters, if any if (filters != null) { setItem(COSName.FILTER, filters); } if (randomAccess != null) randomAccess.clear(); else randomAccess = getScratchFile().createBuffer(); OutputStream randomOut = new RandomAccessOutputStream(randomAccess); OutputStream cosOut = new COSOutputStream(getFilterList(), this, randomOut, getScratchFile()); isWriting = true; return new FilterOutputStream(cosOut) { @Override public void write(byte[] b, int off, int len) throws IOException { this.out.write(b, off, len); } @Override public void close() throws IOException { super.close(); setInt(COSName.LENGTH, (int)randomAccess.length()); isWriting = false; } }; } /** * Returns a new OutputStream for writing encoded PDF data. Experts only! * * @return OutputStream for raw PDF stream data. * @throws IOException If the output stream could not be created. */ public OutputStream createRawOutputStream() throws IOException { checkClosed(); if (isWriting) { throw new IllegalStateException("Cannot have more than one open stream writer."); } if (randomAccess != null) randomAccess.clear(); else randomAccess = getScratchFile().createBuffer(); OutputStream out = new RandomAccessOutputStream(randomAccess); isWriting = true; return new FilterOutputStream(out) { @Override public void write(byte[] b, int off, int len) throws IOException { this.out.write(b, off, len); } @Override public void close() throws IOException { super.close(); setInt(COSName.LENGTH, (int)randomAccess.length()); isWriting = false; } }; } /** * Returns the list of filters. 
*/ private List<Filter> getFilterList() throws IOException { List<Filter> filterList; COSBase filters = getFilters(); if (filters instanceof COSName) { filterList = new ArrayList<>(1); filterList.add(FilterFactory.INSTANCE.getFilter((COSName)filters)); } else if (filters instanceof COSArray) { COSArray filterArray = (COSArray)filters; filterList = new ArrayList<>(filterArray.size()); for (int i = 0; i < filterArray.size(); i++) { COSBase base = filterArray.get(i); if (!(base instanceof COSName)) { throw new IOException("Forbidden type in filter array: " + (base == null ? "null" : base.getClass().getName())); } filterList.add(FilterFactory.INSTANCE.getFilter((COSName) base)); } } else { filterList = new ArrayList<>(); } return filterList; } /** * Returns the length of the encoded stream. * * @return length in bytes */ public long getLength() { if (isWriting) { throw new IllegalStateException("There is an open OutputStream associated with this " + "COSStream. It must be closed before querying the " + "length of this COSStream."); } return getInt(COSName.LENGTH, 0); } /** * This will return the filters to apply to the byte stream. * The method will return * <ul> * <li>null if no filters are to be applied * <li>a COSName if one filter is to be applied * <li>a COSArray containing COSNames if multiple filters are to be applied * </ul> * * @return the COSBase object representing the filters */ public COSBase getFilters() { return getDictionaryObject(COSName.FILTER); } /** * Returns the contents of the stream as a PDF "text string". 
*/ public String toTextString() { try (InputStream input = createInputStream()) { byte[] array = IOUtils.toByteArray(input); COSString string = new COSString(array); return string.getString(); } catch (IOException e) { LOG.debug("An exception occurred trying to get the content - returning empty string instead", e); return ""; } } @Override public Object accept(ICOSVisitor visitor) throws IOException { return visitor.visitFromStream(this); } @Override public void close() throws IOException { if (closeScratchFile && scratchFile != null) { scratchFile.close(); scratchFile = null; } // marks the scratch file pages as free if (randomAccess != null) { randomAccess.close(); randomAccess = null; } if (randomAccessReadView != null) { randomAccessReadView.close(); randomAccessReadView = null; } } /** * Indicates whether the stream contains any data or not. * * @return true if the stream contains any data */ public boolean hasData() { return randomAccess != null || randomAccessReadView != null; } }
pdfbox/src/main/java/org/apache/pdfbox/cos/COSStream.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pdfbox.cos; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.Closeable; import java.io.FilterOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.pdfbox.filter.DecodeOptions; import org.apache.pdfbox.filter.Filter; import org.apache.pdfbox.filter.FilterFactory; import org.apache.pdfbox.io.IOUtils; import org.apache.pdfbox.io.RandomAccess; import org.apache.pdfbox.io.RandomAccessInputStream; import org.apache.pdfbox.io.RandomAccessOutputStream; import org.apache.pdfbox.io.RandomAccessRead; import org.apache.pdfbox.io.RandomAccessReadBuffer; import org.apache.pdfbox.io.RandomAccessReadView; import org.apache.pdfbox.io.ScratchFile; /** * This class represents a stream object in a PDF document. 
* * @author Ben Litchfield */ public class COSStream extends COSDictionary implements Closeable { // backing store, in-memory or on-disk private RandomAccess randomAccess; // used as a temp buffer when creating a new stream private ScratchFile scratchFile; // indicates if the scratchfile was created within this COSStream instance private boolean closeScratchFile = false; // true if there's an open OutputStream private boolean isWriting; // random access view to be read from private RandomAccessReadView randomAccessReadView; private static final Log LOG = LogFactory.getLog(COSStream.class); /** * Creates a new stream with an empty dictionary. * <p> * Try to avoid using this constructor because it creates a new scratch file in memory. Instead, * use {@link COSDocument#createCOSStream() document.getDocument().createCOSStream()} which will * use the existing scratch file (in memory or in temp file) of the document. * </p> */ public COSStream() { this(null); } /** * Creates a new stream with an empty dictionary. Data is stored in the given scratch file. * * @param scratchFile Scratch file for writing stream data. */ public COSStream(ScratchFile scratchFile) { setInt(COSName.LENGTH, 0); this.scratchFile = scratchFile; } /** * Creates a new stream with an empty dictionary. Data is read from the given random accessview. Written data is stored * in the given scratch file. * * @param scratchFile Scratch file for writing stream data. * @throws IOException if the length of the random access view isn't available */ public COSStream(ScratchFile scratchFile, RandomAccessReadView randomAccessReadView) throws IOException { this(scratchFile); this.randomAccessReadView = randomAccessReadView; setInt(COSName.LENGTH, (int) randomAccessReadView.length()); } /** * Throws if the random access backing store has been closed. Helpful for catching cases where * a user tries to use a COSStream which has outlived its COSDocument. 
*/ private void checkClosed() throws IOException { if (randomAccess != null && randomAccess.isClosed()) { throw new IOException("COSStream has been closed and cannot be read. " + "Perhaps its enclosing PDDocument has been closed?"); // Tip for debugging: look at the destination file with an editor, you'll see an // incomplete stream at the bottom. } } private ScratchFile getScratchFile() { if (scratchFile == null) { scratchFile = ScratchFile.getMainMemoryOnlyInstance(); closeScratchFile = true; } return scratchFile; } /** * Returns a new InputStream which reads the encoded PDF stream data. Experts only! * * @return InputStream containing raw, encoded PDF stream data. * @throws IOException If the stream could not be read. */ public InputStream createRawInputStream() throws IOException { checkClosed(); if (isWriting) { throw new IllegalStateException("Cannot read while there is an open stream writer"); } if (randomAccess == null) { if (randomAccessReadView != null) { randomAccessReadView.seek(0); return new RandomAccessInputStream(randomAccessReadView); } else { throw new IOException( "Create InputStream called without data being written before to stream."); } } else { return new RandomAccessInputStream(randomAccess); } } /** * Returns a new InputStream which reads the decoded stream data. * * @return InputStream containing decoded stream data. * @throws IOException If the stream could not be read. */ public COSInputStream createInputStream() throws IOException { return createInputStream(DecodeOptions.DEFAULT); } public COSInputStream createInputStream(DecodeOptions options) throws IOException { InputStream input = createRawInputStream(); return COSInputStream.create(getFilterList(), this, input, options); } /** * Returns a new RandomAccessRead which reads the decoded stream data. * * @return RandomAccessRead containing decoded stream data. * @throws IOException If the stream could not be read. 
*/ public RandomAccessRead createView() throws IOException { List<Filter> filterList = getFilterList(); if (filterList.isEmpty()) { if (randomAccess == null && randomAccessReadView != null) { return new RandomAccessReadView(randomAccessReadView, 0, randomAccessReadView.length()); } else { return new RandomAccessReadBuffer(createRawInputStream()); } } else { Set<Filter> filterSet = new HashSet<>(filterList); if (filterSet.size() != filterList.size()) { throw new IOException("Duplicate"); } InputStream input = createRawInputStream(); ByteArrayOutputStream output = new ByteArrayOutputStream(); // apply filters for (int i = 0; i < filterList.size(); i++) { if (i > 0) { input = new ByteArrayInputStream(output.toByteArray()); output.reset(); } try { filterList.get(i).decode(input, output, this, i, DecodeOptions.DEFAULT); } finally { IOUtils.closeQuietly(input); } } return new RandomAccessReadBuffer(output.toByteArray()); } } /** * Returns a new OutputStream for writing stream data, using the current filters. * * @return OutputStream for un-encoded stream data. * @throws IOException If the output stream could not be created. */ public OutputStream createOutputStream() throws IOException { return createOutputStream(null); } /** * Returns a new OutputStream for writing stream data, using and the given filters. * * @param filters COSArray or COSName of filters to be used. * @return OutputStream for un-encoded stream data. * @throws IOException If the output stream could not be created. 
*/ public OutputStream createOutputStream(COSBase filters) throws IOException { checkClosed(); if (isWriting) { throw new IllegalStateException("Cannot have more than one open stream writer."); } // apply filters, if any if (filters != null) { setItem(COSName.FILTER, filters); } if (randomAccess != null) randomAccess.clear(); else randomAccess = getScratchFile().createBuffer(); OutputStream randomOut = new RandomAccessOutputStream(randomAccess); OutputStream cosOut = new COSOutputStream(getFilterList(), this, randomOut, getScratchFile()); isWriting = true; return new FilterOutputStream(cosOut) { @Override public void write(byte[] b, int off, int len) throws IOException { this.out.write(b, off, len); } @Override public void close() throws IOException { super.close(); setInt(COSName.LENGTH, (int)randomAccess.length()); isWriting = false; } }; } /** * Returns a new OutputStream for writing encoded PDF data. Experts only! * * @return OutputStream for raw PDF stream data. * @throws IOException If the output stream could not be created. */ public OutputStream createRawOutputStream() throws IOException { checkClosed(); if (isWriting) { throw new IllegalStateException("Cannot have more than one open stream writer."); } if (randomAccess != null) randomAccess.clear(); else randomAccess = getScratchFile().createBuffer(); OutputStream out = new RandomAccessOutputStream(randomAccess); isWriting = true; return new FilterOutputStream(out) { @Override public void write(byte[] b, int off, int len) throws IOException { this.out.write(b, off, len); } @Override public void close() throws IOException { super.close(); setInt(COSName.LENGTH, (int)randomAccess.length()); isWriting = false; } }; } /** * Returns the list of filters. 
*/ private List<Filter> getFilterList() throws IOException { List<Filter> filterList; COSBase filters = getFilters(); if (filters instanceof COSName) { filterList = new ArrayList<>(1); filterList.add(FilterFactory.INSTANCE.getFilter((COSName)filters)); } else if (filters instanceof COSArray) { COSArray filterArray = (COSArray)filters; filterList = new ArrayList<>(filterArray.size()); for (int i = 0; i < filterArray.size(); i++) { COSBase base = filterArray.get(i); if (!(base instanceof COSName)) { throw new IOException("Forbidden type in filter array: " + (base == null ? "null" : base.getClass().getName())); } filterList.add(FilterFactory.INSTANCE.getFilter((COSName) base)); } } else { filterList = new ArrayList<>(); } return filterList; } /** * Returns the length of the encoded stream. * * @return length in bytes */ public long getLength() { if (isWriting) { throw new IllegalStateException("There is an open OutputStream associated with " + "this COSStream. It must be closed before querying" + "length of this COSStream."); } return getInt(COSName.LENGTH, 0); } /** * This will return the filters to apply to the byte stream. * The method will return * <ul> * <li>null if no filters are to be applied * <li>a COSName if one filter is to be applied * <li>a COSArray containing COSNames if multiple filters are to be applied * </ul> * * @return the COSBase object representing the filters */ public COSBase getFilters() { return getDictionaryObject(COSName.FILTER); } /** * Returns the contents of the stream as a PDF "text string". 
*/ public String toTextString() { try (InputStream input = createInputStream()) { byte[] array = IOUtils.toByteArray(input); COSString string = new COSString(array); return string.getString(); } catch (IOException e) { LOG.debug("An exception occurred trying to get the content - returning empty string instead", e); return ""; } } @Override public Object accept(ICOSVisitor visitor) throws IOException { return visitor.visitFromStream(this); } @Override public void close() throws IOException { if (closeScratchFile && scratchFile != null) { scratchFile.close(); scratchFile = null; } // marks the scratch file pages as free if (randomAccess != null) { randomAccess.close(); randomAccess = null; } if (randomAccessReadView != null) { randomAccessReadView.close(); randomAccessReadView = null; } } /** * Indicates whether the stream contains any data or not. * * @return true if the stream contains any data */ public boolean hasData() { return randomAccess != null || randomAccessReadView != null; } }
PDFBOX-4892: fix message, as suggested by valerybokov git-svn-id: c3ad59981690829a43dc34c293c4e2cd04bcd994@1890911 13f79535-47bb-0310-9956-ffa450edef68
pdfbox/src/main/java/org/apache/pdfbox/cos/COSStream.java
PDFBOX-4892: fix message, as suggested by valerybokov
<ide><path>dfbox/src/main/java/org/apache/pdfbox/cos/COSStream.java <ide> { <ide> if (isWriting) <ide> { <del> throw new IllegalStateException("There is an open OutputStream associated with " + <del> "this COSStream. It must be closed before querying" + <add> throw new IllegalStateException("There is an open OutputStream associated with this " + <add> "COSStream. It must be closed before querying the " + <ide> "length of this COSStream."); <ide> } <ide> return getInt(COSName.LENGTH, 0);
Java
apache-2.0
07e4de71d2215c6afee3a8912edd78f8ea5599f3
0
olegz/spring-cloud-function,olegz/spring-cloud-function,olegz/spring-cloud-function,olegz/spring-cloud-function,olegz/spring-cloud-function
/* * Copyright 2020-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.function.context.catalog; import java.lang.reflect.Field; import java.lang.reflect.GenericArrayType; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import org.aopalliance.intercept.MethodInterceptor; import org.aopalliance.intercept.MethodInvocation; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.reactivestreams.Publisher; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.util.function.Tuples; import org.springframework.aop.framework.ProxyFactory; import org.springframework.aop.support.AopUtils; import org.springframework.beans.factory.BeanFactory; import org.springframework.cloud.function.context.FunctionProperties; import org.springframework.cloud.function.context.FunctionRegistration; import 
org.springframework.cloud.function.context.FunctionRegistry; import org.springframework.cloud.function.context.config.RoutingFunction; import org.springframework.cloud.function.json.JsonMapper; import org.springframework.core.convert.ConversionService; import org.springframework.expression.Expression; import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.lang.Nullable; import org.springframework.messaging.Message; import org.springframework.messaging.MessageHeaders; import org.springframework.messaging.converter.CompositeMessageConverter; import org.springframework.messaging.converter.MessageConversionException; import org.springframework.messaging.support.GenericMessage; import org.springframework.messaging.support.MessageBuilder; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.MimeType; import org.springframework.util.MimeTypeUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.ReflectionUtils; import org.springframework.util.StringUtils; /** * * Basic implementation of FunctionRegistry which maintains the cache of registered functions while * decorating them with additional features such as transparent type conversion, composition, routing etc. * * Unlike {@link BeanFactoryAwareFunctionRegistry}, this implementation does not depend on {@link BeanFactory}. * * @author Oleg Zhurakousky * * @since 3.1 */ public class SimpleFunctionRegistry implements FunctionRegistry, FunctionInspector { Log logger = LogFactory.getLog(BeanFactoryAwareFunctionRegistry.class); /** * Identifies MessageConversionExceptions that happen when input can't be converted. */ public static final String COULD_NOT_CONVERT_INPUT = "Could Not Convert Input"; /** * Identifies MessageConversionExceptions that happen when output can't be converted. 
*/ public static final String COULD_NOT_CONVERT_OUTPUT = "Could Not Convert Output"; private final Map<Object, FunctionRegistration<Object>> registrationsByFunction = new HashMap<>(); private final Map<String, FunctionRegistration<Object>> registrationsByName = new HashMap<>(); private final ConversionService conversionService; private final CompositeMessageConverter messageConverter; private List<String> declaredFunctionDefinitions; public SimpleFunctionRegistry(ConversionService conversionService, @Nullable CompositeMessageConverter messageConverter) { this.conversionService = conversionService; this.messageConverter = messageConverter; this.init(System.getProperty("spring.cloud.function.definition")); } void init(String functionDefinition) { this.declaredFunctionDefinitions = StringUtils.hasText(functionDefinition) ? Arrays.asList(functionDefinition.split(";")) : Collections.emptyList(); if (this.declaredFunctionDefinitions.contains(RoutingFunction.FUNCTION_NAME)) { Assert.isTrue(this.declaredFunctionDefinitions.size() == 1, "It is illegal to declare more then one function when using RoutingFunction"); } } @Override public <T> T lookup(Class<?> type, String definition) { return this.lookup(definition, new String[] {}); } @Override public int size() { return this.registrationsByFunction.size(); } @Override @SuppressWarnings("unchecked") public <T> T lookup(String definition, String... acceptedOutputTypes) { definition = StringUtils.hasText(definition) ? definition.replaceAll(",", "|") : ""; boolean routing = definition.contains(RoutingFunction.FUNCTION_NAME) || this.declaredFunctionDefinitions.contains(RoutingFunction.FUNCTION_NAME); if (!routing && this.declaredFunctionDefinitions.size() > 0) { if (StringUtils.hasText(definition)) { if (this.declaredFunctionDefinitions.size() > 1 && !this.declaredFunctionDefinitions.contains(definition)) { logger.warn("Attempted to access un-declared function definition '" + definition + "'. 
Declared functions are '" + this.declaredFunctionDefinitions + "' specified via `spring.cloud.function.definition` property. If the intention is to access " + "any function available in FunctionCatalog, please remove `spring.cloud.function.definition` property."); return null; } } else { if (this.declaredFunctionDefinitions.size() == 1) { definition = this.declaredFunctionDefinitions.get(0); } else if (this.declaredFunctionDefinitions.size() > 1) { logger.warn("Default function can not be mapped since multiple functions are declared " + this.declaredFunctionDefinitions); return null; } else { logger.warn("Default function can not be mapped since multiple functions are available in FunctionCatalog. " + "Please use 'spring.cloud.function.definition' property."); return null; } } } Object function = this .proxyInvokerIfNecessary((FunctionInvocationWrapper) this.compose(null, definition, acceptedOutputTypes)); return (T) function; } @Override public Set<String> getNames(Class<?> type) { Set<String> registeredNames = registrationsByFunction.values().stream().flatMap(reg -> reg.getNames().stream()) .collect(Collectors.toSet()); return registeredNames; } @SuppressWarnings("unchecked") @Override public <T> void register(FunctionRegistration<T> registration) { this.registrationsByFunction.put(registration.getTarget(), (FunctionRegistration<Object>) registration); for (String name : registration.getNames()) { this.registrationsByName.put(name, (FunctionRegistration<Object>) registration); } } @Override public FunctionRegistration<?> getRegistration(Object function) { FunctionRegistration<?> registration = this.registrationsByFunction.get(function); // need to do this due to the deployer not wrapping the actual target into FunctionInvocationWrapper // hence the lookup would need to be made by the actual target if (registration == null && function instanceof FunctionInvocationWrapper) { function = ((FunctionInvocationWrapper) function).target; } return 
this.registrationsByFunction.get(function); } Object locateFunction(String name) { return this.registrationsByName.get(name); } Type discoverFunctionType(Object function, String... names) { if (function instanceof RoutingFunction) { return this.registrationsByName.get(names[0]).getType().getType(); } return FunctionTypeUtils.discoverFunctionTypeFromClass(function.getClass()); } String discoverDefaultDefinitionFromRegistration() { String definition = null; if (this.registrationsByName.size() > 0) { Assert .isTrue(this.registrationsByName.size() == 1, "Found more then one function in local registry"); definition = this.registrationsByName.keySet().iterator().next(); } return definition; } String discoverDefaultDefinitionIfNecessary(String definition) { if (StringUtils.isEmpty(definition)) { definition = this.discoverDefaultDefinitionFromRegistration(); } else if (!this.registrationsByName.containsKey(definition) && this.registrationsByName.size() == 1) { definition = this.registrationsByName.keySet().iterator().next(); } else if (definition.endsWith("|")) { if (this.registrationsByName.size() == 2) { Set<String> fNames = this.getNames(null); definition = this.determinImpliedDefinition(fNames, definition); } } return definition; } String determinImpliedDefinition(Set<String> fNames, String originalDefinition) { if (fNames.size() == 2) { Iterator<String> iter = fNames.iterator(); String n1 = iter.next(); String n2 = iter.next(); String[] definitionName = StringUtils.delimitedListToStringArray(originalDefinition, "|"); if (definitionName[0].equals(n1)) { definitionName[1] = n2; originalDefinition = definitionName[0] + "|" + definitionName[1]; } else { definitionName[1] = n1; originalDefinition = definitionName[0] + "|" + definitionName[1]; } } return originalDefinition; } Type discovereFunctionTypeByName(String name) { return this.registrationsByName.get(name).getType().getType(); } @SuppressWarnings({"unchecked", "rawtypes"}) private Function<?, ?> compose(Class<?> 
type, String definition, String... acceptedOutputTypes) { if (logger.isInfoEnabled()) { logger.info("Looking up function '" + definition + "' with acceptedOutputTypes: " + Arrays .asList(acceptedOutputTypes)); } definition = discoverDefaultDefinitionIfNecessary(definition); if (StringUtils.isEmpty(definition)) { return null; } Function<?, ?> resultFunction = null; if (this.registrationsByName.containsKey(definition)) { Object targetFunction = this.registrationsByName.get(definition).getTarget(); Type functionType = this.registrationsByName.get(definition).getType().getType(); resultFunction = new FunctionInvocationWrapper(targetFunction, functionType, definition, acceptedOutputTypes); } else { String[] names = StringUtils.delimitedListToStringArray(definition.replaceAll(",", "|").trim(), "|"); StringBuilder composedNameBuilder = new StringBuilder(); String prefix = ""; Type originFunctionType = null; for (String name : names) { Object function = this.locateFunction(name); if (function == null) { logger.debug("Failed to discover function '" + definition + "' in function catalog. " + "Function available in catalog are: " + this.getNames(null) + ". This is generally " + "acceptable for cases where there was no intention to use functions."); return null; } else { Type functionType = this.discovereFunctionTypeByName(name); if (functionType != null && functionType.toString().contains("org.apache.kafka.streams.")) { logger .debug("Kafka Streams function '" + definition + "' is not supported by spring-cloud-function."); return null; } } composedNameBuilder.append(prefix); composedNameBuilder.append(name); FunctionRegistration<Object> registration; Type currentFunctionType = null; if (function instanceof FunctionRegistration) { registration = (FunctionRegistration<Object>) function; currentFunctionType = currentFunctionType == null ? 
registration.getType() .getType() : currentFunctionType; function = registration.getTarget(); } else { if (isFunctionPojo(function)) { Method functionalMethod = FunctionTypeUtils.discoverFunctionalMethod(function.getClass()); currentFunctionType = FunctionTypeUtils.fromFunctionMethod(functionalMethod); function = this.proxyTarget(function, functionalMethod); } String[] aliasNames = this.getAliases(name).toArray(new String[] {}); currentFunctionType = currentFunctionType == null ? this .discoverFunctionType(function, aliasNames) : currentFunctionType; registration = new FunctionRegistration<>(function, name).type(currentFunctionType); } if (function instanceof RoutingFunction) { registrationsByFunction.putIfAbsent(function, registration); registrationsByName.putIfAbsent(name, registration); } function = new FunctionInvocationWrapper(function, currentFunctionType, name, names.length > 1 ? new String[] {} : acceptedOutputTypes); if (originFunctionType == null) { originFunctionType = currentFunctionType; } // composition if (resultFunction == null) { resultFunction = (Function<?, ?>) function; } else { originFunctionType = FunctionTypeUtils.compose(originFunctionType, currentFunctionType); resultFunction = new FunctionInvocationWrapper(resultFunction.andThen((Function) function), originFunctionType, composedNameBuilder.toString(), acceptedOutputTypes); } prefix = "|"; } ((FunctionInvocationWrapper) resultFunction).acceptedOutputMimeTypes = acceptedOutputTypes; FunctionRegistration<Object> registration = new FunctionRegistration<Object>(resultFunction, definition) .type(originFunctionType); registrationsByFunction.putIfAbsent(resultFunction, registration); registrationsByName.putIfAbsent(definition, registration); } return resultFunction; } private boolean isFunctionPojo(Object function) { return !function.getClass().isSynthetic() && !(function instanceof Supplier) && !(function instanceof Function) && !(function instanceof Consumer) && 
!function.getClass().getPackage().getName().startsWith("org.springframework.cloud.function.compiler"); } /* * == OUTER PROXY === * For cases where function is POJO we need to be able to look it up as Function * as well as the type of actual pojo (e.g., MyFunction f1 = catalog.lookup("myFunction");) * To do this we wrap the target into CglibProxy (for cases when function is a POJO ) with the * actual target class (e.g., MyFunction). Meanwhile the invocation will be delegated to * the FunctionInvocationWrapper which will trigger the INNER PROXY. This effectively ensures that * conversion, composition and/or fluxification would happen (code inside of FunctionInvocationWrapper) * while the inner proxy invocation will delegate the invocation with already converted arguments * to the actual target class (e.g., MyFunction). */ private Object proxyInvokerIfNecessary(FunctionInvocationWrapper functionInvoker) { if (functionInvoker != null && AopUtils.isCglibProxy(functionInvoker.getTarget())) { if (logger.isInfoEnabled()) { logger .info("Proxying POJO function: " + functionInvoker.functionDefinition + ". . ." + functionInvoker.target .getClass()); } ProxyFactory pf = new ProxyFactory(functionInvoker.getTarget()); pf.setProxyTargetClass(true); pf.setInterfaces(Function.class, Supplier.class, Consumer.class); pf.addAdvice(new MethodInterceptor() { @Override public Object invoke(MethodInvocation invocation) throws Throwable { // this will trigger the INNER PROXY if (ObjectUtils.isEmpty(invocation.getArguments())) { Object o = functionInvoker.get(); return o; } else { // this is where we probably would need to gather all arguments into tuples return functionInvoker.apply(invocation.getArguments()[0]); } } }); return pf.getProxy(); } return functionInvoker; } /* * == INNER PROXY === * When dealing with POJO functions we still want to be able to treat them as any other * function for purposes of composition, type conversion and fluxification. 
* So this proxy will ensure that the target class can be represented as Function while delegating * any call to apply to the actual target method. * Since this proxy is part of the FunctionInvocationWrapper composition and copnversion will be applied * as tyo any other function. */ private Object proxyTarget(Object targetFunction, Method actualMethodToCall) { ProxyFactory pf = new ProxyFactory(targetFunction); pf.setProxyTargetClass(true); pf.setInterfaces(Function.class); pf.addAdvice(new MethodInterceptor() { @Override public Object invoke(MethodInvocation invocation) throws Throwable { return actualMethodToCall.invoke(invocation.getThis(), invocation.getArguments()); } }); return pf.getProxy(); } /** * Returns a list of aliases for 'functionName'. * It will do so providing the underlying implementation is based on the * system that supports name aliasing (see {@link BeanFactoryAwareFunctionRegistry} * @param functionName the name of the function * @return collection of aliases */ Collection<String> getAliases(String functionName) { return Collections.singletonList(functionName); } /** * Single wrapper for all Suppliers, Functions and Consumers managed by this * catalog. * * @author Oleg Zhurakousky */ public class FunctionInvocationWrapper implements Function<Object, Object>, Consumer<Object>, Supplier<Object> { private final Object target; private final Type functionType; private final boolean composed; String[] acceptedOutputMimeTypes; private final String functionDefinition; private final Field headersField; FunctionInvocationWrapper(Object target, Type functionType, String functionDefinition, String... 
acceptedOutputMimeTypes) { this.target = target; this.composed = functionDefinition.contains("|") || target instanceof RoutingFunction; this.functionType = functionType; this.acceptedOutputMimeTypes = acceptedOutputMimeTypes; this.functionDefinition = functionDefinition; this.headersField = ReflectionUtils.findField(MessageHeaders.class, "headers"); this.headersField.setAccessible(true); } public String getFunctionDefinition() { return this.functionDefinition; } @Override public void accept(Object input) { this.doApply(input, true, null); } @Override public Object apply(Object input) { return this.apply(input, null); } /** * !! Experimental, may change. Is not yet intended as public API !! * * @param input input value * @param enricher enricher function instance * @return the result */ @SuppressWarnings("rawtypes") public Object apply(Object input, Function<Message, Message> enricher) { return this.doApply(input, false, enricher); } @Override public Object get() { return this.get(null); } /** * !! Experimental, may change. Is not yet intended as public API !! * * @param enricher enricher function instance * @return the result */ @SuppressWarnings("rawtypes") public Object get(Function<Message, Message> enricher) { Object input = FunctionTypeUtils.isMono(this.functionType) ? Mono.empty() : (FunctionTypeUtils.isMono(this.functionType) ? 
Flux.empty() : null); return this.doApply(input, false, enricher); } public Type getFunctionType() { return this.functionType; } public boolean isConsumer() { return FunctionTypeUtils.isConsumer(this.functionType); } public boolean isSupplier() { return FunctionTypeUtils.isSupplier(this.functionType); } public Object getTarget() { return target; } @Override public String toString() { return "definition: " + this.functionDefinition + "; type: " + this.functionType; } @SuppressWarnings({"rawtypes", "unchecked"}) private Object invokeFunction(Object input) { Message incomingMessage = null; if (!this.functionDefinition.startsWith(RoutingFunction.FUNCTION_NAME)) { if (input instanceof Message && !FunctionTypeUtils.isMessage(FunctionTypeUtils.getInputType(functionType, 0)) && ((Message) input).getHeaders().containsKey("scf-func-name")) { incomingMessage = (Message) input; input = incomingMessage.getPayload(); } } Object invocationResult = null; if (this.target instanceof Function) { invocationResult = ((Function) target).apply(input); } else if (this.target instanceof Supplier) { invocationResult = ((Supplier) target).get(); } else { if (input instanceof Flux) { invocationResult = ((Flux) input).transform(flux -> { ((Consumer) this.target).accept(flux); return Mono.ignoreElements((Flux) flux); }).then(); } else if (input instanceof Mono) { invocationResult = ((Mono) input).transform(flux -> { ((Consumer) this.target).accept(flux); return Mono.ignoreElements((Mono) flux); }).then(); } else { ((Consumer) this.target).accept(input); } } if (!(this.target instanceof Consumer) && logger.isDebugEnabled()) { logger .debug("Result of invocation of \"" + this.functionDefinition + "\" function is '" + invocationResult + "'"); } if (!(invocationResult instanceof Message)) { if (incomingMessage != null && invocationResult != null && incomingMessage.getHeaders().containsKey("scf-func-name")) { invocationResult = MessageBuilder.withPayload(invocationResult) 
.copyHeaders(incomingMessage.getHeaders()) .removeHeader(MessageHeaders.CONTENT_TYPE) .build(); } } return invocationResult; } @SuppressWarnings({ "unchecked", "rawtypes" }) private Object doApply(Object input, boolean consumer, Function<Message, Message> enricher) { if (logger.isDebugEnabled()) { logger.debug("Applying function: " + this.functionDefinition); } Object result; if (input instanceof Publisher) { input = this.composed ? input : this.convertInputPublisherIfNecessary((Publisher<?>) input, FunctionTypeUtils .getInputType(this.functionType, 0)); if (FunctionTypeUtils.isReactive(FunctionTypeUtils.getInputType(this.functionType, 0))) { result = this.invokeFunction(input); } else { if (this.composed) { return input instanceof Mono ? Mono.from((Publisher<?>) input).transform((Function) this.target) : Flux.from((Publisher<?>) input).transform((Function) this.target); } else { if (FunctionTypeUtils.isConsumer(functionType)) { result = input instanceof Mono ? Mono.from((Publisher) input).doOnNext((Consumer) this.target).then() : Flux.from((Publisher) input).doOnNext((Consumer) this.target).then(); } else { result = input instanceof Mono ? Mono.from((Publisher) input).map(value -> this.invokeFunction(value)) : Flux.from((Publisher) input).map(value -> this.invokeFunction(value)); } } } } else { Type type = FunctionTypeUtils.getInputType(this.functionType, 0); if (!this.composed && !FunctionTypeUtils .isMultipleInputArguments(this.functionType) && FunctionTypeUtils.isReactive(type)) { Publisher<?> publisher = FunctionTypeUtils.isFlux(type) ? input == null ? Flux.empty() : Flux.just(input) : input == null ? 
Mono.empty() : Mono.just(input); if (logger.isDebugEnabled()) { logger.debug("Invoking reactive function '" + this.functionType + "' with non-reactive input " + "should at least assume reactive output (e.g., Function<String, Flux<String>> f3 = catalog.lookup(\"echoFlux\");), " + "otherwise invocation will result in ClassCastException."); } result = this.invokeFunction(this.convertInputPublisherIfNecessary(publisher, FunctionTypeUtils .getInputType(this.functionType, 0))); } else { result = this.invokeFunction(this.composed ? input : (input == null ? input : this .convertInputValueIfNecessary(input, FunctionTypeUtils.getInputType(this.functionType, 0)))); } } // Outputs will be converted only if we're told how (via acceptedOutputMimeTypes), otherwise output returned as is. if (result != null && !ObjectUtils.isEmpty(this.acceptedOutputMimeTypes)) { result = result instanceof Publisher ? this .convertOutputPublisherIfNecessary((Publisher<?>) result, enricher, this.acceptedOutputMimeTypes) : this.convertOutputValueIfNecessary(result, enricher, this.acceptedOutputMimeTypes); } return result; } @SuppressWarnings({"rawtypes", "unchecked"}) private Object convertOutputValueIfNecessary(Object value, Function<Message, Message> enricher, String... acceptedOutputMimeTypes) { logger.debug("Applying type conversion on output value"); Object convertedValue = null; if (FunctionTypeUtils.isMultipleArgumentsHolder(value)) { int outputCount = FunctionTypeUtils.getOutputCount(this.functionType); Object[] convertedInputArray = new Object[outputCount]; for (int i = 0; i < outputCount; i++) { Expression parsed = new SpelExpressionParser().parseExpression("getT" + (i + 1) + "()"); Object outputArgument = parsed.getValue(value); try { convertedInputArray[i] = outputArgument instanceof Publisher ? 
this .convertOutputPublisherIfNecessary((Publisher<?>) outputArgument, enricher, acceptedOutputMimeTypes[i]) : this.convertOutputValueIfNecessary(outputArgument, enricher, acceptedOutputMimeTypes[i]); } catch (ArrayIndexOutOfBoundsException e) { throw new IllegalStateException("The number of 'acceptedOutputMimeTypes' for function '" + this.functionDefinition + "' is (" + acceptedOutputMimeTypes.length + "), which does not match the number of actual outputs of this function which is (" + outputCount + ").", e); } } convertedValue = Tuples.fromArray(convertedInputArray); } else { List<MimeType> acceptedContentTypes = MimeTypeUtils .parseMimeTypes(acceptedOutputMimeTypes[0].toString()); if (CollectionUtils.isEmpty(acceptedContentTypes)) { convertedValue = value; } else { for (int i = 0; i < acceptedContentTypes.size() && convertedValue == null; i++) { MimeType acceptedContentType = acceptedContentTypes.get(i); /* * We need to treat Iterables differently since they may represent collection of Messages * which should be converted individually */ boolean convertIndividualItem = false; if (value instanceof Iterable || (ObjectUtils.isArray(value) && !(value instanceof byte[]))) { Type outputType = FunctionTypeUtils.getOutputType(functionType, 0); if (outputType instanceof ParameterizedType) { convertIndividualItem = FunctionTypeUtils.isMessage(FunctionTypeUtils.getImmediateGenericType(outputType, 0)); } else if (outputType instanceof GenericArrayType) { convertIndividualItem = FunctionTypeUtils.isMessage(((GenericArrayType) outputType).getGenericComponentType()); } } if (convertIndividualItem) { if (ObjectUtils.isArray(value)) { value = Arrays.asList((Object[]) value); } AtomicReference<List<Message>> messages = new AtomicReference<List<Message>>(new ArrayList<>()); ((Iterable) value).forEach(element -> messages.get() .add((Message) convertOutputValueIfNecessary(element, enricher, acceptedContentType .toString()))); convertedValue = messages.get(); } else { convertedValue 
= this.convertValueToMessage(value, enricher, acceptedContentType); } } } } if (convertedValue == null) { throw new MessageConversionException(COULD_NOT_CONVERT_OUTPUT); } return convertedValue; } @SuppressWarnings({"rawtypes", "unchecked"}) private Message convertValueToMessage(Object value, Function<Message, Message> enricher, MimeType acceptedContentType) { Message outputMessage = null; if (value instanceof Message) { MessageHeaders headers = ((Message) value).getHeaders(); Map<String, Object> headersMap = (Map<String, Object>) ReflectionUtils .getField(this.headersField, headers); headersMap.put("accept", acceptedContentType); // Set the contentType header to the value of accept for "legacy" reasons. But, do not set the // contentType header to the value of accept if it is a wildcard type, as this doesn't make sense. // This also applies to the else branch below. if (acceptedContentType.isConcrete()) { headersMap.put(MessageHeaders.CONTENT_TYPE, acceptedContentType); } } else { MessageBuilder<Object> builder = MessageBuilder.withPayload(value) .setHeader("accept", acceptedContentType); if (acceptedContentType.isConcrete()) { builder.setHeader(MessageHeaders.CONTENT_TYPE, acceptedContentType); } value = builder.build(); } if (enricher != null) { value = enricher.apply((Message) value); } outputMessage = messageConverter.toMessage(((Message) value).getPayload(), ((Message) value).getHeaders()); return outputMessage; } @SuppressWarnings("rawtypes") private Publisher<?> convertOutputPublisherIfNecessary(Publisher<?> publisher, Function<Message, Message> enricher, String... acceptedOutputMimeTypes) { if (logger.isDebugEnabled()) { logger.debug("Applying type conversion on output Publisher " + publisher); } Publisher<?> result = publisher instanceof Mono ? 
Mono.from(publisher) .map(value -> this.convertOutputValueIfNecessary(value, enricher, acceptedOutputMimeTypes)) : Flux.from(publisher) .map(value -> this.convertOutputValueIfNecessary(value, enricher, acceptedOutputMimeTypes)); return result; } private Publisher<?> convertInputPublisherIfNecessary(Publisher<?> publisher, Type type) { if (logger.isDebugEnabled()) { logger.debug("Applying type conversion on input Publisher " + publisher); } Publisher<?> result = publisher instanceof Mono ? Mono.from(publisher).map(value -> this.convertInputValueIfNecessary(value, type)) : Flux.from(publisher).map(value -> this.convertInputValueIfNecessary(value, type)); return result; } private Object convertInputValueIfNecessary(Object value, Type type) { if (logger.isDebugEnabled()) { logger.debug("Applying type conversion on input value " + value); logger.debug("Function type: " + this.functionType); } Object convertedValue = value; if (FunctionTypeUtils.isMultipleArgumentsHolder(value)) { int inputCount = FunctionTypeUtils.getInputCount(functionType); Object[] convertedInputArray = new Object[inputCount]; for (int i = 0; i < inputCount; i++) { Expression parsed = new SpelExpressionParser().parseExpression("getT" + (i + 1) + "()"); Object inptArgument = parsed.getValue(value); inptArgument = inptArgument instanceof Publisher ? 
this.convertInputPublisherIfNecessary((Publisher<?>) inptArgument, FunctionTypeUtils.getInputType(functionType, i)) : this.convertInputValueIfNecessary(inptArgument, FunctionTypeUtils.getInputType(functionType, i)); convertedInputArray[i] = inptArgument; } convertedValue = Tuples.fromArray(convertedInputArray); } else { // this needs revisiting as the type is not always Class (think really complex types) Type rawType = FunctionTypeUtils.unwrapActualTypeByIndex(type, 0); if (logger.isDebugEnabled()) { logger.debug("Raw type of value: " + value + "is " + rawType); } if (rawType instanceof ParameterizedType) { rawType = ((ParameterizedType) rawType).getRawType(); } if (value instanceof Message<?>) { // see AWS adapter with Optional payload if (messageNeedsConversion(rawType, (Message<?>) value)) { convertedValue = FunctionTypeUtils.isTypeCollection(type) ? messageConverter.fromMessage((Message<?>) value, (Class<?>) rawType, type) : messageConverter.fromMessage((Message<?>) value, (Class<?>) rawType); if (logger.isDebugEnabled()) { logger.debug("Converted from Message: " + convertedValue); } if (FunctionTypeUtils.isMessage(type) || ((Message<?>) value).getHeaders().containsKey("scf-func-name")) { convertedValue = MessageBuilder.withPayload(convertedValue) .copyHeaders(((Message<?>) value).getHeaders()).build(); } } else if (!FunctionTypeUtils.isMessage(type)) { convertedValue = ((Message<?>) convertedValue).getPayload(); } } else if (rawType instanceof Class<?>) { // see AWS adapter with WildardTypeImpl and Azure with Voids if (this.isJson(value)) { convertedValue = messageConverter .fromMessage(new GenericMessage<Object>(value), (Class<?>) rawType); } else { try { convertedValue = conversionService.convert(value, (Class<?>) rawType); } catch (Exception e) { if (value instanceof String || value instanceof byte[]) { convertedValue = messageConverter .fromMessage(new GenericMessage<Object>(value), (Class<?>) rawType); } } } } } if (logger.isDebugEnabled()) { 
logger.debug("Converted input value " + convertedValue); } if (convertedValue == null) { throw new MessageConversionException(COULD_NOT_CONVERT_INPUT); } return convertedValue; } private boolean isJson(Object value) { String v = value instanceof byte[] ? new String((byte[]) value, StandardCharsets.UTF_8) : (value instanceof String ? (String) value : null); if (v != null && JsonMapper.isJsonString(v)) { return true; } return false; } private boolean messageNeedsConversion(Type rawType, Message<?> message) { Boolean skipConversion = message.getHeaders().containsKey(FunctionProperties.SKIP_CONVERSION_HEADER) ? message.getHeaders().get(FunctionProperties.SKIP_CONVERSION_HEADER, Boolean.class) : false; if (skipConversion) { return false; } return rawType instanceof Class<?> && !(message.getPayload() instanceof Optional) && !(message.getPayload().getClass().isAssignableFrom(((Class<?>) rawType))); } } }
spring-cloud-function-context/src/main/java/org/springframework/cloud/function/context/catalog/SimpleFunctionRegistry.java
/* * Copyright 2020-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.function.context.catalog; import java.lang.reflect.Field; import java.lang.reflect.GenericArrayType; import java.lang.reflect.Method; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Collectors; import org.aopalliance.intercept.MethodInterceptor; import org.aopalliance.intercept.MethodInvocation; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.reactivestreams.Publisher; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; import reactor.util.function.Tuples; import org.springframework.aop.framework.ProxyFactory; import org.springframework.aop.support.AopUtils; import org.springframework.beans.factory.BeanFactory; import org.springframework.cloud.function.context.FunctionProperties; import org.springframework.cloud.function.context.FunctionRegistration; import 
org.springframework.cloud.function.context.FunctionRegistry; import org.springframework.cloud.function.context.config.RoutingFunction; import org.springframework.cloud.function.json.JsonMapper; import org.springframework.core.convert.ConversionService; import org.springframework.expression.Expression; import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.lang.Nullable; import org.springframework.messaging.Message; import org.springframework.messaging.MessageHeaders; import org.springframework.messaging.converter.CompositeMessageConverter; import org.springframework.messaging.converter.MessageConversionException; import org.springframework.messaging.support.GenericMessage; import org.springframework.messaging.support.MessageBuilder; import org.springframework.util.Assert; import org.springframework.util.CollectionUtils; import org.springframework.util.MimeType; import org.springframework.util.MimeTypeUtils; import org.springframework.util.ObjectUtils; import org.springframework.util.ReflectionUtils; import org.springframework.util.StringUtils; /** * * Basic implementation of FunctionRegistry which maintains the cache of registered functions while * decorating them with additional features such as transparent type conversion, composition, routing etc. * * Unlike {@link BeanFactoryAwareFunctionRegistry}, this implementation does not depend on {@link BeanFactory}. * * @author Oleg Zhurakousky * * @since 3.1 */ public class SimpleFunctionRegistry implements FunctionRegistry, FunctionInspector { Log logger = LogFactory.getLog(BeanFactoryAwareFunctionRegistry.class); /** * Identifies MessageConversionExceptions that happen when input can't be converted. */ public static final String COULD_NOT_CONVERT_INPUT = "Could Not Convert Input"; /** * Identifies MessageConversionExceptions that happen when output can't be converted. 
*/ public static final String COULD_NOT_CONVERT_OUTPUT = "Could Not Convert Output"; private final Map<Object, FunctionRegistration<Object>> registrationsByFunction = new HashMap<>(); private final Map<String, FunctionRegistration<Object>> registrationsByName = new HashMap<>(); private final ConversionService conversionService; private final CompositeMessageConverter messageConverter; private List<String> declaredFunctionDefinitions; public SimpleFunctionRegistry(ConversionService conversionService, @Nullable CompositeMessageConverter messageConverter) { this.conversionService = conversionService; this.messageConverter = messageConverter; this.init(System.getProperty("spring.cloud.function.definition")); } void init(String functionDefinition) { this.declaredFunctionDefinitions = StringUtils.hasText(functionDefinition) ? Arrays.asList(functionDefinition.split(";")) : Collections.emptyList(); if (this.declaredFunctionDefinitions.contains(RoutingFunction.FUNCTION_NAME)) { Assert.isTrue(this.declaredFunctionDefinitions.size() == 1, "It is illegal to declare more then one function when using RoutingFunction"); } } @Override public <T> T lookup(Class<?> type, String definition) { return this.lookup(definition, new String[] {}); } @Override public int size() { return this.registrationsByFunction.size(); } @Override @SuppressWarnings("unchecked") public <T> T lookup(String definition, String... acceptedOutputTypes) { definition = StringUtils.hasText(definition) ? definition.replaceAll(",", "|") : ""; boolean routing = definition.contains(RoutingFunction.FUNCTION_NAME) || this.declaredFunctionDefinitions.contains(RoutingFunction.FUNCTION_NAME); if (!routing && this.declaredFunctionDefinitions.size() > 0) { if (StringUtils.hasText(definition)) { if (this.declaredFunctionDefinitions.size() > 1 && !this.declaredFunctionDefinitions.contains(definition)) { logger.warn("Attempted to access un-declared function definition '" + definition + "'. 
Declared functions are '" + this.declaredFunctionDefinitions + "' specified via `spring.cloud.function.definition` property. If the intention is to access " + "any function available in FunctionCatalog, please remove `spring.cloud.function.definition` property."); return null; } } else { if (this.declaredFunctionDefinitions.size() == 1) { definition = this.declaredFunctionDefinitions.get(0); } else if (this.declaredFunctionDefinitions.size() > 1) { logger.warn("Default function can not be mapped since multiple functions are declared " + this.declaredFunctionDefinitions); return null; } else { logger.warn("Default function can not be mapped since multiple functions are available in FunctionCatalog. " + "Please use 'spring.cloud.function.definition' property."); return null; } } } Object function = this .proxyInvokerIfNecessary((FunctionInvocationWrapper) this.compose(null, definition, acceptedOutputTypes)); return (T) function; } @Override public Set<String> getNames(Class<?> type) { Set<String> registeredNames = registrationsByFunction.values().stream().flatMap(reg -> reg.getNames().stream()) .collect(Collectors.toSet()); return registeredNames; } @SuppressWarnings("unchecked") @Override public <T> void register(FunctionRegistration<T> registration) { this.registrationsByFunction.put(registration.getTarget(), (FunctionRegistration<Object>) registration); for (String name : registration.getNames()) { this.registrationsByName.put(name, (FunctionRegistration<Object>) registration); } } @Override public FunctionRegistration<?> getRegistration(Object function) { FunctionRegistration<?> registration = this.registrationsByFunction.get(function); // need to do this due to the deployer not wrapping the actual target into FunctionInvocationWrapper // hence the lookup would need to be made by the actual target if (registration == null && function instanceof FunctionInvocationWrapper) { function = ((FunctionInvocationWrapper) function).target; } return 
this.registrationsByFunction.get(function); } Object locateFunction(String name) { return this.registrationsByName.get(name); } Type discoverFunctionType(Object function, String... names) { if (function instanceof RoutingFunction) { return this.registrationsByName.get(names[0]).getType().getType(); } return FunctionTypeUtils.discoverFunctionTypeFromClass(function.getClass()); } String discoverDefaultDefinitionFromRegistration() { String definition = null; if (this.registrationsByName.size() > 0) { Assert .isTrue(this.registrationsByName.size() == 1, "Found more then one function in local registry"); definition = this.registrationsByName.keySet().iterator().next(); } return definition; } String discoverDefaultDefinitionIfNecessary(String definition) { if (StringUtils.isEmpty(definition)) { definition = this.discoverDefaultDefinitionFromRegistration(); } else if (!this.registrationsByName.containsKey(definition) && this.registrationsByName.size() == 1) { definition = this.registrationsByName.keySet().iterator().next(); } else if (definition.endsWith("|")) { if (this.registrationsByName.size() == 2) { Set<String> fNames = this.getNames(null); definition = this.determinImpliedDefinition(fNames, definition); } } return definition; } String determinImpliedDefinition(Set<String> fNames, String originalDefinition) { if (fNames.size() == 2) { Iterator<String> iter = fNames.iterator(); String n1 = iter.next(); String n2 = iter.next(); String[] definitionName = StringUtils.delimitedListToStringArray(originalDefinition, "|"); if (definitionName[0].equals(n1)) { definitionName[1] = n2; originalDefinition = definitionName[0] + "|" + definitionName[1]; } else { definitionName[1] = n1; originalDefinition = definitionName[0] + "|" + definitionName[1]; } } return originalDefinition; } Type discovereFunctionTypeByName(String name) { return this.registrationsByName.get(name).getType().getType(); } @SuppressWarnings({"unchecked", "rawtypes"}) private Function<?, ?> compose(Class<?> 
type, String definition, String... acceptedOutputTypes) { if (logger.isInfoEnabled()) { logger.info("Looking up function '" + definition + "' with acceptedOutputTypes: " + Arrays .asList(acceptedOutputTypes)); } definition = discoverDefaultDefinitionIfNecessary(definition); if (StringUtils.isEmpty(definition)) { return null; } Function<?, ?> resultFunction = null; if (this.registrationsByName.containsKey(definition)) { Object targetFunction = this.registrationsByName.get(definition).getTarget(); Type functionType = this.registrationsByName.get(definition).getType().getType(); resultFunction = new FunctionInvocationWrapper(targetFunction, functionType, definition, acceptedOutputTypes); } else { String[] names = StringUtils.delimitedListToStringArray(definition.replaceAll(",", "|").trim(), "|"); StringBuilder composedNameBuilder = new StringBuilder(); String prefix = ""; Type originFunctionType = null; for (String name : names) { Object function = this.locateFunction(name); if (function == null) { logger.debug("Failed to discover function '" + definition + "' in function catalog. " + "Function available in catalog are: " + this.getNames(null) + ". This is generally " + "acceptable for cases where there was no intention to use functions."); return null; } else { Type functionType = this.discovereFunctionTypeByName(name); if (functionType != null && functionType.toString().contains("org.apache.kafka.streams.")) { logger .debug("Kafka Streams function '" + definition + "' is not supported by spring-cloud-function."); return null; } } composedNameBuilder.append(prefix); composedNameBuilder.append(name); FunctionRegistration<Object> registration; Type currentFunctionType = null; if (function instanceof FunctionRegistration) { registration = (FunctionRegistration<Object>) function; currentFunctionType = currentFunctionType == null ? 
registration.getType() .getType() : currentFunctionType; function = registration.getTarget(); } else { if (isFunctionPojo(function)) { Method functionalMethod = FunctionTypeUtils.discoverFunctionalMethod(function.getClass()); currentFunctionType = FunctionTypeUtils.fromFunctionMethod(functionalMethod); function = this.proxyTarget(function, functionalMethod); } String[] aliasNames = this.getAliases(name).toArray(new String[] {}); currentFunctionType = currentFunctionType == null ? this .discoverFunctionType(function, aliasNames) : currentFunctionType; registration = new FunctionRegistration<>(function, name).type(currentFunctionType); } if (function instanceof RoutingFunction) { registrationsByFunction.putIfAbsent(function, registration); registrationsByName.putIfAbsent(name, registration); } function = new FunctionInvocationWrapper(function, currentFunctionType, name, names.length > 1 ? new String[] {} : acceptedOutputTypes); if (originFunctionType == null) { originFunctionType = currentFunctionType; } // composition if (resultFunction == null) { resultFunction = (Function<?, ?>) function; } else { originFunctionType = FunctionTypeUtils.compose(originFunctionType, currentFunctionType); resultFunction = new FunctionInvocationWrapper(resultFunction.andThen((Function) function), originFunctionType, composedNameBuilder.toString(), acceptedOutputTypes); } prefix = "|"; } ((FunctionInvocationWrapper) resultFunction).acceptedOutputMimeTypes = acceptedOutputTypes; FunctionRegistration<Object> registration = new FunctionRegistration<Object>(resultFunction, definition) .type(originFunctionType); registrationsByFunction.putIfAbsent(resultFunction, registration); registrationsByName.putIfAbsent(definition, registration); } return resultFunction; } private boolean isFunctionPojo(Object function) { return !function.getClass().isSynthetic() && !(function instanceof Supplier) && !(function instanceof Function) && !(function instanceof Consumer) && 
!function.getClass().getPackage().getName().startsWith("org.springframework.cloud.function.compiler"); } /* * == OUTER PROXY === * For cases where function is POJO we need to be able to look it up as Function * as well as the type of actual pojo (e.g., MyFunction f1 = catalog.lookup("myFunction");) * To do this we wrap the target into CglibProxy (for cases when function is a POJO ) with the * actual target class (e.g., MyFunction). Meanwhile the invocation will be delegated to * the FunctionInvocationWrapper which will trigger the INNER PROXY. This effectively ensures that * conversion, composition and/or fluxification would happen (code inside of FunctionInvocationWrapper) * while the inner proxy invocation will delegate the invocation with already converted arguments * to the actual target class (e.g., MyFunction). */ private Object proxyInvokerIfNecessary(FunctionInvocationWrapper functionInvoker) { if (functionInvoker != null && AopUtils.isCglibProxy(functionInvoker.getTarget())) { if (logger.isInfoEnabled()) { logger .info("Proxying POJO function: " + functionInvoker.functionDefinition + ". . ." + functionInvoker.target .getClass()); } ProxyFactory pf = new ProxyFactory(functionInvoker.getTarget()); pf.setProxyTargetClass(true); pf.setInterfaces(Function.class, Supplier.class, Consumer.class); pf.addAdvice(new MethodInterceptor() { @Override public Object invoke(MethodInvocation invocation) throws Throwable { // this will trigger the INNER PROXY if (ObjectUtils.isEmpty(invocation.getArguments())) { Object o = functionInvoker.get(); return o; } else { // this is where we probably would need to gather all arguments into tuples return functionInvoker.apply(invocation.getArguments()[0]); } } }); return pf.getProxy(); } return functionInvoker; } /* * == INNER PROXY === * When dealing with POJO functions we still want to be able to treat them as any other * function for purposes of composition, type conversion and fluxification. 
* So this proxy will ensure that the target class can be represented as Function while delegating * any call to apply to the actual target method. * Since this proxy is part of the FunctionInvocationWrapper composition and copnversion will be applied * as tyo any other function. */ private Object proxyTarget(Object targetFunction, Method actualMethodToCall) { ProxyFactory pf = new ProxyFactory(targetFunction); pf.setProxyTargetClass(true); pf.setInterfaces(Function.class); pf.addAdvice(new MethodInterceptor() { @Override public Object invoke(MethodInvocation invocation) throws Throwable { return actualMethodToCall.invoke(invocation.getThis(), invocation.getArguments()); } }); return pf.getProxy(); } /** * Returns a list of aliases for 'functionName'. * It will do so providing the underlying implementation is based on the * system that supports name aliasing (see {@link BeanFactoryAwareFunctionRegistry} * @param functionName the name of the function * @return collection of aliases */ Collection<String> getAliases(String functionName) { return Collections.singletonList(functionName); } /** * Single wrapper for all Suppliers, Functions and Consumers managed by this * catalog. * * @author Oleg Zhurakousky */ public class FunctionInvocationWrapper implements Function<Object, Object>, Consumer<Object>, Supplier<Object> { private final Object target; private final Type functionType; private final boolean composed; String[] acceptedOutputMimeTypes; private final String functionDefinition; private final Field headersField; FunctionInvocationWrapper(Object target, Type functionType, String functionDefinition, String... 
acceptedOutputMimeTypes) { this.target = target; this.composed = functionDefinition.contains("|") || target instanceof RoutingFunction; this.functionType = functionType; this.acceptedOutputMimeTypes = acceptedOutputMimeTypes; this.functionDefinition = functionDefinition; this.headersField = ReflectionUtils.findField(MessageHeaders.class, "headers"); this.headersField.setAccessible(true); } public String getFunctionDefinition() { return this.functionDefinition; } @Override public void accept(Object input) { this.doApply(input, true, null); } @Override public Object apply(Object input) { return this.apply(input, null); } /** * !! Experimental, may change. Is not yet intended as public API !! * * @param input input value * @param enricher enricher function instance * @return the result */ @SuppressWarnings("rawtypes") public Object apply(Object input, Function<Message, Message> enricher) { return this.doApply(input, false, enricher); } @Override public Object get() { return this.get(null); } /** * !! Experimental, may change. Is not yet intended as public API !! * * @param enricher enricher function instance * @return the result */ @SuppressWarnings("rawtypes") public Object get(Function<Message, Message> enricher) { Object input = FunctionTypeUtils.isMono(this.functionType) ? Mono.empty() : (FunctionTypeUtils.isMono(this.functionType) ? 
Flux.empty() : null); return this.doApply(input, false, enricher); } public Type getFunctionType() { return this.functionType; } public boolean isConsumer() { return FunctionTypeUtils.isConsumer(this.functionType); } public boolean isSupplier() { return FunctionTypeUtils.isSupplier(this.functionType); } public Object getTarget() { return target; } @Override public String toString() { return "definition: " + this.functionDefinition + "; type: " + this.functionType; } @SuppressWarnings({"rawtypes", "unchecked"}) private Object invokeFunction(Object input) { Message incomingMessage = null; if (!this.functionDefinition.startsWith(RoutingFunction.FUNCTION_NAME)) { if (input instanceof Message && !FunctionTypeUtils.isMessage(FunctionTypeUtils.getInputType(functionType, 0))) { incomingMessage = (Message) input; input = incomingMessage.getPayload(); } } Object invocationResult = null; if (this.target instanceof Function) { invocationResult = ((Function) target).apply(input); } else if (this.target instanceof Supplier) { invocationResult = ((Supplier) target).get(); } else { if (input instanceof Flux) { invocationResult = ((Flux) input).transform(flux -> { ((Consumer) this.target).accept(flux); return Mono.ignoreElements((Flux) flux); }).then(); } else if (input instanceof Mono) { invocationResult = ((Mono) input).transform(flux -> { ((Consumer) this.target).accept(flux); return Mono.ignoreElements((Mono) flux); }).then(); } else { ((Consumer) this.target).accept(input); } } if (!(this.target instanceof Consumer) && logger.isDebugEnabled()) { logger .debug("Result of invocation of \"" + this.functionDefinition + "\" function is '" + invocationResult + "'"); } if (!(invocationResult instanceof Message)) { if (incomingMessage != null && invocationResult != null && incomingMessage.getHeaders().containsKey("scf-func-name")) { invocationResult = MessageBuilder.withPayload(invocationResult) .copyHeaders(incomingMessage.getHeaders()) .removeHeader(MessageHeaders.CONTENT_TYPE) 
.build(); } } return invocationResult; } @SuppressWarnings({ "unchecked", "rawtypes" }) private Object doApply(Object input, boolean consumer, Function<Message, Message> enricher) { if (logger.isDebugEnabled()) { logger.debug("Applying function: " + this.functionDefinition); } Object result; if (input instanceof Publisher) { input = this.composed ? input : this.convertInputPublisherIfNecessary((Publisher<?>) input, FunctionTypeUtils .getInputType(this.functionType, 0)); if (FunctionTypeUtils.isReactive(FunctionTypeUtils.getInputType(this.functionType, 0))) { result = this.invokeFunction(input); } else { if (this.composed) { return input instanceof Mono ? Mono.from((Publisher<?>) input).transform((Function) this.target) : Flux.from((Publisher<?>) input).transform((Function) this.target); } else { if (FunctionTypeUtils.isConsumer(functionType)) { result = input instanceof Mono ? Mono.from((Publisher) input).doOnNext((Consumer) this.target).then() : Flux.from((Publisher) input).doOnNext((Consumer) this.target).then(); } else { result = input instanceof Mono ? Mono.from((Publisher) input).map(value -> this.invokeFunction(value)) : Flux.from((Publisher) input).map(value -> this.invokeFunction(value)); } } } } else { Type type = FunctionTypeUtils.getInputType(this.functionType, 0); if (!this.composed && !FunctionTypeUtils .isMultipleInputArguments(this.functionType) && FunctionTypeUtils.isReactive(type)) { Publisher<?> publisher = FunctionTypeUtils.isFlux(type) ? input == null ? Flux.empty() : Flux.just(input) : input == null ? 
Mono.empty() : Mono.just(input); if (logger.isDebugEnabled()) { logger.debug("Invoking reactive function '" + this.functionType + "' with non-reactive input " + "should at least assume reactive output (e.g., Function<String, Flux<String>> f3 = catalog.lookup(\"echoFlux\");), " + "otherwise invocation will result in ClassCastException."); } result = this.invokeFunction(this.convertInputPublisherIfNecessary(publisher, FunctionTypeUtils .getInputType(this.functionType, 0))); } else { result = this.invokeFunction(this.composed ? input : (input == null ? input : this .convertInputValueIfNecessary(input, FunctionTypeUtils.getInputType(this.functionType, 0)))); } } // Outputs will be converted only if we're told how (via acceptedOutputMimeTypes), otherwise output returned as is. if (result != null && !ObjectUtils.isEmpty(this.acceptedOutputMimeTypes)) { result = result instanceof Publisher ? this .convertOutputPublisherIfNecessary((Publisher<?>) result, enricher, this.acceptedOutputMimeTypes) : this.convertOutputValueIfNecessary(result, enricher, this.acceptedOutputMimeTypes); } return result; } @SuppressWarnings({"rawtypes", "unchecked"}) private Object convertOutputValueIfNecessary(Object value, Function<Message, Message> enricher, String... acceptedOutputMimeTypes) { logger.debug("Applying type conversion on output value"); Object convertedValue = null; if (FunctionTypeUtils.isMultipleArgumentsHolder(value)) { int outputCount = FunctionTypeUtils.getOutputCount(this.functionType); Object[] convertedInputArray = new Object[outputCount]; for (int i = 0; i < outputCount; i++) { Expression parsed = new SpelExpressionParser().parseExpression("getT" + (i + 1) + "()"); Object outputArgument = parsed.getValue(value); try { convertedInputArray[i] = outputArgument instanceof Publisher ? 
this .convertOutputPublisherIfNecessary((Publisher<?>) outputArgument, enricher, acceptedOutputMimeTypes[i]) : this.convertOutputValueIfNecessary(outputArgument, enricher, acceptedOutputMimeTypes[i]); } catch (ArrayIndexOutOfBoundsException e) { throw new IllegalStateException("The number of 'acceptedOutputMimeTypes' for function '" + this.functionDefinition + "' is (" + acceptedOutputMimeTypes.length + "), which does not match the number of actual outputs of this function which is (" + outputCount + ").", e); } } convertedValue = Tuples.fromArray(convertedInputArray); } else { List<MimeType> acceptedContentTypes = MimeTypeUtils .parseMimeTypes(acceptedOutputMimeTypes[0].toString()); if (CollectionUtils.isEmpty(acceptedContentTypes)) { convertedValue = value; } else { for (int i = 0; i < acceptedContentTypes.size() && convertedValue == null; i++) { MimeType acceptedContentType = acceptedContentTypes.get(i); /* * We need to treat Iterables differently since they may represent collection of Messages * which should be converted individually */ boolean convertIndividualItem = false; if (value instanceof Iterable || (ObjectUtils.isArray(value) && !(value instanceof byte[]))) { Type outputType = FunctionTypeUtils.getOutputType(functionType, 0); if (outputType instanceof ParameterizedType) { convertIndividualItem = FunctionTypeUtils.isMessage(FunctionTypeUtils.getImmediateGenericType(outputType, 0)); } else if (outputType instanceof GenericArrayType) { convertIndividualItem = FunctionTypeUtils.isMessage(((GenericArrayType) outputType).getGenericComponentType()); } } if (convertIndividualItem) { if (ObjectUtils.isArray(value)) { value = Arrays.asList((Object[]) value); } AtomicReference<List<Message>> messages = new AtomicReference<List<Message>>(new ArrayList<>()); ((Iterable) value).forEach(element -> messages.get() .add((Message) convertOutputValueIfNecessary(element, enricher, acceptedContentType .toString()))); convertedValue = messages.get(); } else { convertedValue 
= this.convertValueToMessage(value, enricher, acceptedContentType); } } } } if (convertedValue == null) { throw new MessageConversionException(COULD_NOT_CONVERT_OUTPUT); } return convertedValue; } @SuppressWarnings({"rawtypes", "unchecked"}) private Message convertValueToMessage(Object value, Function<Message, Message> enricher, MimeType acceptedContentType) { Message outputMessage = null; if (value instanceof Message) { MessageHeaders headers = ((Message) value).getHeaders(); Map<String, Object> headersMap = (Map<String, Object>) ReflectionUtils .getField(this.headersField, headers); headersMap.put("accept", acceptedContentType); // Set the contentType header to the value of accept for "legacy" reasons. But, do not set the // contentType header to the value of accept if it is a wildcard type, as this doesn't make sense. // This also applies to the else branch below. if (acceptedContentType.isConcrete()) { headersMap.put(MessageHeaders.CONTENT_TYPE, acceptedContentType); } } else { MessageBuilder<Object> builder = MessageBuilder.withPayload(value) .setHeader("accept", acceptedContentType); if (acceptedContentType.isConcrete()) { builder.setHeader(MessageHeaders.CONTENT_TYPE, acceptedContentType); } value = builder.build(); } if (enricher != null) { value = enricher.apply((Message) value); } outputMessage = messageConverter.toMessage(((Message) value).getPayload(), ((Message) value).getHeaders()); return outputMessage; } @SuppressWarnings("rawtypes") private Publisher<?> convertOutputPublisherIfNecessary(Publisher<?> publisher, Function<Message, Message> enricher, String... acceptedOutputMimeTypes) { if (logger.isDebugEnabled()) { logger.debug("Applying type conversion on output Publisher " + publisher); } Publisher<?> result = publisher instanceof Mono ? 
Mono.from(publisher) .map(value -> this.convertOutputValueIfNecessary(value, enricher, acceptedOutputMimeTypes)) : Flux.from(publisher) .map(value -> this.convertOutputValueIfNecessary(value, enricher, acceptedOutputMimeTypes)); return result; } private Publisher<?> convertInputPublisherIfNecessary(Publisher<?> publisher, Type type) { if (logger.isDebugEnabled()) { logger.debug("Applying type conversion on input Publisher " + publisher); } Publisher<?> result = publisher instanceof Mono ? Mono.from(publisher).map(value -> this.convertInputValueIfNecessary(value, type)) : Flux.from(publisher).map(value -> this.convertInputValueIfNecessary(value, type)); return result; } private Object convertInputValueIfNecessary(Object value, Type type) { if (logger.isDebugEnabled()) { logger.debug("Applying type conversion on input value " + value); logger.debug("Function type: " + this.functionType); } Object convertedValue = value; if (FunctionTypeUtils.isMultipleArgumentsHolder(value)) { int inputCount = FunctionTypeUtils.getInputCount(functionType); Object[] convertedInputArray = new Object[inputCount]; for (int i = 0; i < inputCount; i++) { Expression parsed = new SpelExpressionParser().parseExpression("getT" + (i + 1) + "()"); Object inptArgument = parsed.getValue(value); inptArgument = inptArgument instanceof Publisher ? 
this.convertInputPublisherIfNecessary((Publisher<?>) inptArgument, FunctionTypeUtils.getInputType(functionType, i)) : this.convertInputValueIfNecessary(inptArgument, FunctionTypeUtils.getInputType(functionType, i)); convertedInputArray[i] = inptArgument; } convertedValue = Tuples.fromArray(convertedInputArray); } else { // this needs revisiting as the type is not always Class (think really complex types) Type rawType = FunctionTypeUtils.unwrapActualTypeByIndex(type, 0); if (logger.isDebugEnabled()) { logger.debug("Raw type of value: " + value + "is " + rawType); } if (rawType instanceof ParameterizedType) { rawType = ((ParameterizedType) rawType).getRawType(); } if (value instanceof Message<?>) { // see AWS adapter with Optional payload if (messageNeedsConversion(rawType, (Message<?>) value)) { convertedValue = FunctionTypeUtils.isTypeCollection(type) ? messageConverter.fromMessage((Message<?>) value, (Class<?>) rawType, type) : messageConverter.fromMessage((Message<?>) value, (Class<?>) rawType); if (logger.isDebugEnabled()) { logger.debug("Converted from Message: " + convertedValue); } if (FunctionTypeUtils.isMessage(type) || ((Message<?>) value).getHeaders().containsKey("scf-func-name")) { convertedValue = MessageBuilder.withPayload(convertedValue) .copyHeaders(((Message<?>) value).getHeaders()).build(); } } else if (!FunctionTypeUtils.isMessage(type)) { convertedValue = ((Message<?>) convertedValue).getPayload(); } } else if (rawType instanceof Class<?>) { // see AWS adapter with WildardTypeImpl and Azure with Voids if (this.isJson(value)) { convertedValue = messageConverter .fromMessage(new GenericMessage<Object>(value), (Class<?>) rawType); } else { try { convertedValue = conversionService.convert(value, (Class<?>) rawType); } catch (Exception e) { if (value instanceof String || value instanceof byte[]) { convertedValue = messageConverter .fromMessage(new GenericMessage<Object>(value), (Class<?>) rawType); } } } } } if (logger.isDebugEnabled()) { 
logger.debug("Converted input value " + convertedValue); } if (convertedValue == null) { throw new MessageConversionException(COULD_NOT_CONVERT_INPUT); } return convertedValue; } private boolean isJson(Object value) { String v = value instanceof byte[] ? new String((byte[]) value, StandardCharsets.UTF_8) : (value instanceof String ? (String) value : null); if (v != null && JsonMapper.isJsonString(v)) { return true; } return false; } private boolean messageNeedsConversion(Type rawType, Message<?> message) { Boolean skipConversion = message.getHeaders().containsKey(FunctionProperties.SKIP_CONVERSION_HEADER) ? message.getHeaders().get(FunctionProperties.SKIP_CONVERSION_HEADER, Boolean.class) : false; if (skipConversion) { return false; } return rawType instanceof Class<?> && !(message.getPayload() instanceof Optional) && !(message.getPayload().getClass().isAssignableFrom(((Class<?>) rawType))); } } }
Fix Message unwrapping condition
spring-cloud-function-context/src/main/java/org/springframework/cloud/function/context/catalog/SimpleFunctionRegistry.java
Fix Message unwrapping condition
<ide><path>pring-cloud-function-context/src/main/java/org/springframework/cloud/function/context/catalog/SimpleFunctionRegistry.java <ide> import reactor.core.publisher.Flux; <ide> import reactor.core.publisher.Mono; <ide> import reactor.util.function.Tuples; <del> <ide> <ide> import org.springframework.aop.framework.ProxyFactory; <ide> import org.springframework.aop.support.AopUtils; <ide> import org.springframework.util.StringUtils; <ide> <ide> <add> <add> <ide> /** <ide> * <ide> * Basic implementation of FunctionRegistry which maintains the cache of registered functions while <ide> private Object invokeFunction(Object input) { <ide> Message incomingMessage = null; <ide> if (!this.functionDefinition.startsWith(RoutingFunction.FUNCTION_NAME)) { <del> if (input instanceof Message && !FunctionTypeUtils.isMessage(FunctionTypeUtils.getInputType(functionType, 0))) { <add> if (input instanceof Message <add> && !FunctionTypeUtils.isMessage(FunctionTypeUtils.getInputType(functionType, 0)) <add> && ((Message) input).getHeaders().containsKey("scf-func-name")) { <ide> incomingMessage = (Message) input; <ide> input = incomingMessage.getPayload(); <ide> }
Java
mit
3dcf051a3cbff42bbf99c2b21a7e7980b4895772
0
CS2103AUG2016-F11-C2/main
package guitests; import org.junit.Test; import guitests.guihandles.TaskCardHandle; import seedu.cmdo.testutil.TestTask; import seedu.cmdo.testutil.TestUtil; import static org.junit.Assert.assertTrue; import static seedu.cmdo.logic.commands.DoneCommand.MESSAGE_DONE_TASK_SUCCESS; /* * @@author A0141128R tested and passed */ public class DoneCommandTest extends ToDoListGuiTest { @Test public void done() { TestTask[] currentList = td.getTypicalTasks(); //done the first task in the list int targetIndex = 1; assertdoneSuccess(targetIndex, currentList); currentList = updateList(targetIndex, currentList); //done a task that is the last in the list targetIndex = currentList.length; assertdoneSuccess(targetIndex, currentList); currentList = updateList(targetIndex, currentList); //done task from the middle of the list targetIndex = currentList.length/2; assertdoneSuccess(targetIndex, currentList); currentList = updateList(targetIndex, currentList); //invalid index runDoneCommand(currentList.length + 1); assertResultMessage("The task index provided is invalid"); } //run done command private void runDoneCommand(int targetIndex){ commandBox.runCommand("done " + targetIndex); } //update list private TestTask[] updateList(int targetIndex, TestTask... currentList){ return TestUtil.removeTaskFromList(currentList, targetIndex); } //confirm the list now contains all previous tasks except the deleted task private void compareList(TestTask[] expectedRemainder){ assertTrue(taskListPanel.isListMatching(expectedRemainder)); } /** * Runs the done command to change the task done status at specified index and confirms the result is correct. * @param targetIndexOneIndexed e.g. to done the first task in the list, 1 should be given as the target index. * @param currentList A copy of the current list of tasks (before done). 
*/ private void assertdoneSuccess(int targetIndexOneIndexed, final TestTask[] currentList) { TestTask taskToDone = currentList[targetIndexOneIndexed-1]; //-1 because array uses zero indexing runDoneCommand(targetIndexOneIndexed); TestTask[] expectedRemainder = updateList(targetIndexOneIndexed, currentList); //confirm the list now contains all previous tasks except the done task compareList(expectedRemainder); //confirm the result message is correct assertResultMessage(String.format(MESSAGE_DONE_TASK_SUCCESS, taskToDone)); } }
src/test/java/guitests/DoneCommandTest.java
package guitests; import org.junit.Test; import guitests.guihandles.TaskCardHandle; import seedu.cmdo.testutil.TestTask; import seedu.cmdo.testutil.TestUtil; import static org.junit.Assert.assertTrue; import static seedu.cmdo.logic.commands.DoneCommand.MESSAGE_DONE_TASK_SUCCESS; /* * @@author A0141128R tested and passed */ public class DoneCommandTest extends ToDoListGuiTest { @Test public void done() { TestTask[] currentList = td.getTypicalTasks(); //done the first task in the list int targetIndex = 1; assertdoneSuccess(targetIndex, currentList); currentList = updateList(targetIndex, currentList); //done a task that is the last in the list targetIndex = currentList.length; assertdoneSuccess(targetIndex, currentList); currentList = updateList(targetIndex, currentList); //done task from the middle of the list targetIndex = currentList.length/2; assertdoneSuccess(targetIndex, currentList); currentList = updateList(targetIndex, currentList); //invalid index runDoneCommand(currentList.length + 1); assertResultMessage("The task index provided is invalid"); } //run done command private void runDoneCommand(int targetIndex){ commandBox.runCommand("done " + targetIndex); } //update list private TestTask[] updateList(int targetIndex, TestTask... currentList){ return TestUtil.removeTaskFromList(currentList, targetIndex); } /** * Runs the done command to change the task done status at specified index and confirms the result is correct. * @param targetIndexOneIndexed e.g. to done the first task in the list, 1 should be given as the target index. * @param currentList A copy of the current list of tasks (before done). 
*/ private void assertdoneSuccess(int targetIndexOneIndexed, final TestTask[] currentList) { TestTask taskToDone = currentList[targetIndexOneIndexed-1]; //-1 because array uses zero indexing runDoneCommand(targetIndexOneIndexed); TestTask[] expectedRemainder = updateList(targetIndexOneIndexed, currentList); //confirm the list now contains all previous tasks except the done task assertTrue(taskListPanel.isListMatching(expectedRemainder)); //confirm the result message is correct assertResultMessage(String.format(MESSAGE_DONE_TASK_SUCCESS, taskToDone)); } }
slap donecommandtest
src/test/java/guitests/DoneCommandTest.java
slap donecommandtest
<ide><path>rc/test/java/guitests/DoneCommandTest.java <ide> private TestTask[] updateList(int targetIndex, TestTask... currentList){ <ide> return TestUtil.removeTaskFromList(currentList, targetIndex); <ide> } <add> <add> //confirm the list now contains all previous tasks except the deleted task <add> private void compareList(TestTask[] expectedRemainder){ <add> assertTrue(taskListPanel.isListMatching(expectedRemainder)); <add> } <add> <ide> <ide> /** <ide> * Runs the done command to change the task done status at specified index and confirms the result is correct. <ide> TestTask[] expectedRemainder = updateList(targetIndexOneIndexed, currentList); <ide> <ide> //confirm the list now contains all previous tasks except the done task <del> assertTrue(taskListPanel.isListMatching(expectedRemainder)); <add> compareList(expectedRemainder); <ide> <ide> //confirm the result message is correct <ide> assertResultMessage(String.format(MESSAGE_DONE_TASK_SUCCESS, taskToDone));
Java
apache-2.0
a670f6705e3f6cb825a58fc4ee73ee31348830ea
0
aglne/oryx,ashokpant/oryx,rahuldhote/oryx,glenioborges/oryx,elkingtonmcb/oryx,heshm/oryx
/* * Copyright (c) 2013, Cloudera, Inc. All Rights Reserved. * * Cloudera, Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"). You may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for * the specific language governing permissions and limitations under the * License. */ package com.cloudera.oryx.als.serving; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.Reader; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Queue; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.primitives.Doubles; import com.google.common.primitives.Floats; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.commons.math3.util.FastMath; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.cloudera.oryx.als.common.IDValue; import com.cloudera.oryx.als.common.NoSuchItemException; import com.cloudera.oryx.als.common.NoSuchUserException; import com.cloudera.oryx.als.common.NotReadyException; import com.cloudera.oryx.als.common.NumericIDValue; import com.cloudera.oryx.als.common.OryxRecommender; import com.cloudera.oryx.als.common.StringLongMapping; import com.cloudera.oryx.als.common.TopN; import com.cloudera.oryx.als.common.candidate.CandidateFilter; import com.cloudera.oryx.als.common.rescorer.PairRescorer; import com.cloudera.oryx.als.common.rescorer.Rescorer; 
import com.cloudera.oryx.als.serving.generation.ALSGenerationManager; import com.cloudera.oryx.als.serving.generation.Generation; import com.cloudera.oryx.common.LangUtils; import com.cloudera.oryx.common.ReloadingReference; import com.cloudera.oryx.common.collection.LongFloatMap; import com.cloudera.oryx.common.collection.LongObjectMap; import com.cloudera.oryx.common.collection.LongSet; import com.cloudera.oryx.common.iterator.FileLineIterable; import com.cloudera.oryx.common.iterator.LongPrimitiveIterator; import com.cloudera.oryx.common.math.Solver; import com.cloudera.oryx.common.math.SimpleVectorMath; import com.cloudera.oryx.common.io.IOUtils; import com.cloudera.oryx.common.io.DelimitedDataUtils; import com.cloudera.oryx.common.parallel.ExecutorUtils; /** * <p>The core implementation of {@link OryxRecommender} that lies inside the Serving Layer.</p> * * @author Sean Owen */ public final class ServerRecommender implements OryxRecommender, Closeable { private static final Logger log = LoggerFactory.getLogger(ServerRecommender.class); private final ALSGenerationManager generationManager; private final int numCores; private final ReloadingReference<ExecutorService> executor; public ServerRecommender(File localInputDir) throws IOException { Preconditions.checkNotNull(localInputDir, "No local dir"); numCores = ExecutorUtils.getParallelism(); executor = new ReloadingReference<ExecutorService>(new Callable<ExecutorService>() { @Override public ExecutorService call() { return Executors.newFixedThreadPool( 2 * numCores, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("ServerRecommender-%d").build()); } }); this.generationManager = new ALSGenerationManager(localInputDir); this.generationManager.refresh(); } @Override public void refresh() { generationManager.refresh(); } @Override public void ingest(File file) throws IOException { Reader reader = null; try { reader = IOUtils.openReaderMaybeDecompressing(file); ingest(reader); } finally { if (reader != null) { 
reader.close(); } } } @Override public void ingest(Reader reader) { for (CharSequence line : new FileLineIterable(reader)) { String[] columns = DelimitedDataUtils.decode(line); if (columns.length < 2) { throw new IllegalArgumentException("Bad line: [" + line + "]"); } String userID = columns[0]; String itemID = columns[1]; if (columns.length > 2) { String valueToken = columns[2]; float value = valueToken.isEmpty() ? Float.NaN : LangUtils.parseFloat(valueToken); if (Float.isNaN(value)) { removePreference(userID, itemID); } else { setPreference(userID, itemID, value); } } else { setPreference(userID, itemID); } } } @Override public void close() { generationManager.close(); ExecutorService executorService = executor.maybeGet(); if (executorService != null) { ExecutorUtils.shutdownNowAndAwait(executorService); } } /** * @throws NotReadyException if {@link ALSGenerationManager#getCurrentGeneration()} returns null */ private Generation getCurrentGeneration() throws NotReadyException { Generation generation = generationManager.getCurrentGeneration(); if (generation == null) { throw new NotReadyException(); } return generation; } /** * Like {@link #recommend(String, int, Rescorer)} but supplies no rescorer. */ @Override public List<IDValue> recommend(String userID, int howMany) throws NoSuchUserException, NotReadyException { return recommend(userID, howMany, null); } /** * Like {@link #recommend(String, int, boolean, Rescorer)} and specifies to not consider known items. */ @Override public List<IDValue> recommend(String userID, int howMany, Rescorer rescorer) throws NoSuchUserException, NotReadyException { return recommend(userID, howMany, false, rescorer); } /** * @param userID user for which recommendations are to be computed * @param howMany desired number of recommendations * @param considerKnownItems if true, items that the user is already associated to are candidates * for recommendation. Normally this is {@code false}. 
* @param rescorer rescoring function used to modify association strengths before ranking results * @return {@link List} of recommended {@link IDValue}s, ordered from most strongly recommend to least * @throws NoSuchUserException if the user is not known in the model * @throws NotReadyException if the recommender has no model available yet */ @Override public List<IDValue> recommend(String userID, int howMany, boolean considerKnownItems, Rescorer rescorer) throws NoSuchUserException, NotReadyException { return recommendToMany(new String[] { userID }, howMany, considerKnownItems, rescorer); } @Override public List<IDValue> recommendToMany(String[] userIDs, int howMany, boolean considerKnownItems, Rescorer rescorer) throws NoSuchUserException, NotReadyException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); Generation generation = getCurrentGeneration(); LongObjectMap<float[]> X = generation.getX(); Lock xLock = generation.getXLock().readLock(); List<float[]> userFeatures = Lists.newArrayListWithCapacity(userIDs.length); xLock.lock(); try { for (String userID : userIDs) { float[] theUserFeatures = X.get(StringLongMapping.toLong(userID)); if (theUserFeatures != null) { userFeatures.add(theUserFeatures); } } } finally { xLock.unlock(); } if (userFeatures.isEmpty()) { throw new NoSuchUserException(Arrays.toString(userIDs)); } LongObjectMap<LongSet> knownItemIDs = generation.getKnownItemIDs(); if (knownItemIDs == null && !considerKnownItems) { throw new UnsupportedOperationException("Can't ignore known items because no known items available"); } LongSet usersKnownItemIDs = null; if (!considerKnownItems) { Lock knownItemLock = generation.getKnownItemLock().readLock(); knownItemLock.lock(); try { for (String userID : userIDs) { LongSet theKnownItemIDs = knownItemIDs.get(StringLongMapping.toLong(userID)); if (theKnownItemIDs == null) { continue; } if (usersKnownItemIDs == null) { usersKnownItemIDs = theKnownItemIDs; } else { LongPrimitiveIterator it 
= usersKnownItemIDs.iterator(); while (it.hasNext()) { if (!theKnownItemIDs.contains(it.nextLong())) { it.remove(); } } } if (usersKnownItemIDs.isEmpty()) { break; } } } finally { knownItemLock.unlock(); } } float[][] userFeaturesArray = userFeatures.toArray(new float[userFeatures.size()][]); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { return multithreadedTopN(userFeaturesArray, usersKnownItemIDs, rescorer, howMany, generation.getCandidateFilter()); } finally { yLock.unlock(); } } private List<IDValue> multithreadedTopN(final float[][] userFeatures, final LongSet userKnownItemIDs, final Rescorer rescorer, final int howMany, CandidateFilter candidateFilter) throws NotReadyException { Collection<Iterator<LongObjectMap.MapEntry<float[]>>> candidateIterators = candidateFilter.getCandidateIterator(userFeatures); int numIterators = candidateIterators.size(); int parallelism = FastMath.min(numCores, numIterators); final Queue<NumericIDValue> topN = TopN.initialQueue(howMany); if (parallelism > 1) { ExecutorService executorService = executor.get(); final Iterator<Iterator<LongObjectMap.MapEntry<float[]>>> candidateIteratorsIterator = candidateIterators.iterator(); Collection<Future<Object>> futures = Lists.newArrayList(); for (int i = 0; i < numCores; i++) { futures.add(executorService.submit(new Callable<Object>() { @Override public Void call() throws NotReadyException { float[] queueLeastValue = { Float.NEGATIVE_INFINITY }; while (true) { Iterator<LongObjectMap.MapEntry<float[]>> candidateIterator; synchronized (candidateIteratorsIterator) { if (!candidateIteratorsIterator.hasNext()) { break; } candidateIterator = candidateIteratorsIterator.next(); } Iterator<NumericIDValue> partialIterator = new RecommendIterator(userFeatures, candidateIterator, userKnownItemIDs, rescorer, getCurrentGeneration().getIDMapping()); TopN.selectTopNIntoQueueMultithreaded(topN, queueLeastValue, partialIterator, howMany); } return null; } })); } 
ExecutorUtils.checkExceptions(futures); } else { for (Iterator<LongObjectMap.MapEntry<float[]>> candidateIterator : candidateIterators) { Iterator<NumericIDValue> partialIterator = new RecommendIterator(userFeatures, candidateIterator, userKnownItemIDs, rescorer, getCurrentGeneration().getIDMapping()); TopN.selectTopNIntoQueue(topN, partialIterator, howMany); } } return translateToStringIDs(TopN.selectTopNFromQueue(topN, howMany)); } private List<IDValue> translateToStringIDs(Collection<NumericIDValue> numericIDValues) throws NotReadyException { StringLongMapping mapping = getCurrentGeneration().getIDMapping(); List<IDValue> translated = Lists.newArrayListWithCapacity(numericIDValues.size()); for (NumericIDValue numericIDValue : numericIDValues) { translated.add(new IDValue(mapping.toString(numericIDValue.getID()), numericIDValue.getValue())); } return translated; } @Override public List<IDValue> recommendToAnonymous(String[] itemIDs, int howMany) throws NotReadyException, NoSuchItemException { return recommendToAnonymous(itemIDs, howMany, null); } @Override public List<IDValue> recommendToAnonymous(String[] itemIDs, float[] values, int howMany) throws NotReadyException, NoSuchItemException { return recommendToAnonymous(itemIDs, values, howMany, null); } @Override public List<IDValue> recommendToAnonymous(String[] itemIDs, int howMany, Rescorer rescorer) throws NotReadyException, NoSuchItemException { return recommendToAnonymous(itemIDs, null, howMany, rescorer); } @Override public List<IDValue> recommendToAnonymous(String[] itemIDs, float[] values, int howMany, Rescorer rescorer) throws NotReadyException, NoSuchItemException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); float[] anonymousUserFeatures = buildAnonymousUserFeatures(itemIDs, values); LongSet userKnownItemIDs = new LongSet(itemIDs.length); for (String itemID : itemIDs) { userKnownItemIDs.add(StringLongMapping.toLong(itemID)); } float[][] anonymousFeaturesAsArray = { 
anonymousUserFeatures }; Generation generation = getCurrentGeneration(); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { return multithreadedTopN(anonymousFeaturesAsArray, userKnownItemIDs, rescorer, howMany, generation.getCandidateFilter()); } finally { yLock.unlock(); } } private float[] buildAnonymousUserFeatures(String[] itemIDs, float[] values) throws NotReadyException, NoSuchItemException { Preconditions.checkArgument(values == null || values.length == itemIDs.length, "Number of values doesn't match number of items"); Generation generation = getCurrentGeneration(); LongObjectMap<float[]> Y = generation.getY(); Solver ytySolver = generation.getYTYSolver(); if (ytySolver == null) { throw new NotReadyException(); } float[] anonymousUserFeatures = null; Lock yLock = generation.getYLock().readLock(); boolean anyItemIDFound = false; for (int j = 0; j < itemIDs.length; j++) { String itemID = itemIDs[j]; float[] itemFeatures; yLock.lock(); try { itemFeatures = Y.get(StringLongMapping.toLong(itemID)); } finally { yLock.unlock(); } if (itemFeatures == null) { continue; } anyItemIDFound = true; double[] userFoldIn = ytySolver.solveFToD(itemFeatures); if (anonymousUserFeatures == null) { anonymousUserFeatures = new float[userFoldIn.length]; } double signedFoldInWeight = foldInWeight(0.0, values == null ? 
1.0f : values[j]); if (signedFoldInWeight != 0.0) { for (int i = 0; i < anonymousUserFeatures.length; i++) { anonymousUserFeatures[i] += (float) (signedFoldInWeight * userFoldIn[i]); } } } if (!anyItemIDFound) { throw new NoSuchItemException(Arrays.toString(itemIDs)); } return anonymousUserFeatures; } @Override public List<IDValue> mostPopularItems(int howMany) throws NotReadyException { return mostPopularItems(howMany, null); } @Override public List<IDValue> mostPopularItems(int howMany, Rescorer rescorer) throws NotReadyException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); Generation generation = getCurrentGeneration(); LongObjectMap<LongSet> knownItemIDs = generation.getKnownItemIDs(); if (knownItemIDs == null) { throw new UnsupportedOperationException(); } LongFloatMap itemCounts = new LongFloatMap(); Lock knownItemReadLock = generation.getKnownItemLock().readLock(); knownItemReadLock.lock(); try { Lock xReadLock = generation.getXLock().readLock(); xReadLock.lock(); try { for (LongObjectMap.MapEntry<LongSet> entry : generation.getKnownItemIDs().entrySet()) { LongSet itemIDs = entry.getValue(); synchronized (itemIDs) { LongPrimitiveIterator it = itemIDs.iterator(); while (it.hasNext()) { long itemID = it.nextLong(); itemCounts.increment(itemID, 1.0f); } } } } finally { xReadLock.unlock(); } } finally { knownItemReadLock.unlock(); } return translateToStringIDs( TopN.selectTopN(new MostPopularItemsIterator(itemCounts.entrySet().iterator(), rescorer, generation.getIDMapping()), howMany)); } /** * @param userID user ID whose preference is to be estimated * @param itemID item ID to estimate preference for * @return an estimate of the strength of the association between the user and item. These values are the * same as will be returned from {@link #recommend(String, int)}. They are opaque values and have no interpretation * other than that larger means stronger. The values are typically in the range [0,1] but are not guaranteed * to be so. 
Note that 0 will be returned if the user or item is not known in the data. * @throws NotReadyException if the recommender has no model available yet */ @Override public float estimatePreference(String userID, String itemID) throws NotReadyException { return estimatePreferences(userID, itemID)[0]; } @Override public float[] estimatePreferences(String userID, String... itemIDs) throws NotReadyException { Generation generation = getCurrentGeneration(); LongObjectMap<float[]> X = generation.getX(); float[] userFeatures; Lock xLock = generation.getXLock().readLock(); xLock.lock(); try { userFeatures = X.get(StringLongMapping.toLong(userID)); } finally { xLock.unlock(); } if (userFeatures == null) { return new float[itemIDs.length]; // All 0.0f } LongObjectMap<float[]> Y = generation.getY(); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { float[] result = new float[itemIDs.length]; for (int i = 0; i < itemIDs.length; i++) { String itemID = itemIDs[i]; float[] itemFeatures = Y.get(StringLongMapping.toLong(itemID)); if (itemFeatures != null) { float value = (float) SimpleVectorMath.dot(itemFeatures, userFeatures); Preconditions.checkState(Floats.isFinite(value), "Bad estimate"); result[i] = value; } // else leave value at 0.0f } return result; } finally { yLock.unlock(); } } @Override public float estimateForAnonymous(String toItemID, String[] itemIDs) throws NotReadyException, NoSuchItemException { return estimateForAnonymous(toItemID, itemIDs, null); } @Override public float estimateForAnonymous(String toItemID, String[] itemIDs, float[] values) throws NotReadyException, NoSuchItemException { Generation generation = getCurrentGeneration(); LongObjectMap<float[]> Y = generation.getY(); Lock yLock = generation.getYLock().readLock(); float[] toItemFeatures; yLock.lock(); try { toItemFeatures = Y.get(StringLongMapping.toLong(toItemID)); } finally { yLock.unlock(); } if (toItemFeatures == null) { throw new NoSuchItemException(toItemID); } float[] 
anonymousUserFeatures = buildAnonymousUserFeatures(itemIDs, values); return (float) SimpleVectorMath.dot(anonymousUserFeatures, toItemFeatures); } /** * Calls {@link #setPreference(String, String, float)} with value 1.0. */ @Override public void setPreference(String userID, String itemID) { setPreference(userID, itemID, 1.0f); } @Override public void setPreference(String userID, String itemID, float value) { // Record datum try { generationManager.append(userID, itemID, value); } catch (IOException ioe) { log.warn("Could not append datum; continuing", ioe); } Generation generation; try { generation = getCurrentGeneration(); } catch (NotReadyException nre) { // Corner case -- no model ready so all we can do is record (above). Don't fail the request. return; } long longUserID = StringLongMapping.toLong(userID); long longItemID = StringLongMapping.toLong(itemID); float[] userFeatures = getFeatures(longUserID, generation.getX(), generation.getXLock()); boolean newItem; Lock yReadLock = generation.getYLock().readLock(); yReadLock.lock(); try { newItem = generation.getY().get(longItemID) == null; } finally { yReadLock.unlock(); } if (newItem) { generation.getCandidateFilter().addItem(itemID); } float[] itemFeatures = getFeatures(longItemID, generation.getY(), generation.getYLock()); updateFeatures(userFeatures, itemFeatures, value, generation); LongObjectMap<LongSet> knownItemIDs = generation.getKnownItemIDs(); if (knownItemIDs != null) { LongSet userKnownItemIDs; ReadWriteLock knownItemLock = generation.getKnownItemLock(); Lock knownItemReadLock = knownItemLock.readLock(); knownItemReadLock.lock(); try { userKnownItemIDs = knownItemIDs.get(longUserID); if (userKnownItemIDs == null) { userKnownItemIDs = new LongSet(); Lock knownItemWriteLock = knownItemLock.writeLock(); knownItemReadLock.unlock(); knownItemWriteLock.lock(); try { knownItemIDs.put(longUserID, userKnownItemIDs); } finally { knownItemReadLock.lock(); knownItemWriteLock.unlock(); } } } finally { 
knownItemReadLock.unlock(); } synchronized (userKnownItemIDs) { userKnownItemIDs.add(longItemID); } } } private static float[] getFeatures(long longID, LongObjectMap<float[]> matrix, ReadWriteLock lock) { float[] features; Lock readLock = lock.readLock(); readLock.lock(); try { features = matrix.get(longID); if (features == null) { int numFeatures = countFeatures(matrix); if (numFeatures > 0) { features = new float[numFeatures]; Lock writeLock = lock.writeLock(); readLock.unlock(); writeLock.lock(); try { matrix.put(longID, features); readLock.lock(); } finally { writeLock.unlock(); } } } } finally { readLock.unlock(); } return features; } private static void updateFeatures(float[] userFeatures, float[] itemFeatures, float value, Generation generation) { if (userFeatures == null || itemFeatures == null) { return; } double signedFoldInWeight = foldInWeight(SimpleVectorMath.dot(userFeatures, itemFeatures), value); if (signedFoldInWeight == 0.0) { return; } // Here, we are using userFeatures, which is a row of X, as if it were a column of X'. // This is multiplied on the left by (X'*X)^-1. That's our left-inverse of X or at least the one // column we need. Which is what the new data point is multiplied on the left by. The result is a column; // we scale to complete the multiplication of the fold-in and add it in. Solver xtxSolver = generation.getXTXSolver(); double[] itemFoldIn = xtxSolver == null ? null : xtxSolver.solveFToD(userFeatures); // Same, but reversed. Multiply itemFeatures, which is a row of Y, on the right by (Y'*Y)^-1. // This is the right-inverse for Y', or at least the row we need. Because of the symmetries we can use // the same method above to carry out the multiply; the result is conceptually a row vector. // The result is scaled and added in. Solver ytySolver = generation.getYTYSolver(); double[] userFoldIn = ytySolver == null ? 
null : ytySolver.solveFToD(itemFeatures); if (itemFoldIn != null) { for (int i = 0; i < itemFeatures.length; i++) { double delta = signedFoldInWeight * itemFoldIn[i]; Preconditions.checkState(Doubles.isFinite(delta)); itemFeatures[i] += (float) delta; } } if (userFoldIn != null) { for (int i = 0; i < userFeatures.length; i++) { double delta = signedFoldInWeight * userFoldIn[i]; Preconditions.checkState(Doubles.isFinite(delta)); userFeatures[i] += (float) delta; } } } private static int countFeatures(LongObjectMap<float[]> M) { // assumes the read lock is held return M.isEmpty() ? 0 : M.entrySet().iterator().next().getValue().length; } /** * This function decides how much of a folded-in user or item vector should be added to a target item or user * vector, respectively, on a new action. The idea is that a positive value should push the current value towards * 1, but not further, and a negative value should push towards 0, but not further. How much to move should be * mostly proportional to the size of the value. 0 should move the result not at all; 2 ought to move twice as * much as 1, etc. This isn't quite possible but can be approximated by moving a fraction 1-1/(1+value) of the * distance towards 1, or 0. 
*/ private static double foldInWeight(double estimate, float value) { Preconditions.checkState(Doubles.isFinite(estimate)); double signedFoldInWeight; if (value > 0.0f && estimate < 1.0) { double multiplier = 1.0 - FastMath.max(0.0, estimate); signedFoldInWeight = (1.0 - 1.0 / (1.0 + value)) * multiplier; } else if (value < 0.0f && estimate > 0.0) { double multiplier = -FastMath.min(1.0, estimate); signedFoldInWeight = (1.0 - 1.0 / (1.0 - value)) * multiplier; } else { signedFoldInWeight = 0.0; } return signedFoldInWeight; } @Override public void removePreference(String userID, String itemID) { // Record datum try { generationManager.remove(userID, itemID); } catch (IOException ioe) { log.warn("Could not append datum; continuing", ioe); } Generation generation; try { generation = getCurrentGeneration(); } catch (NotReadyException nre) { // Corner case -- no model ready so all we can do is record (above). Don't fail the request. return; } long longUserID = StringLongMapping.toLong(userID); long longItemID = StringLongMapping.toLong(itemID); ReadWriteLock knownItemLock = generation.getKnownItemLock(); boolean removeUser = false; LongObjectMap<LongSet> knownItemIDs = generation.getKnownItemIDs(); if (knownItemIDs != null) { Lock knownItemReadLock = knownItemLock.readLock(); LongSet userKnownItemIDs; knownItemReadLock.lock(); try { userKnownItemIDs = knownItemIDs.get(longUserID); } finally { knownItemReadLock.unlock(); } if (userKnownItemIDs == null) { // Doesn't exist? 
So ignore this request return; } synchronized (userKnownItemIDs) { if (!userKnownItemIDs.remove(longItemID)) { // Item unknown, so ignore this request return; } removeUser = userKnownItemIDs.isEmpty(); } } // We can proceed with the request LongObjectMap<float[]> X = generation.getX(); ReadWriteLock xLock = generation.getXLock(); if (removeUser) { Lock knownItemWriteLock = knownItemLock.writeLock(); knownItemWriteLock.lock(); try { knownItemIDs.remove(longUserID); } finally { knownItemWriteLock.unlock(); } Lock xWriteLock = xLock.writeLock(); xWriteLock.lock(); try { X.remove(longUserID); } finally { xWriteLock.unlock(); } } } /** * One-argument version of {@link #mostSimilarItems(String[], int)}. */ @Override public List<IDValue> mostSimilarItems(String itemID, int howMany) throws NoSuchItemException, NotReadyException { return mostSimilarItems(itemID, howMany, null); } /** * One-argument version of {@link #mostSimilarItems(String[], int, PairRescorer)}. */ @Override public List<IDValue> mostSimilarItems(String itemID, int howMany, PairRescorer rescorer) throws NoSuchItemException, NotReadyException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); long longItemID = StringLongMapping.toLong(itemID); Generation generation = getCurrentGeneration(); LongObjectMap<float[]> Y = generation.getY(); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { float[] itemFeatures = Y.get(longItemID); if (itemFeatures == null) { throw new NoSuchItemException(itemID); } return translateToStringIDs( TopN.selectTopN(new MostSimilarItemIterator(Y.entrySet().iterator(), new long[]{longItemID}, new float[][]{itemFeatures}, rescorer, generation.getIDMapping()), howMany)); } finally { yLock.unlock(); } } /** * Like {@link #mostSimilarItems(String[], int, PairRescorer)} but uses no rescorer. 
*/ @Override public List<IDValue> mostSimilarItems(String[] itemIDs, int howMany) throws NoSuchItemException, NotReadyException { return mostSimilarItems(itemIDs, howMany, null); } /** * Computes items most similar to an item or items. The returned items have the highest average similarity * to the given items. * * @param itemIDs items for which most similar items are required * @param howMany maximum number of similar items to return; fewer may be returned * @param rescorer rescoring function used to modify item-item similarities before ranking results * @return {@link IDValue}s representing the top recommendations for the user, ordered by quality, * descending. The score associated to it is an opaque value. Larger means more similar, but no further * interpretation may necessarily be applied. * @throws NoSuchItemException if any of the items is not known in the model * @throws NotReadyException if the recommender has no model available yet */ @Override public List<IDValue> mostSimilarItems(String[] itemIDs, int howMany, PairRescorer rescorer) throws NoSuchItemException, NotReadyException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); long[] longItemIDs = new long[itemIDs.length]; for (int i = 0; i < longItemIDs.length; i++) { longItemIDs[i] = StringLongMapping.toLong(itemIDs[i]); } Generation generation = getCurrentGeneration(); LongObjectMap<float[]> Y = generation.getY(); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { List<float[]> itemFeatures = Lists.newArrayListWithCapacity(itemIDs.length); for (long longItemID : longItemIDs) { float[] features = Y.get(longItemID); if (features != null) { itemFeatures.add(features); } } if (itemFeatures.isEmpty()) { throw new NoSuchItemException(Arrays.toString(itemIDs)); } float[][] itemFeaturesArray = itemFeatures.toArray(new float[itemFeatures.size()][]); return translateToStringIDs( TopN.selectTopN(new MostSimilarItemIterator(Y.entrySet().iterator(), longItemIDs, 
itemFeaturesArray, rescorer, generation.getIDMapping()), howMany)); } finally { yLock.unlock(); } } @Override public float[] similarityToItem(String toItemID, String... itemIDs) throws NotReadyException, NoSuchItemException { Generation generation = getCurrentGeneration(); LongObjectMap<float[]> Y = generation.getY(); float[] similarities = new float[itemIDs.length]; Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { float[] toFeatures = Y.get(StringLongMapping.toLong(toItemID)); if (toFeatures == null) { throw new NoSuchItemException(toItemID); } double toFeaturesNorm = SimpleVectorMath.norm(toFeatures); boolean anyFound = false; for (int i = 0; i < similarities.length; i++) { float[] features = Y.get(StringLongMapping.toLong(itemIDs[i])); if (features == null) { similarities[i] = Float.NaN; } else { anyFound = true; double featuresNorm = SimpleVectorMath.norm(features); similarities[i] = (float) (SimpleVectorMath.dot(features, toFeatures) / (featuresNorm * toFeaturesNorm)); } } if (!anyFound) { throw new NoSuchItemException(Arrays.toString(itemIDs)); } } finally { yLock.unlock(); } return similarities; } /** * <p>Lists the items that were most influential in recommending a given item to a given user. 
Exactly how this * is determined is left to the implementation, but, generally this will return items that the user prefers * and that are similar to the given item.</p> * * <p>These values by which the results are ordered are opaque values and have no interpretation * other than that larger means stronger.</p> * * @param userID ID of user who was recommended the item * @param itemID ID of item that was recommended * @param howMany maximum number of items to return * @return {@link List} of {@link IDValue}, ordered from most influential in recommended the given * item to least * @throws NoSuchUserException if the user is not known in the model * @throws NoSuchItemException if the item is not known in the model * @throws NotReadyException if the recommender has no model available yet */ @Override public List<IDValue> recommendedBecause(String userID, String itemID, int howMany) throws NoSuchUserException, NoSuchItemException, NotReadyException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); Generation generation = getCurrentGeneration(); LongObjectMap<LongSet> knownItemIDs = generation.getKnownItemIDs(); if (knownItemIDs == null) { throw new UnsupportedOperationException("No known item IDs available"); } Lock knownItemLock = generation.getKnownItemLock().readLock(); LongSet userKnownItemIDs; knownItemLock.lock(); try { userKnownItemIDs = knownItemIDs.get(StringLongMapping.toLong(userID)); } finally { knownItemLock.unlock(); } if (userKnownItemIDs == null) { throw new NoSuchUserException(userID); } LongObjectMap<float[]> Y = generation.getY(); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { float[] features = Y.get(StringLongMapping.toLong(itemID)); if (features == null) { throw new NoSuchItemException(itemID); } LongObjectMap<float[]> toFeatures; synchronized (userKnownItemIDs) { toFeatures = new LongObjectMap<float[]>(userKnownItemIDs.size()); LongPrimitiveIterator it = userKnownItemIDs.iterator(); while (it.hasNext()) { 
long fromItemID = it.nextLong(); float[] fromFeatures = Y.get(fromItemID); toFeatures.put(fromItemID, fromFeatures); } } return translateToStringIDs( TopN.selectTopN(new RecommendedBecauseIterator(toFeatures.entrySet().iterator(), features), howMany)); } finally { yLock.unlock(); } } @Override public boolean isReady() { try { getCurrentGeneration(); return true; } catch (NotReadyException ignored) { return false; } } @Override public void await() throws InterruptedException { while (!isReady()) { Thread.sleep(1000L); } } }
als-serving/src/main/java/com/cloudera/oryx/als/serving/ServerRecommender.java
/* * Copyright (c) 2013, Cloudera, Inc. All Rights Reserved. * * Cloudera, Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"). You may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for * the specific language governing permissions and limitations under the * License. */ package com.cloudera.oryx.als.serving; import java.io.Closeable; import java.io.File; import java.io.IOException; import java.io.Reader; import java.util.Arrays; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Queue; import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import com.google.common.primitives.Doubles; import com.google.common.primitives.Floats; import com.google.common.util.concurrent.ThreadFactoryBuilder; import org.apache.commons.math3.util.FastMath; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.cloudera.oryx.als.common.IDValue; import com.cloudera.oryx.als.common.NoSuchItemException; import com.cloudera.oryx.als.common.NoSuchUserException; import com.cloudera.oryx.als.common.NotReadyException; import com.cloudera.oryx.als.common.NumericIDValue; import com.cloudera.oryx.als.common.OryxRecommender; import com.cloudera.oryx.als.common.StringLongMapping; import com.cloudera.oryx.als.common.TopN; import com.cloudera.oryx.als.common.candidate.CandidateFilter; import com.cloudera.oryx.als.common.rescorer.PairRescorer; import com.cloudera.oryx.als.common.rescorer.Rescorer; 
import com.cloudera.oryx.als.serving.generation.ALSGenerationManager; import com.cloudera.oryx.als.serving.generation.Generation; import com.cloudera.oryx.common.LangUtils; import com.cloudera.oryx.common.ReloadingReference; import com.cloudera.oryx.common.collection.LongFloatMap; import com.cloudera.oryx.common.collection.LongObjectMap; import com.cloudera.oryx.common.collection.LongSet; import com.cloudera.oryx.common.iterator.FileLineIterable; import com.cloudera.oryx.common.iterator.LongPrimitiveIterator; import com.cloudera.oryx.common.math.Solver; import com.cloudera.oryx.common.math.SimpleVectorMath; import com.cloudera.oryx.common.io.IOUtils; import com.cloudera.oryx.common.io.DelimitedDataUtils; import com.cloudera.oryx.common.parallel.ExecutorUtils; /** * <p>The core implementation of {@link OryxRecommender} that lies inside the Serving Layer.</p> * * @author Sean Owen */ public final class ServerRecommender implements OryxRecommender, Closeable { private static final Logger log = LoggerFactory.getLogger(ServerRecommender.class); private final ALSGenerationManager generationManager; private final int numCores; private final ReloadingReference<ExecutorService> executor; public ServerRecommender(File localInputDir) throws IOException { Preconditions.checkNotNull(localInputDir, "No local dir"); numCores = ExecutorUtils.getParallelism(); executor = new ReloadingReference<ExecutorService>(new Callable<ExecutorService>() { @Override public ExecutorService call() { return Executors.newFixedThreadPool( 2 * numCores, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("ServerRecommender-%d").build()); } }); this.generationManager = new ALSGenerationManager(localInputDir); this.generationManager.refresh(); } @Override public void refresh() { generationManager.refresh(); } @Override public void ingest(File file) throws IOException { Reader reader = null; try { reader = IOUtils.openReaderMaybeDecompressing(file); ingest(reader); } finally { if (reader != null) { 
reader.close(); } } } @Override public void ingest(Reader reader) { for (CharSequence line : new FileLineIterable(reader)) { String[] columns = DelimitedDataUtils.decode(line); if (columns.length < 2) { throw new IllegalArgumentException("Bad line: [" + line + "]"); } String userID = columns[0]; String itemID = columns[1]; if (columns.length > 2) { String valueToken = columns[2]; float value = valueToken.isEmpty() ? Float.NaN : LangUtils.parseFloat(valueToken); if (Float.isNaN(value)) { removePreference(userID, itemID); } else { setPreference(userID, itemID, value); } } else { setPreference(userID, itemID); } } } @Override public void close() { generationManager.close(); ExecutorService executorService = executor.maybeGet(); if (executorService != null) { ExecutorUtils.shutdownNowAndAwait(executorService); } } /** * @throws NotReadyException if {@link ALSGenerationManager#getCurrentGeneration()} returns null */ private Generation getCurrentGeneration() throws NotReadyException { Generation generation = generationManager.getCurrentGeneration(); if (generation == null) { throw new NotReadyException(); } return generation; } /** * Like {@link #recommend(String, int, Rescorer)} but supplies no rescorer. */ @Override public List<IDValue> recommend(String userID, int howMany) throws NoSuchUserException, NotReadyException { return recommend(userID, howMany, null); } /** * Like {@link #recommend(String, int, boolean, Rescorer)} and specifies to not consider known items. */ @Override public List<IDValue> recommend(String userID, int howMany, Rescorer rescorer) throws NoSuchUserException, NotReadyException { return recommend(userID, howMany, false, rescorer); } /** * @param userID user for which recommendations are to be computed * @param howMany desired number of recommendations * @param considerKnownItems if true, items that the user is already associated to are candidates * for recommendation. Normally this is {@code false}. 
* @param rescorer rescoring function used to modify association strengths before ranking results * @return {@link List} of recommended {@link IDValue}s, ordered from most strongly recommend to least * @throws NoSuchUserException if the user is not known in the model * @throws NotReadyException if the recommender has no model available yet */ @Override public List<IDValue> recommend(String userID, int howMany, boolean considerKnownItems, Rescorer rescorer) throws NoSuchUserException, NotReadyException { return recommendToMany(new String[] { userID }, howMany, considerKnownItems, rescorer); } @Override public List<IDValue> recommendToMany(String[] userIDs, int howMany, boolean considerKnownItems, Rescorer rescorer) throws NoSuchUserException, NotReadyException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); Generation generation = getCurrentGeneration(); LongObjectMap<float[]> X = generation.getX(); Lock xLock = generation.getXLock().readLock(); List<float[]> userFeatures = Lists.newArrayListWithCapacity(userIDs.length); xLock.lock(); try { for (String userID : userIDs) { float[] theUserFeatures = X.get(StringLongMapping.toLong(userID)); if (theUserFeatures != null) { userFeatures.add(theUserFeatures); } } } finally { xLock.unlock(); } if (userFeatures.isEmpty()) { throw new NoSuchUserException(Arrays.toString(userIDs)); } LongObjectMap<LongSet> knownItemIDs = generation.getKnownItemIDs(); if (knownItemIDs == null && !considerKnownItems) { throw new UnsupportedOperationException("Can't ignore known items because no known items available"); } LongSet usersKnownItemIDs = null; if (!considerKnownItems) { Lock knownItemLock = generation.getKnownItemLock().readLock(); knownItemLock.lock(); try { for (String userID : userIDs) { LongSet theKnownItemIDs = knownItemIDs.get(StringLongMapping.toLong(userID)); if (theKnownItemIDs == null) { continue; } if (usersKnownItemIDs == null) { usersKnownItemIDs = theKnownItemIDs; } else { LongPrimitiveIterator it 
= usersKnownItemIDs.iterator(); while (it.hasNext()) { if (!theKnownItemIDs.contains(it.nextLong())) { it.remove(); } } } if (usersKnownItemIDs.isEmpty()) { break; } } } finally { knownItemLock.unlock(); } } float[][] userFeaturesArray = userFeatures.toArray(new float[userFeatures.size()][]); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { return multithreadedTopN(userFeaturesArray, usersKnownItemIDs, rescorer, howMany, generation.getCandidateFilter()); } finally { yLock.unlock(); } } private List<IDValue> multithreadedTopN(final float[][] userFeatures, final LongSet userKnownItemIDs, final Rescorer rescorer, final int howMany, CandidateFilter candidateFilter) throws NotReadyException { Collection<Iterator<LongObjectMap.MapEntry<float[]>>> candidateIterators = candidateFilter.getCandidateIterator(userFeatures); int numIterators = candidateIterators.size(); int parallelism = FastMath.min(numCores, numIterators); final Queue<NumericIDValue> topN = TopN.initialQueue(howMany); if (parallelism > 1) { ExecutorService executorService = executor.get(); final Iterator<Iterator<LongObjectMap.MapEntry<float[]>>> candidateIteratorsIterator = candidateIterators.iterator(); Collection<Future<Object>> futures = Lists.newArrayList(); for (int i = 0; i < numCores; i++) { futures.add(executorService.submit(new Callable<Object>() { @Override public Void call() throws NotReadyException { float[] queueLeastValue = { Float.NEGATIVE_INFINITY }; while (true) { Iterator<LongObjectMap.MapEntry<float[]>> candidateIterator; synchronized (candidateIteratorsIterator) { if (!candidateIteratorsIterator.hasNext()) { break; } candidateIterator = candidateIteratorsIterator.next(); } Iterator<NumericIDValue> partialIterator = new RecommendIterator(userFeatures, candidateIterator, userKnownItemIDs, rescorer, getCurrentGeneration().getIDMapping()); TopN.selectTopNIntoQueueMultithreaded(topN, queueLeastValue, partialIterator, howMany); } return null; } })); } 
ExecutorUtils.checkExceptions(futures); } else { for (Iterator<LongObjectMap.MapEntry<float[]>> candidateIterator : candidateIterators) { Iterator<NumericIDValue> partialIterator = new RecommendIterator(userFeatures, candidateIterator, userKnownItemIDs, rescorer, getCurrentGeneration().getIDMapping()); TopN.selectTopNIntoQueue(topN, partialIterator, howMany); } } return translateToStringIDs(TopN.selectTopNFromQueue(topN, howMany)); } private List<IDValue> translateToStringIDs(Collection<NumericIDValue> numericIDValues) throws NotReadyException { StringLongMapping mapping = getCurrentGeneration().getIDMapping(); List<IDValue> translated = Lists.newArrayListWithCapacity(numericIDValues.size()); for (NumericIDValue numericIDValue : numericIDValues) { translated.add(new IDValue(mapping.toString(numericIDValue.getID()), numericIDValue.getValue())); } return translated; } @Override public List<IDValue> recommendToAnonymous(String[] itemIDs, int howMany) throws NotReadyException, NoSuchItemException { return recommendToAnonymous(itemIDs, howMany, null); } @Override public List<IDValue> recommendToAnonymous(String[] itemIDs, float[] values, int howMany) throws NotReadyException, NoSuchItemException { return recommendToAnonymous(itemIDs, values, howMany, null); } @Override public List<IDValue> recommendToAnonymous(String[] itemIDs, int howMany, Rescorer rescorer) throws NotReadyException, NoSuchItemException { return recommendToAnonymous(itemIDs, null, howMany, rescorer); } @Override public List<IDValue> recommendToAnonymous(String[] itemIDs, float[] values, int howMany, Rescorer rescorer) throws NotReadyException, NoSuchItemException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); float[] anonymousUserFeatures = buildAnonymousUserFeatures(itemIDs, values); LongSet userKnownItemIDs = new LongSet(itemIDs.length); for (String itemID : itemIDs) { userKnownItemIDs.add(StringLongMapping.toLong(itemID)); } float[][] anonymousFeaturesAsArray = { 
anonymousUserFeatures }; Generation generation = getCurrentGeneration(); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { return multithreadedTopN(anonymousFeaturesAsArray, userKnownItemIDs, rescorer, howMany, generation.getCandidateFilter()); } finally { yLock.unlock(); } } private float[] buildAnonymousUserFeatures(String[] itemIDs, float[] values) throws NotReadyException, NoSuchItemException { Preconditions.checkArgument(values == null || values.length == itemIDs.length, "Number of values doesn't match number of items"); Generation generation = getCurrentGeneration(); LongObjectMap<float[]> Y = generation.getY(); Solver ytySolver = generation.getYTYSolver(); if (ytySolver == null) { throw new NotReadyException(); } float[] anonymousUserFeatures = null; Lock yLock = generation.getYLock().readLock(); boolean anyItemIDFound = false; for (int j = 0; j < itemIDs.length; j++) { String itemID = itemIDs[j]; float[] itemFeatures; yLock.lock(); try { itemFeatures = Y.get(StringLongMapping.toLong(itemID)); } finally { yLock.unlock(); } if (itemFeatures == null) { continue; } anyItemIDFound = true; double[] userFoldIn = ytySolver.solveFToD(itemFeatures); if (anonymousUserFeatures == null) { anonymousUserFeatures = new float[userFoldIn.length]; } double signedFoldInWeight = foldInWeight(0.0, values == null ? 
1.0f : values[j]); if (signedFoldInWeight != 0.0) { for (int i = 0; i < anonymousUserFeatures.length; i++) { anonymousUserFeatures[i] += (float) (signedFoldInWeight * userFoldIn[i]); } } } if (!anyItemIDFound) { throw new NoSuchItemException(Arrays.toString(itemIDs)); } return anonymousUserFeatures; } @Override public List<IDValue> mostPopularItems(int howMany) throws NotReadyException { return mostPopularItems(howMany, null); } @Override public List<IDValue> mostPopularItems(int howMany, Rescorer rescorer) throws NotReadyException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); Generation generation = getCurrentGeneration(); LongObjectMap<LongSet> knownItemIDs = generation.getKnownItemIDs(); if (knownItemIDs == null) { throw new UnsupportedOperationException(); } LongFloatMap itemCounts = new LongFloatMap(); Lock knownItemReadLock = generation.getKnownItemLock().readLock(); knownItemReadLock.lock(); try { Lock xReadLock = generation.getXLock().readLock(); xReadLock.lock(); try { for (LongObjectMap.MapEntry<LongSet> entry : generation.getKnownItemIDs().entrySet()) { LongSet itemIDs = entry.getValue(); synchronized (itemIDs) { LongPrimitiveIterator it = itemIDs.iterator(); while (it.hasNext()) { long itemID = it.nextLong(); itemCounts.increment(itemID, 1.0f); } } } } finally { xReadLock.unlock(); } } finally { knownItemReadLock.unlock(); } return translateToStringIDs( TopN.selectTopN(new MostPopularItemsIterator(itemCounts.entrySet().iterator(), rescorer, generation.getIDMapping()), howMany)); } /** * @param userID user ID whose preference is to be estimated * @param itemID item ID to estimate preference for * @return an estimate of the strength of the association between the user and item. These values are the * same as will be returned from {@link #recommend(String, int)}. They are opaque values and have no interpretation * other than that larger means stronger. The values are typically in the range [0,1] but are not guaranteed * to be so. 
Note that 0 will be returned if the user or item is not known in the data. * @throws NotReadyException if the recommender has no model available yet */ @Override public float estimatePreference(String userID, String itemID) throws NotReadyException { return estimatePreferences(userID, itemID)[0]; } @Override public float[] estimatePreferences(String userID, String... itemIDs) throws NotReadyException { Generation generation = getCurrentGeneration(); LongObjectMap<float[]> X = generation.getX(); float[] userFeatures; Lock xLock = generation.getXLock().readLock(); xLock.lock(); try { userFeatures = X.get(StringLongMapping.toLong(userID)); } finally { xLock.unlock(); } if (userFeatures == null) { return new float[itemIDs.length]; // All 0.0f } LongObjectMap<float[]> Y = generation.getY(); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { float[] result = new float[itemIDs.length]; for (int i = 0; i < itemIDs.length; i++) { String itemID = itemIDs[i]; float[] itemFeatures = Y.get(StringLongMapping.toLong(itemID)); if (itemFeatures != null) { float value = (float) SimpleVectorMath.dot(itemFeatures, userFeatures); Preconditions.checkState(Floats.isFinite(value), "Bad estimate"); result[i] = value; } // else leave value at 0.0f } return result; } finally { yLock.unlock(); } } @Override public float estimateForAnonymous(String toItemID, String[] itemIDs) throws NotReadyException, NoSuchItemException { return estimateForAnonymous(toItemID, itemIDs, null); } @Override public float estimateForAnonymous(String toItemID, String[] itemIDs, float[] values) throws NotReadyException, NoSuchItemException { Generation generation = getCurrentGeneration(); LongObjectMap<float[]> Y = generation.getY(); Lock yLock = generation.getYLock().readLock(); float[] toItemFeatures; yLock.lock(); try { toItemFeatures = Y.get(StringLongMapping.toLong(toItemID)); } finally { yLock.unlock(); } if (toItemFeatures == null) { throw new NoSuchItemException(toItemID); } float[] 
anonymousUserFeatures = buildAnonymousUserFeatures(itemIDs, values); return (float) SimpleVectorMath.dot(anonymousUserFeatures, toItemFeatures); } /** * Calls {@link #setPreference(String, String, float)} with value 1.0. */ @Override public void setPreference(String userID, String itemID) { setPreference(userID, itemID, 1.0f); } @Override public void setPreference(String userID, String itemID, float value) { // Record datum try { generationManager.append(userID, itemID, value); } catch (IOException ioe) { log.warn("Could not append datum; continuing", ioe); } Generation generation; try { generation = getCurrentGeneration(); } catch (NotReadyException nre) { // Corner case -- no model ready so all we can do is record (above). Don't fail the request. return; } long longUserID = StringLongMapping.toLong(userID); long longItemID = StringLongMapping.toLong(itemID); float[] userFeatures = getFeatures(longUserID, generation.getX(), generation.getXLock()); boolean newItem; Lock yReadLock = generation.getYLock().readLock(); yReadLock.lock(); try { newItem = generation.getY().get(longItemID) == null; } finally { yReadLock.unlock(); } if (newItem) { generation.getCandidateFilter().addItem(itemID); } float[] itemFeatures = getFeatures(longItemID, generation.getY(), generation.getYLock()); updateFeatures(userFeatures, itemFeatures, value, generation); LongObjectMap<LongSet> knownItemIDs = generation.getKnownItemIDs(); if (knownItemIDs != null) { LongSet userKnownItemIDs; ReadWriteLock knownItemLock = generation.getKnownItemLock(); Lock knownItemReadLock = knownItemLock.readLock(); knownItemReadLock.lock(); try { userKnownItemIDs = knownItemIDs.get(longUserID); if (userKnownItemIDs == null) { userKnownItemIDs = new LongSet(); Lock knownItemWriteLock = knownItemLock.writeLock(); knownItemReadLock.unlock(); knownItemWriteLock.lock(); try { knownItemIDs.put(longUserID, userKnownItemIDs); } finally { knownItemReadLock.lock(); knownItemWriteLock.unlock(); } } } finally { 
knownItemReadLock.unlock(); } synchronized (userKnownItemIDs) { userKnownItemIDs.add(longItemID); } } } private static float[] getFeatures(long longID, LongObjectMap<float[]> matrix, ReadWriteLock lock) { float[] features; Lock readLock = lock.readLock(); readLock.lock(); try { features = matrix.get(longID); if (features == null) { int numFeatures = countFeatures(matrix); if (numFeatures > 0) { features = new float[numFeatures]; Lock writeLock = lock.writeLock(); readLock.unlock(); writeLock.lock(); try { matrix.put(longID, features); } finally { readLock.lock(); writeLock.unlock(); } } } } finally { readLock.unlock(); } return features; } private static void updateFeatures(float[] userFeatures, float[] itemFeatures, float value, Generation generation) { if (userFeatures == null || itemFeatures == null) { return; } double signedFoldInWeight = foldInWeight(SimpleVectorMath.dot(userFeatures, itemFeatures), value); if (signedFoldInWeight == 0.0) { return; } // Here, we are using userFeatures, which is a row of X, as if it were a column of X'. // This is multiplied on the left by (X'*X)^-1. That's our left-inverse of X or at least the one // column we need. Which is what the new data point is multiplied on the left by. The result is a column; // we scale to complete the multiplication of the fold-in and add it in. Solver xtxSolver = generation.getXTXSolver(); double[] itemFoldIn = xtxSolver == null ? null : xtxSolver.solveFToD(userFeatures); // Same, but reversed. Multiply itemFeatures, which is a row of Y, on the right by (Y'*Y)^-1. // This is the right-inverse for Y', or at least the row we need. Because of the symmetries we can use // the same method above to carry out the multiply; the result is conceptually a row vector. // The result is scaled and added in. Solver ytySolver = generation.getYTYSolver(); double[] userFoldIn = ytySolver == null ? 
null : ytySolver.solveFToD(itemFeatures); if (itemFoldIn != null) { for (int i = 0; i < itemFeatures.length; i++) { double delta = signedFoldInWeight * itemFoldIn[i]; Preconditions.checkState(Doubles.isFinite(delta)); itemFeatures[i] += (float) delta; } } if (userFoldIn != null) { for (int i = 0; i < userFeatures.length; i++) { double delta = signedFoldInWeight * userFoldIn[i]; Preconditions.checkState(Doubles.isFinite(delta)); userFeatures[i] += (float) delta; } } } private static int countFeatures(LongObjectMap<float[]> M) { // assumes the read lock is held return M.isEmpty() ? 0 : M.entrySet().iterator().next().getValue().length; } /** * This function decides how much of a folded-in user or item vector should be added to a target item or user * vector, respectively, on a new action. The idea is that a positive value should push the current value towards * 1, but not further, and a negative value should push towards 0, but not further. How much to move should be * mostly proportional to the size of the value. 0 should move the result not at all; 2 ought to move twice as * much as 1, etc. This isn't quite possible but can be approximated by moving a fraction 1-1/(1+value) of the * distance towards 1, or 0. 
*/ private static double foldInWeight(double estimate, float value) { Preconditions.checkState(Doubles.isFinite(estimate)); double signedFoldInWeight; if (value > 0.0f && estimate < 1.0) { double multiplier = 1.0 - FastMath.max(0.0, estimate); signedFoldInWeight = (1.0 - 1.0 / (1.0 + value)) * multiplier; } else if (value < 0.0f && estimate > 0.0) { double multiplier = -FastMath.min(1.0, estimate); signedFoldInWeight = (1.0 - 1.0 / (1.0 - value)) * multiplier; } else { signedFoldInWeight = 0.0; } return signedFoldInWeight; } @Override public void removePreference(String userID, String itemID) { // Record datum try { generationManager.remove(userID, itemID); } catch (IOException ioe) { log.warn("Could not append datum; continuing", ioe); } Generation generation; try { generation = getCurrentGeneration(); } catch (NotReadyException nre) { // Corner case -- no model ready so all we can do is record (above). Don't fail the request. return; } long longUserID = StringLongMapping.toLong(userID); long longItemID = StringLongMapping.toLong(itemID); ReadWriteLock knownItemLock = generation.getKnownItemLock(); boolean removeUser = false; LongObjectMap<LongSet> knownItemIDs = generation.getKnownItemIDs(); if (knownItemIDs != null) { Lock knownItemReadLock = knownItemLock.readLock(); LongSet userKnownItemIDs; knownItemReadLock.lock(); try { userKnownItemIDs = knownItemIDs.get(longUserID); } finally { knownItemReadLock.unlock(); } if (userKnownItemIDs == null) { // Doesn't exist? 
So ignore this request return; } synchronized (userKnownItemIDs) { if (!userKnownItemIDs.remove(longItemID)) { // Item unknown, so ignore this request return; } removeUser = userKnownItemIDs.isEmpty(); } } // We can proceed with the request LongObjectMap<float[]> X = generation.getX(); ReadWriteLock xLock = generation.getXLock(); if (removeUser) { Lock knownItemWriteLock = knownItemLock.writeLock(); knownItemWriteLock.lock(); try { knownItemIDs.remove(longUserID); } finally { knownItemWriteLock.unlock(); } Lock xWriteLock = xLock.writeLock(); xWriteLock.lock(); try { X.remove(longUserID); } finally { xWriteLock.unlock(); } } } /** * One-argument version of {@link #mostSimilarItems(String[], int)}. */ @Override public List<IDValue> mostSimilarItems(String itemID, int howMany) throws NoSuchItemException, NotReadyException { return mostSimilarItems(itemID, howMany, null); } /** * One-argument version of {@link #mostSimilarItems(String[], int, PairRescorer)}. */ @Override public List<IDValue> mostSimilarItems(String itemID, int howMany, PairRescorer rescorer) throws NoSuchItemException, NotReadyException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); long longItemID = StringLongMapping.toLong(itemID); Generation generation = getCurrentGeneration(); LongObjectMap<float[]> Y = generation.getY(); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { float[] itemFeatures = Y.get(longItemID); if (itemFeatures == null) { throw new NoSuchItemException(itemID); } return translateToStringIDs( TopN.selectTopN(new MostSimilarItemIterator(Y.entrySet().iterator(), new long[]{longItemID}, new float[][]{itemFeatures}, rescorer, generation.getIDMapping()), howMany)); } finally { yLock.unlock(); } } /** * Like {@link #mostSimilarItems(String[], int, PairRescorer)} but uses no rescorer. 
*/ @Override public List<IDValue> mostSimilarItems(String[] itemIDs, int howMany) throws NoSuchItemException, NotReadyException { return mostSimilarItems(itemIDs, howMany, null); } /** * Computes items most similar to an item or items. The returned items have the highest average similarity * to the given items. * * @param itemIDs items for which most similar items are required * @param howMany maximum number of similar items to return; fewer may be returned * @param rescorer rescoring function used to modify item-item similarities before ranking results * @return {@link IDValue}s representing the top recommendations for the user, ordered by quality, * descending. The score associated to it is an opaque value. Larger means more similar, but no further * interpretation may necessarily be applied. * @throws NoSuchItemException if any of the items is not known in the model * @throws NotReadyException if the recommender has no model available yet */ @Override public List<IDValue> mostSimilarItems(String[] itemIDs, int howMany, PairRescorer rescorer) throws NoSuchItemException, NotReadyException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); long[] longItemIDs = new long[itemIDs.length]; for (int i = 0; i < longItemIDs.length; i++) { longItemIDs[i] = StringLongMapping.toLong(itemIDs[i]); } Generation generation = getCurrentGeneration(); LongObjectMap<float[]> Y = generation.getY(); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { List<float[]> itemFeatures = Lists.newArrayListWithCapacity(itemIDs.length); for (long longItemID : longItemIDs) { float[] features = Y.get(longItemID); if (features != null) { itemFeatures.add(features); } } if (itemFeatures.isEmpty()) { throw new NoSuchItemException(Arrays.toString(itemIDs)); } float[][] itemFeaturesArray = itemFeatures.toArray(new float[itemFeatures.size()][]); return translateToStringIDs( TopN.selectTopN(new MostSimilarItemIterator(Y.entrySet().iterator(), longItemIDs, 
itemFeaturesArray, rescorer, generation.getIDMapping()), howMany)); } finally { yLock.unlock(); } } @Override public float[] similarityToItem(String toItemID, String... itemIDs) throws NotReadyException, NoSuchItemException { Generation generation = getCurrentGeneration(); LongObjectMap<float[]> Y = generation.getY(); float[] similarities = new float[itemIDs.length]; Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { float[] toFeatures = Y.get(StringLongMapping.toLong(toItemID)); if (toFeatures == null) { throw new NoSuchItemException(toItemID); } double toFeaturesNorm = SimpleVectorMath.norm(toFeatures); boolean anyFound = false; for (int i = 0; i < similarities.length; i++) { float[] features = Y.get(StringLongMapping.toLong(itemIDs[i])); if (features == null) { similarities[i] = Float.NaN; } else { anyFound = true; double featuresNorm = SimpleVectorMath.norm(features); similarities[i] = (float) (SimpleVectorMath.dot(features, toFeatures) / (featuresNorm * toFeaturesNorm)); } } if (!anyFound) { throw new NoSuchItemException(Arrays.toString(itemIDs)); } } finally { yLock.unlock(); } return similarities; } /** * <p>Lists the items that were most influential in recommending a given item to a given user. 
Exactly how this * is determined is left to the implementation, but, generally this will return items that the user prefers * and that are similar to the given item.</p> * * <p>These values by which the results are ordered are opaque values and have no interpretation * other than that larger means stronger.</p> * * @param userID ID of user who was recommended the item * @param itemID ID of item that was recommended * @param howMany maximum number of items to return * @return {@link List} of {@link IDValue}, ordered from most influential in recommended the given * item to least * @throws NoSuchUserException if the user is not known in the model * @throws NoSuchItemException if the item is not known in the model * @throws NotReadyException if the recommender has no model available yet */ @Override public List<IDValue> recommendedBecause(String userID, String itemID, int howMany) throws NoSuchUserException, NoSuchItemException, NotReadyException { Preconditions.checkArgument(howMany > 0, "howMany must be positive"); Generation generation = getCurrentGeneration(); LongObjectMap<LongSet> knownItemIDs = generation.getKnownItemIDs(); if (knownItemIDs == null) { throw new UnsupportedOperationException("No known item IDs available"); } Lock knownItemLock = generation.getKnownItemLock().readLock(); LongSet userKnownItemIDs; knownItemLock.lock(); try { userKnownItemIDs = knownItemIDs.get(StringLongMapping.toLong(userID)); } finally { knownItemLock.unlock(); } if (userKnownItemIDs == null) { throw new NoSuchUserException(userID); } LongObjectMap<float[]> Y = generation.getY(); Lock yLock = generation.getYLock().readLock(); yLock.lock(); try { float[] features = Y.get(StringLongMapping.toLong(itemID)); if (features == null) { throw new NoSuchItemException(itemID); } LongObjectMap<float[]> toFeatures; synchronized (userKnownItemIDs) { toFeatures = new LongObjectMap<float[]>(userKnownItemIDs.size()); LongPrimitiveIterator it = userKnownItemIDs.iterator(); while (it.hasNext()) { 
long fromItemID = it.nextLong(); float[] fromFeatures = Y.get(fromItemID); toFeatures.put(fromItemID, fromFeatures); } } return translateToStringIDs( TopN.selectTopN(new RecommendedBecauseIterator(toFeatures.entrySet().iterator(), features), howMany)); } finally { yLock.unlock(); } } @Override public boolean isReady() { try { getCurrentGeneration(); return true; } catch (NotReadyException ignored) { return false; } } @Override public void await() throws InterruptedException { while (!isReady()) { Thread.sleep(1000L); } } }
Very corner case that could be relevant to issue #60 : write lock might not be released if lock downgrade fails
als-serving/src/main/java/com/cloudera/oryx/als/serving/ServerRecommender.java
Very corner case that could be relevant to issue #60 : write lock might not be released if lock downgrade fails
<ide><path>ls-serving/src/main/java/com/cloudera/oryx/als/serving/ServerRecommender.java <ide> writeLock.lock(); <ide> try { <ide> matrix.put(longID, features); <add> readLock.lock(); <ide> } finally { <del> readLock.lock(); <ide> writeLock.unlock(); <ide> } <ide> }
Java
mit
4f0666376040e855256323608c2aa789f504e58f
0
SpongePowered/SpongeAPI,SpongePowered/SpongeAPI,SpongePowered/SpongeAPI
/* * This file is part of SpongeAPI, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.api.world.server; import org.spongepowered.api.ResourceKey; import org.spongepowered.api.world.WorldArchetype; import org.spongepowered.api.world.storage.WorldProperties; import java.io.IOException; import java.util.Collection; import java.util.Optional; import java.util.concurrent.CompletableFuture; public interface WorldManager { /** * Gets a loaded {@link ServerWorld} by it's {@link ResourceKey key}. * * @param key The key * @return The world, if found */ Optional<ServerWorld> getWorld(ResourceKey key); /** * Gets all currently loaded {@link ServerWorld}s. 
* * @return A collection of loaded worlds */ Collection<ServerWorld> getWorlds(); /** * Gets the default {@link WorldProperties} {@link ResourceKey key} the {@link WorldManager} creates and loads * during the lifecycle * * @return The key */ ResourceKey getDefaultPropertiesKey(); /** * Gets the default loaded {@link WorldProperties} or {@link Optional#empty()} if none has been loaded. * * <p>It is up to the implementation to determine when and if a default is loaded.</p> * * @return The world properties */ Optional<WorldProperties> getDefaultProperties(); /** * Creates a new {@link WorldProperties} from the given * {@link WorldArchetype}. For the creation of the {@link WorldArchetype} please see {@link WorldArchetype.Builder}. * * <p>It is up to the implementation to define an {@link Optional#empty()} result.</p> * * <p>The returned properties should be considered "virtual" as it will not exist on the disk nor will the manager consider it "offline data". * * To write it to the default storage container, use one of the following methods: * <ul> <li>{@link #loadWorld(WorldProperties)}</li> <li>{@link #saveProperties(WorldProperties)}</li> </ul> * </p> * * @param key The key * @param archetype The archetype for creation * @return The new world properties, if the creation was successful */ CompletableFuture<WorldProperties> createProperties(ResourceKey key, WorldArchetype archetype); /** * Loads a {@link ServerWorld} specified by a {@link ResourceKey key}. If a world with * the given name is already loaded then it is returned instead. * * @param key The key * @return The world */ CompletableFuture<ServerWorld> loadWorld(ResourceKey key); /** * Loads a {@link ServerWorld} from the default storage container. 
* * <p>If the world associated with the given properties is already loaded then it is returned instead.</p> * * <p>If the given properties already has data within the default storage container it will be loaded instead.</p> * * <p>If none of the above, the properties will be wrote to the default storage container as a result of the load</p> * * @param properties The properties of the world to load * @return The world */ CompletableFuture<ServerWorld> loadWorld(WorldProperties properties); /** * Unloads the {@link ServerWorld} registered to the {@link ResourceKey key}. * * <p>The conditions for how and when a world may be unloaded are left up to the * implementation to define.</p> * * @param key The key to unload * @return Whether the operation was successful */ CompletableFuture<Boolean> unloadWorld(ResourceKey key); /** * Unloads a {@link ServerWorld}. * * <p>The conditions for how and when a world may be unloaded are left up to the * implementation to define.</p> * * @param world The world to unload * @return Whether the operation was successful */ CompletableFuture<Boolean> unloadWorld(ServerWorld world); /** * Gets the {@link WorldProperties} by it's {@link ResourceKey key}. If a world with the given * name is loaded then this is equivalent to calling {@link ServerWorld#getProperties()}. * * However, if no loaded world is found then an attempt will be made to match to a known unloaded world. * * @param key The key * @return The world properties, if found */ Optional<WorldProperties> getProperties(ResourceKey key); /** * Gets the properties of all unloaded worlds. * * <p>It is left up to the implementation to determine it's offline worlds and no contract is enforced * that states that they must returns all unloaded worlds that actually exist.</p> * * @return A collection of world properties */ Collection<WorldProperties> getUnloadedProperties(); /** * Gets the properties of all worlds, online and offline. 
* * <p>It is left up to the implementation to determine it's offline worlds and no contract is enforced * that states that they must returns all unloaded worlds that actually exist.</p> * * @return A collection of world properties */ Collection<WorldProperties> getAllProperties(); /** * Persists the given {@link WorldProperties} to the world storage for it, * updating any modified values. * * @param properties The world properties to save * @return True if the save was successful, can fail exceptionally */ CompletableFuture<Boolean> saveProperties(WorldProperties properties); /** * Copies a {@link WorldProperties properties} under the provided {@link ResourceKey key}. * * <p>If the world is already loaded then the following will occur:</p> * * <ul> * <li>World is unloaded</li> * <li>World is copied</li> * <li>Original world is loaded</li> * </ul> * * <p>It is left up to the implementation on exactly what is copied.</p> * * @param key The key * @param copyKey The copied key for the new properties * @return The copied properties */ CompletableFuture<WorldProperties> copyWorld(ResourceKey key, ResourceKey copyKey); /** * Renames a {@link WorldProperties properties}. * * <p>If the world is loaded, the following will occur:</p> * * <ul> * <li>World is saved</li> * <li>World is unloaded</li> * <li>World is renamed, up to the implementation to determine how so</li> * <li>World is loaded with it's new key</li> * </ul> * * <p>The default Minecraft worlds cannot be renamed. Additionally, it is left up to the * implementation on exactly what is renamed.</p> * * @param key The key * @param newKey The new key * @return The renamed properties */ CompletableFuture<WorldProperties> renameWorld(ResourceKey key, ResourceKey newKey); /** * Deletes a {@link WorldProperties properties} by it's {@link ResourceKey key}. 
* * <p>If the world is loaded, the following will occur:</p> * * <u1> * <li>World is unloaded</li> * <li>World is deleted</li> * </u1> * * <p>The default Minecraft world, based on the implementation, cannot be deleted. Additionally, * it is left up to the implementation on what is deleted.</p> * * @param key The key * @return True if the deletion was successful. */ CompletableFuture<Boolean> deleteWorld(ResourceKey key); }
src/main/java/org/spongepowered/api/world/server/WorldManager.java
/* * This file is part of SpongeAPI, licensed under the MIT License (MIT). * * Copyright (c) SpongePowered <https://www.spongepowered.org> * Copyright (c) contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package org.spongepowered.api.world.server; import org.spongepowered.api.ResourceKey; import org.spongepowered.api.world.WorldArchetype; import org.spongepowered.api.world.storage.WorldProperties; import java.io.IOException; import java.util.Collection; import java.util.Optional; import java.util.concurrent.CompletableFuture; public interface WorldManager { /** * Gets a loaded {@link ServerWorld} by it's {@link ResourceKey key}. * * @param key The key * @return The world, if found */ Optional<ServerWorld> getWorld(ResourceKey key); /** * Gets all currently loaded {@link ServerWorld}s. 
* * @return A collection of loaded worlds */ Collection<ServerWorld> getWorlds(); /** * Gets the default {@link WorldProperties} {@link ResourceKey key} the {@link WorldManager} creates and loads * during the lifecycle * * @return The key */ ResourceKey getDefaultPropertiesKey(); /** * Gets the default loaded {@link WorldProperties} or {@link Optional#empty()} if none has been loaded. * * <p>It is up to the implementation to determine when and if a default is loaded.</p> * * @return The world properties */ Optional<WorldProperties> getDefaultProperties(); /** * Creates a new {@link WorldProperties} from the given * {@link WorldArchetype}. For the creation of the {@link WorldArchetype} please see {@link WorldArchetype.Builder}. * * <p>It is up to the implementation to define an {@link Optional#empty()} result.</p> * * <p>The returned properties should be considered "virtual" as it will not exist on the disk nor will the manager consider it "offline data". * * To write it to the default storage container, use one of the following methods: * <ul> <li>{@link #loadWorld(WorldProperties)}</li> <li>{@link #saveProperties(WorldProperties)}</li> </ul> * </p> * * @param key The key * @param archetype The archetype for creation * @return The new world properties, if the creation was successful */ CompletableFuture<WorldProperties> createProperties(ResourceKey key, WorldArchetype archetype); /** * Loads a {@link ServerWorld} specified by a {@link ResourceKey key}. If a world with * the given name is already loaded then it is returned instead. * * @param key The key * @return The world */ CompletableFuture<ServerWorld> loadWorld(ResourceKey key); /** * Loads a {@link ServerWorld} from the default storage container. 
* * <p>If the world associated with the given properties is already loaded then it is returned instead.</p> * * <p>If the given properties already has data within the default storage container it will be loaded instead.</p> * * <p>If none of the above, the properties will be wrote to the default storage container as a result of the load</p> * * @param properties The properties of the world to load * @return The world */ CompletableFuture<ServerWorld> loadWorld(WorldProperties properties); /** * Unloads the {@link ServerWorld} registered to the {@link ResourceKey key}. * * <p>The conditions for how and when a world may be unloaded are left up to the * implementation to define.</p> * * @param key The key to unload * @return Whether the operation was successful */ CompletableFuture<Boolean> unloadWorld(ResourceKey key); /** * Unloads a {@link ServerWorld}. * * <p>The conditions for how and when a world may be unloaded are left up to the * implementation to define.</p> * * @param world The world to unload * @return Whether the operation was successful */ CompletableFuture<Boolean> unloadWorld(ServerWorld world); /** * Gets the {@link WorldProperties} by it's {@link ResourceKey key}. If a world with the given * name is loaded then this is equivalent to calling {@link ServerWorld#getProperties()}. * * However, if no loaded world is found then an attempt will be made to match to a known unloaded world. * * @param key The key * @return The world properties, if found */ Optional<WorldProperties> getProperties(ResourceKey key); /** * Gets the properties of all unloaded worlds. * * <p>It is left up to the implementation to determine it's offline worlds and no contract is enforced * that states that they must returns all unloaded worlds that actually exist.</p> * * @return A collection of world properties */ Collection<WorldProperties> getUnloadedProperties(); /** * Gets the properties of all worlds, online and offline. 
* * <p>It is left up to the implementation to determine it's offline worlds and no contract is enforced * that states that they must returns all unloaded worlds that actually exist.</p> * * @return A collection of world properties */ Collection<WorldProperties> getAllProperties(); /** * Persists the given {@link WorldProperties} to the world storage for it, * updating any modified values. * * @param properties The world properties to save * @return True if the save was successful, can fail exceptionally */ CompletableFuture<Boolean> saveProperties(WorldProperties properties); /** * Creates a world copy asynchronously using the new name given and returns * the new world properties if the copy was possible. * * <p>If the world is already loaded then the following will occur:</p> * * <ul> * <li>World is saved.</li> * <li>World saving is disabled.</li> * <li>World is copied. </li> * <li>World saving is enabled.</li> * </ul> * * @param key The key * @param copyValue The copied value for the new properties * @return The world properties */ CompletableFuture<WorldProperties> copyWorld(ResourceKey key, String copyValue); /** * Renames a {@link WorldProperties}. * * <p>If the properties represents an online world, an attempt will be made to unload it. Once unloaded and if * the attempt is successful, an attempt will be made to load it. It is left up to the implementation to determine * the conditions for a rename to be successful.</p> * * @param key The key * @param newValue The new value * @return The world properties */ CompletableFuture<WorldProperties> renameWorld(ResourceKey key, String newValue); /** * Deletes the {@link WorldProperties} by it's {@link ResourceKey key}. * * @param key The key * @return True if the deletion was successful. */ CompletableFuture<Boolean> deleteWorld(ResourceKey key); }
Better docs and enforce key usage for rename/copy/delete Signed-off-by: Chris Sanders <[email protected]>
src/main/java/org/spongepowered/api/world/server/WorldManager.java
Better docs and enforce key usage for rename/copy/delete
<ide><path>rc/main/java/org/spongepowered/api/world/server/WorldManager.java <ide> CompletableFuture<Boolean> saveProperties(WorldProperties properties); <ide> <ide> /** <del> * Creates a world copy asynchronously using the new name given and returns <del> * the new world properties if the copy was possible. <add> * Copies a {@link WorldProperties properties} under the provided {@link ResourceKey key}. <ide> * <ide> * <p>If the world is already loaded then the following will occur:</p> <ide> * <ide> * <ul> <del> * <li>World is saved.</li> <del> * <li>World saving is disabled.</li> <del> * <li>World is copied. </li> <del> * <li>World saving is enabled.</li> <add> * <li>World is unloaded</li> <add> * <li>World is copied</li> <add> * <li>Original world is loaded</li> <ide> * </ul> <ide> * <del> * @param key The key <del> * @param copyValue The copied value for the new properties <del> * @return The world properties <del> */ <del> CompletableFuture<WorldProperties> copyWorld(ResourceKey key, String copyValue); <del> <del> /** <del> * Renames a {@link WorldProperties}. <del> * <del> * <p>If the properties represents an online world, an attempt will be made to unload it. Once unloaded and if <del> * the attempt is successful, an attempt will be made to load it. It is left up to the implementation to determine <del> * the conditions for a rename to be successful.</p> <del> * <del> * @param key The key <del> * @param newValue The new value <del> * @return The world properties <del> */ <del> CompletableFuture<WorldProperties> renameWorld(ResourceKey key, String newValue); <del> <del> /** <del> * Deletes the {@link WorldProperties} by it's {@link ResourceKey key}. 
<add> * <p>It is left up to the implementation on exactly what is copied.</p> <add> * <add> * @param key The key <add> * @param copyKey The copied key for the new properties <add> * @return The copied properties <add> */ <add> CompletableFuture<WorldProperties> copyWorld(ResourceKey key, ResourceKey copyKey); <add> <add> /** <add> * Renames a {@link WorldProperties properties}. <add> * <add> * <p>If the world is loaded, the following will occur:</p> <add> * <add> * <ul> <add> * <li>World is saved</li> <add> * <li>World is unloaded</li> <add> * <li>World is renamed, up to the implementation to determine how so</li> <add> * <li>World is loaded with it's new key</li> <add> * </ul> <add> * <add> * <p>The default Minecraft worlds cannot be renamed. Additionally, it is left up to the <add> * implementation on exactly what is renamed.</p> <add> * <add> * @param key The key <add> * @param newKey The new key <add> * @return The renamed properties <add> */ <add> CompletableFuture<WorldProperties> renameWorld(ResourceKey key, ResourceKey newKey); <add> <add> /** <add> * Deletes a {@link WorldProperties properties} by it's {@link ResourceKey key}. <add> * <add> * <p>If the world is loaded, the following will occur:</p> <add> * <add> * <u1> <add> * <li>World is unloaded</li> <add> * <li>World is deleted</li> <add> * </u1> <add> * <add> * <p>The default Minecraft world, based on the implementation, cannot be deleted. Additionally, <add> * it is left up to the implementation on what is deleted.</p> <ide> * <ide> * @param key The key <ide> * @return True if the deletion was successful.
Java
mit
2e8105d11fca53b7d60296449f97e52df0e4aaa9
0
solita/functional-utils
package fi.solita.utils.functional; import static fi.solita.utils.functional.Collections.newList; import static fi.solita.utils.functional.Collections.newMap; import static fi.solita.utils.functional.Collections.newSet; import static fi.solita.utils.functional.Option.None; import static fi.solita.utils.functional.Option.Some; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import fi.solita.utils.functional.Iterables.ConcatenatingIterable; import fi.solita.utils.functional.Iterables.FilteringIterable; import fi.solita.utils.functional.Iterables.RangeIterable; import fi.solita.utils.functional.Iterables.RepeatingIterable; import fi.solita.utils.functional.Iterables.TransformingIterable; import fi.solita.utils.functional.Iterables.TransposingIterable; import fi.solita.utils.functional.Iterables.ZippingIterable; public abstract class Functional { /** * Returns a new iterable <code>a - b</code>, i.e. one that contains all elements of <code>a</code> that * don't exist in <code>b</code>. */ public static <T> Iterable<T> subtract(Iterable<T> a, final Collection<T> b) { return filter(a, new Predicate<T>() { @Override public boolean accept(T object) { return !b.contains(object); } }); } public static <T> Iterable<T> subtract(T[] a, final Collection<T> b) { return subtract(newList(a), b); } public static <T> Iterable<T> subtract(Iterable<T> a, final T[] b) { return subtract(a, newSet(b)); } public static <T> Iterable<T> subtract(T[] a, final T[] b) { return subtract(newList(a), newSet(b)); } public static <K, V> Option<V> find(Map<? extends K, V> map, K key) { return Option.of(map.get(key)); } public static <T> Option<T> find(T[] elements, Apply<? super T, Boolean> filter) { return find(Arrays.asList(elements), filter); } public static <T> Option<T> find(Iterable<T> elements, Apply<? 
super T, Boolean> filter) { return headOption(filter(elements, filter)); } public static <T> Iterable<T> filter(T[] elements, Apply<? super T, Boolean> filter) { return filter(Arrays.asList(elements), filter); } public static <T> Iterable<T> filter(Iterable<T> elements, Apply<? super T, Boolean> filter) { return new FilteringIterable<T>(elements, filter); } public static <T, E> Map<T, E> filter(Map<T, E> map, Apply<Map.Entry<T, E>, Boolean> filter) { return Collections.newMap(filter(map.entrySet(), filter)); } public static <S, T> Iterable<T> map(S[] elements, Apply<? super S, ? extends T> transformer) { return map(Arrays.asList(elements), transformer); } public static <S, T> Iterable<T> map(Iterable<S> elements, Apply<? super S, ? extends T> transformer) { return new TransformingIterable<S,T>(elements, transformer); } public static <K1, V1, K2, V2> Map<K2, V2> map(Map<K1, V1> source, Apply<Map.Entry<K1, V1>, Map.Entry<K2, V2>> transformer) { return Collections.newMap(map(source.entrySet(), transformer)); } public static <S, T> Iterable<T> flatMap(S[] elements, Apply<? super S, ? extends Iterable<T>> transformer) { return flatMap(Arrays.asList(elements), transformer); } public static <S, T> Iterable<T> flatMap(Iterable<S> elements, Apply<? super S, ? extends Iterable<? extends T>> transformer) { return flatten(map(elements, transformer)); } public static <T> Iterable<T> flatten(T[][] elements) { return flatten(map(elements, new Transformer<T[], Iterable<T>>() { @Override public Iterable<T> transform(T[] source) { return Arrays.asList(source); } })); } public static <T> Iterable<T> flatten(Iterable<? extends T>[] elements) { return flatten(map(elements, Function1.<Iterable<? extends T>>id())); } public static <T> Iterable<T> flatten(Iterable<? extends Iterable<? extends T>> elements) { return new ConcatenatingIterable<T>(elements); } public static <T> void foreach(T[] elements, Apply<? 
super T, Void> procedure) { foreach(Arrays.asList(elements), procedure); } public static <T> void foreach(Iterable<T> elements, Apply<? super T, Void> procedure) { for (T t: elements) { procedure.apply(t); } } /** * Non-lazy */ public static <T> Iterable<List<T>> grouped(T[] elements, int size) { return grouped(Arrays.asList(elements), size); } /** * Non-lazy */ public static <T> Iterable<List<T>> grouped(Iterable<T> elements, int size) { if (size <= 0) { throw new IllegalArgumentException("size must be positive"); } List<List<T>> target = Collections.newList(); Iterator<T> it = elements.iterator(); while (it.hasNext()) { List<T> group = Collections.newListOfSize(size); for (@SuppressWarnings("unused") int i: range(1, size)) { if (it.hasNext()) { group.add(it.next()); } } if (!group.isEmpty()) { target.add(group); } } return target; } /** * Non-lazy */ public static <G, T> Map<G, List<T>> groupBy(T[] elements, Apply<? super T,G> f) { return groupBy(Arrays.asList(elements), f); } /** * Non-lazy */ public static <G, T> Map<G, List<T>> groupBy(Iterable<T> elements, Apply<? 
super T,G> f) { Map<G, List<T>> target = newMap(); for (T t: elements) { G g = f.apply(t); Option<List<T>> groupOption = find(target, g); List<T> group; if (groupOption.isDefined()) { group = groupOption.get(); } else { group = Collections.newList(); target.put(g, group); } group.add(t); } return target; } public static <T> T head(T[] elements) { return head(Arrays.asList(elements)); } public static <T> T head(Iterable<T> elements) { return elements.iterator().next(); } public static <T> Option<T> headOption(T[] elements) { return headOption(Arrays.asList(elements)); } public static <T> Option<T> headOption(Iterable<T> elements) { Iterator<T> it = elements.iterator(); if (it.hasNext()) { return Some(it.next()); } else { return None(); } } public static <T> Iterable<T> tail(T[] elements) { return tail(Arrays.asList(elements)); } public static <T> Iterable<T> tail(Iterable<T> elements) { return drop(elements, 1); } public static <T> T last(T[] elements) { return last(Arrays.asList(elements)); } public static <T> T last(Iterable<T> elements) { Iterator<T> it = elements.iterator(); T ret = it.next(); while (it.hasNext()) { ret = it.next(); } return ret; } public static <T> Option<T> lastOption(T[] elements) { return lastOption(Arrays.asList(elements)); } public static <T> Option<T> lastOption(Iterable<T> elements) { Iterator<T> it = elements.iterator(); if (it.hasNext()) { T ret = it.next(); while (it.hasNext()) { ret = it.next(); } return Some(ret); } else { return None(); } } public static <T> Iterable<T> init(T[] elements) { return init(Arrays.asList(elements)); } public static <T> Iterable<T> init(Iterable<T> elements) { return take(elements, size(elements)-1); } public static <T> Iterable<T> take(T[] elements, int amount) { return take(Arrays.asList(elements), amount); } public static <T> Iterable<T> take(final Iterable<T> elements, final int amount) { return new Iterables.TakingIterable<T>(elements, amount); } public static <T> Iterable<T> drop(T[] elements, int 
amount) { return drop(Arrays.asList(elements), amount); } public static <T> Iterable<T> drop(final Iterable<T> elements, final int amount) { return new Iterables.DroppingIterable<T>(elements, amount); } public static <T> Iterable<T> takeWhile(T[] elements, Apply<? super T, Boolean> predicate) { return takeWhile(Arrays.asList(elements), predicate); } public static <T> Iterable<T> takeWhile(final Iterable<T> elements, final Apply<? super T, Boolean> predicate) { return new Iterable<T>() { @Override public Iterator<T> iterator() { return new Iterator<T>() { private Option<T> next; private Iterator<T> source = elements.iterator(); { readNext(); } @Override public boolean hasNext() { return next.isDefined(); } private void readNext() { if (!source.hasNext()) { next = None(); } else { T n = source.next(); if (predicate.apply(n)) { next = Some(n); } else { next = None(); } } } @Override public T next() { if (!next.isDefined()) { throw new NoSuchElementException(); } T ret = next.get(); readNext(); return ret; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } }; } public static <T> Iterable<T> dropWhile(T[] elements, Apply<? super T, Boolean> predicate) { return dropWhile(Arrays.asList(elements), predicate); } public static <T> Iterable<T> dropWhile(final Iterable<T> elements, final Apply<? super T, Boolean> predicate) { return new Iterable<T>() { @Override public Iterator<T> iterator() { return new Iterator<T>() { private boolean dropping = true; private Iterator<T> source = elements.iterator(); private Option<T> next; { readNext(); } @Override public boolean hasNext() { return next.isDefined(); } private void readNext() { next = source.hasNext() ? Some(source.next()) : Option.<T>None(); while (dropping && next.isDefined() && predicate.apply(next.get())) { next = source.hasNext() ? 
Some(source.next()) : Option.<T>None(); } dropping = false; } @Override public T next() { if (!next.isDefined()) { throw new NoSuchElementException(); } T ret = next.get(); readNext(); return ret; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } }; } public static <T> Pair<Iterable<T>, Iterable<T>> span(final Iterable<T> elements, final Apply<? super T, Boolean> predicate) { // TODO: a more efficient implementation return Pair.of(takeWhile(elements, predicate), dropWhile(elements, predicate)); } public static boolean isEmpty(Iterable<?> elements) { return !elements.iterator().hasNext(); } public static int size(Iterable<?> elements) { return Iterables.resolveSize.apply(elements).getOrElse(Collections.newList(elements).size()); } public static <T> boolean contains(T[] elements, T element) { return contains(Arrays.asList(elements), element); } public static <T,E> boolean contains(Iterable<T> elements, T element) { return exists(elements, Predicates.equalTo(element)); } public static <T> boolean exists(T[] elements, Apply<T, Boolean> filter) { return exists(Arrays.asList(elements), filter); } public static <T> boolean exists(Iterable<T> elements, Apply<? super T, Boolean> filter) { return !isEmpty(filter(elements, filter)); } public static <T> boolean forAll(T[] elements, Apply<? super T, Boolean> filter) { return forAll(Arrays.asList(elements), filter); } public static <T> boolean forAll(Iterable<T> elements, Apply<? super T, Boolean> filter) { return !exists(elements, Predicates.not(filter)); } @SuppressWarnings("unchecked") public static <T> Iterable<T> cons(T element, Iterable<? extends T> elements) { return concat(Arrays.asList(element), elements); } @SuppressWarnings("unchecked") public static <T> Iterable<T> cons(T element, T[] elements) { return concat(Arrays.asList(element), elements); } public static <T> Iterable<T> concat(T[] elements1, Iterable<? 
extends T> elements2) { return concat(Arrays.asList(elements1), elements2); } public static <T> Iterable<T> concat(Iterable<? extends T> elements1, T[] elements2) { return concat(elements1, Arrays.asList(elements2)); } public static <T> Iterable<T> concat(T[] elements1, T[] elements2) { return concat(Arrays.asList(elements1), Arrays.asList(elements2)); } public static <T> Iterable<T> concat(Iterable<? extends T> elements1, Iterable<? extends T> elements2) { return new ConcatenatingIterable<T>(Collections.newList(elements1, elements2)); } public static <T> Iterable<T> concat(final Iterable<? extends T> elements1, final Iterable<? extends T> elements2, final Iterable<? extends T> elements3) { return concat(elements1, concat(elements2, elements3)); } public static <T> Iterable<T> concat(final Iterable<? extends T> elements1, final Iterable<? extends T> elements2, final Iterable<? extends T> elements3, final Iterable<? extends T> elements4) { return concat(elements1, concat(elements2, elements3, elements4)); } public static <T> Iterable<T> concat(final Iterable<? extends T> elements1, final Iterable<? extends T> elements2, final Iterable<? extends T> elements3, final Iterable<? extends T> elements4, final Iterable<? extends T> elements5, final Iterable<? extends T>... rest) { return concat(elements1, concat(elements2, concat(elements3, elements4, elements5, flatten(rest)))); } public static <T extends Comparable<T>> Iterable<T> sort(final T[] elements) { return sort(Arrays.asList(elements)); } public static <T extends Comparable<T>> Iterable<T> sort(final Iterable<T> elements) { if (isEmpty(elements)) { return newSet(); } return sort(elements, Ordering.Natural()); } public static <T> Iterable<T> sort(T[] elements, Comparator<? super T> comparator) { return sort(Arrays.asList(elements), comparator); } public static <T> Iterable<T> sort(final Iterable<T> elements, final Comparator<? 
super T> comparator) { return new Iterables.SortingIterable<T>(elements, comparator); } public static <T extends SemiGroup<T>> T reduce(T e1) { return e1; } public static <T extends SemiGroup<T>> T reduce(T e1, T e2) { return reduce(newList(e1, e2)).get(); } public static <T extends SemiGroup<T>> T reduce(T e1, T e2, T e3) { return reduce(newList(e1, e2, e3)).get(); } public static <T extends SemiGroup<T>> T reduce(T e1, T e2, T e3, T e4) { return reduce(newList(e1, e2, e3, e4)).get(); } public static <T extends SemiGroup<T>> T reduce(T e1, T e2, T e3, T e4, T... elements) { return reduce(concat(newList(e1, e2, e3, e4), elements)).get(); } public static <T extends SemiGroup<T>> Option<T> reduce(T[] elements) { return reduce(Arrays.asList(elements)); } public static <T extends SemiGroup<T>> Option<T> reduce(Iterable<? extends T> elements) { if (isEmpty(elements)) { return None(); } return fold(elements, head(elements)); } public static <T> T reduce(T[] elements, Monoid<T> m) { return reduce(Arrays.asList(elements), m); } public static <T> T reduce(Iterable<? extends T> elements, Monoid<T> m) { return fold(cons(m.zero(), elements), m).get(); } /** * @return <i>None</i> if <i>elements</i> is empty */ public static <T> Option<T> fold(T[] elements, Apply<Tuple2<T,T>, T> f) { return fold(Arrays.asList(elements), f); } /** * @return <i>None</i> if <i>elements</i> is empty */ public static <T> Option<T> fold(Iterable<? extends T> elements, Apply<Tuple2<T,T>, T> f) { Option<? extends T> h = headOption(elements); if (h.isDefined()) { T ret = h.get(); for (T t : drop(elements, 1)) { ret = f.apply(Tuple.of(ret, t)); } return Some(ret); } return None(); } public static long sum(Integer... elements) { return sum(Arrays.asList(elements)); } public static long sum(Iterable<Integer> elements) { return reduce(map(elements, Transformers.int2long), Monoids.longSum); } public static long product(Integer... 
elements) { return product(Arrays.asList(elements)); } public static long product(Iterable<Integer> elements) { return reduce(map(elements, Transformers.int2long), Monoids.longProduct); } public static <T extends Comparable<T>> T min(T e1, T... elements) { return min(cons(e1, elements)).get(); } public static <T extends Comparable<T>> Option<T> min(T[] elements) { return min(Arrays.asList(elements)); } public static <T extends Comparable<T>> Option<T> min(Iterable<T> elements) { return headOption(sort(elements)); } public static <T extends Comparable<T>> T max(T e1, T... elements) { return max(cons(e1, elements)).get(); } public static <T extends Comparable<T>> Option<T> max(T[] elements) { return max(Arrays.asList(elements)); } public static <T extends Comparable<T>> Option<T> max(Iterable<T> elements) { return headOption(sort(elements, Ordering.Natural().reverse())); } public static <A,B> Iterable<Tuple2<A, B>> zip(A[] a, B[] b) { return zip(Arrays.asList(a), Arrays.asList(b)); } public static <A,B> Iterable<Tuple2<A, B>> zip(A[] a, Iterable<B> b) { return zip(Arrays.asList(a), b); } public static <A,B> Iterable<Tuple2<A, B>> zip(Iterable<A> a, B[] b) { return zip(a, Arrays.asList(b)); } public static <A,B> Iterable<Tuple2<A, B>> zip(Iterable<A> a, Iterable<B> b) { return new ZippingIterable<A,B>(a, b); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(A[] a, B[] b, C[] c) { return zip(Arrays.asList(a), Arrays.asList(b), Arrays.asList(c)); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(A[] a, Iterable<B> b, Iterable<C> c) { return zip(Arrays.asList(a), b, c); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(Iterable<A> a, B[] b, Iterable<C> c) { return zip(a, Arrays.asList(b), c); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(Iterable<A> a, Iterable<B> b, C[] c) { return zip(a, b, Arrays.asList(c)); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(A[] a, B[] b, Iterable<C> c) { return zip(Arrays.asList(a), Arrays.asList(b), c); } 
public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(Iterable<A> a, B[] b, C[] c) { return zip(a, Arrays.asList(b), Arrays.asList(c)); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(A[] a, Iterable<B> b, C[] c) { return zip(Arrays.asList(a), b, Arrays.asList(c)); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(Iterable<A> a, Iterable<B> b, Iterable<C> c) { return map(zip(zip(a, b), c), new Transformer<Tuple2<Tuple2<A, B>, C>, Tuple3<A, B, C>>() { @Override public Tuple3<A, B, C> transform(Tuple2<Tuple2<A, B>, C> source) { return source._1.append(source._2); } }); } public static <A> Iterable<Tuple2<Integer, A>> zipWithIndex(Iterable<A> a) { return new ZippingIterable<Integer,A>(new RangeIterable(0), a); } public static <A> Iterable<Tuple2<Integer, A>> zipWithIndex(A[] a) { return new ZippingIterable<Integer,A>(new RangeIterable(0), Arrays.asList(a)); } public static Iterable<Integer> range(int from) { return new RangeIterable(from); } public static Iterable<Integer> range(int from, int toInclusive) { return new RangeIterable(from, toInclusive); } public static <T> Iterable<T> repeat(T value) { return new RepeatingIterable<T>(value); } public static <T> Iterable<T> repeat(T value, int amount) { return new RepeatingIterable<T>(value, amount); } public static String mkString(Iterable<Character> elements) { return mkString("", map(elements, Transformers.toString)); } public static String mkString(String delim, String[] elements) { return mkString(delim, Arrays.asList(elements)); } public static String mkString(String delim, Iterable<String> elements) { StringBuilder sb = new StringBuilder(); for (String s: elements) { if (sb.length() > 0) { sb.append(delim); } sb.append(s); } return sb.toString(); } public static <T> Iterable<T> reverse(Iterable<T> elements) { return new Iterables.ReversingIterable<T>(elements); } public static <T,R> Iterable<R> sequence(Iterable<? extends Apply<? super T,? 
extends R>> elements, final T t) { return map(elements, new Transformer<Apply<? super T,? extends R>, R>() { @Override public R transform(Apply<? super T,? extends R> source) { return source.apply(t); } }); } public static <T> Iterable<Iterable<T>> transpose(Iterable<? extends Iterable<T>> elements) { return new TransposingIterable<T>(elements); } public static <T> Iterable<Iterable<T>> transpose2(Iterable<T[]> elements) { return transpose(map(elements, new Transformer<T[], Iterable<T>>() { @Override public Iterable<T> transform(T[] source) { return Arrays.asList(source); } })); } private static final String LINE_SEP = System.getProperty("line.separator"); public static final String unlines(Iterable<String> elements) { return mkString(LINE_SEP, elements); } }
src/main/java/fi/solita/utils/functional/Functional.java
package fi.solita.utils.functional; import static fi.solita.utils.functional.Collections.newList; import static fi.solita.utils.functional.Collections.newMap; import static fi.solita.utils.functional.Collections.newSet; import static fi.solita.utils.functional.Option.None; import static fi.solita.utils.functional.Option.Some; import java.util.Arrays; import java.util.Collection; import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import fi.solita.utils.functional.Iterables.ConcatenatingIterable; import fi.solita.utils.functional.Iterables.FilteringIterable; import fi.solita.utils.functional.Iterables.RangeIterable; import fi.solita.utils.functional.Iterables.RepeatingIterable; import fi.solita.utils.functional.Iterables.TransformingIterable; import fi.solita.utils.functional.Iterables.TransposingIterable; import fi.solita.utils.functional.Iterables.ZippingIterable; public abstract class Functional { /** * Returns a new iterable <code>a - b</code>, i.e. one that contains all elements of <code>a</code> that * don't exist in <code>b</code>. */ public static <T> Iterable<T> subtract(Iterable<T> a, final Collection<T> b) { return filter(a, new Predicate<T>() { @Override public boolean accept(T object) { return !b.contains(object); } }); } public static <T> Iterable<T> subtract(T[] a, final Collection<T> b) { return subtract(newList(a), b); } public static <T> Iterable<T> subtract(Iterable<T> a, final T[] b) { return subtract(a, newSet(b)); } public static <T> Iterable<T> subtract(T[] a, final T[] b) { return subtract(newList(a), newSet(b)); } public static <K, V> Option<V> find(Map<? extends K, V> map, K key) { return Option.of(map.get(key)); } public static <T> Option<T> find(T[] elements, Apply<? super T, Boolean> filter) { return find(Arrays.asList(elements), filter); } public static <T> Option<T> find(Iterable<T> elements, Apply<? 
super T, Boolean> filter) { return headOption(filter(elements, filter)); } public static <T> Iterable<T> filter(T[] elements, Apply<? super T, Boolean> filter) { return filter(Arrays.asList(elements), filter); } public static <T> Iterable<T> filter(Iterable<T> elements, Apply<? super T, Boolean> filter) { return new FilteringIterable<T>(elements, filter); } public static <T, E> Map<T, E> filter(Map<T, E> map, Apply<Map.Entry<T, E>, Boolean> filter) { return Collections.newMap(filter(map.entrySet(), filter)); } public static <S, T> Iterable<T> map(S[] elements, Apply<? super S, ? extends T> transformer) { return map(Arrays.asList(elements), transformer); } public static <S, T> Iterable<T> map(Iterable<S> elements, Apply<? super S, ? extends T> transformer) { return new TransformingIterable<S,T>(elements, transformer); } public static <K1, V1, K2, V2> Map<K2, V2> map(Map<K1, V1> source, Apply<Map.Entry<K1, V1>, Map.Entry<K2, V2>> transformer) { return Collections.newMap(map(source.entrySet(), transformer)); } public static <S, T> Iterable<T> flatMap(S[] elements, Apply<? super S, ? extends Iterable<T>> transformer) { return flatMap(Arrays.asList(elements), transformer); } public static <S, T> Iterable<T> flatMap(Iterable<S> elements, Apply<? super S, ? extends Iterable<? extends T>> transformer) { return flatten(map(elements, transformer)); } public static <T> Iterable<T> flatten(T[][] elements) { return flatten(map(elements, new Transformer<T[], Iterable<T>>() { @Override public Iterable<T> transform(T[] source) { return Arrays.asList(source); } })); } public static <T> Iterable<T> flatten(Iterable<? extends T>[] elements) { return flatten(map(elements, Function1.<Iterable<? extends T>>id())); } public static <T> Iterable<T> flatten(Iterable<? extends Iterable<? extends T>> elements) { return new ConcatenatingIterable<T>(elements); } public static <T> void foreach(T[] elements, Apply<? 
super T, Void> procedure) { foreach(Arrays.asList(elements), procedure); } public static <T> void foreach(Iterable<T> elements, Apply<? super T, Void> procedure) { for (T t: elements) { procedure.apply(t); } } /** * Non-lazy */ public static <T> Iterable<List<T>> grouped(T[] elements, int size) { return grouped(Arrays.asList(elements), size); } /** * Non-lazy */ public static <T> Iterable<List<T>> grouped(Iterable<T> elements, int size) { if (size <= 0) { throw new IllegalArgumentException("size must be positive"); } List<List<T>> target = Collections.newList(); Iterator<T> it = elements.iterator(); while (it.hasNext()) { List<T> group = Collections.newListOfSize(size); for (@SuppressWarnings("unused") int i: range(1, size)) { if (it.hasNext()) { group.add(it.next()); } } if (!group.isEmpty()) { target.add(group); } } return target; } /** * Non-lazy */ public static <G, T> Map<G, List<T>> groupBy(T[] elements, Apply<? super T,G> f) { return groupBy(Arrays.asList(elements), f); } /** * Non-lazy */ public static <G, T> Map<G, List<T>> groupBy(Iterable<T> elements, Apply<? 
super T,G> f) { Map<G, List<T>> target = newMap(); for (T t: elements) { G g = f.apply(t); Option<List<T>> groupOption = find(target, g); List<T> group; if (groupOption.isDefined()) { group = groupOption.get(); } else { group = Collections.newList(); target.put(g, group); } group.add(t); } return target; } public static <T> T head(T[] elements) { return head(Arrays.asList(elements)); } public static <T> T head(Iterable<T> elements) { return elements.iterator().next(); } public static <T> Option<T> headOption(T[] elements) { return headOption(Arrays.asList(elements)); } public static <T> Option<T> headOption(Iterable<T> elements) { Iterator<T> it = elements.iterator(); if (it.hasNext()) { return Some(it.next()); } else { return None(); } } public static <T> Iterable<T> tail(T[] elements) { return tail(Arrays.asList(elements)); } public static <T> Iterable<T> tail(Iterable<T> elements) { return drop(elements, 1); } public static <T> T last(T[] elements) { return last(Arrays.asList(elements)); } public static <T> T last(Iterable<T> elements) { Iterator<T> it = elements.iterator(); T ret = it.next(); while (it.hasNext()) { ret = it.next(); } return ret; } public static <T> Option<T> lastOption(T[] elements) { return lastOption(Arrays.asList(elements)); } public static <T> Option<T> lastOption(Iterable<T> elements) { Iterator<T> it = elements.iterator(); if (it.hasNext()) { T ret = it.next(); while (it.hasNext()) { ret = it.next(); } return Some(ret); } else { return None(); } } public static <T> Iterable<T> init(T[] elements) { return init(Arrays.asList(elements)); } public static <T> Iterable<T> init(Iterable<T> elements) { return take(elements, size(elements)-1); } public static <T> Iterable<T> take(T[] elements, int amount) { return take(Arrays.asList(elements), amount); } public static <T> Iterable<T> take(final Iterable<T> elements, final int amount) { return new Iterables.TakingIterable<T>(elements, amount); } public static <T> Iterable<T> drop(T[] elements, int 
amount) { return drop(Arrays.asList(elements), amount); } public static <T> Iterable<T> drop(final Iterable<T> elements, final int amount) { return new Iterables.DroppingIterable<T>(elements, amount); } public static <T> Iterable<T> takeWhile(T[] elements, Apply<? super T, Boolean> predicate) { return takeWhile(Arrays.asList(elements), predicate); } public static <T> Iterable<T> takeWhile(final Iterable<T> elements, final Apply<? super T, Boolean> predicate) { return new Iterable<T>() { @Override public Iterator<T> iterator() { return new Iterator<T>() { private Option<T> next; private Iterator<T> source = elements.iterator(); { readNext(); } @Override public boolean hasNext() { return next.isDefined(); } private void readNext() { if (!source.hasNext()) { next = None(); } else { T n = source.next(); if (predicate.apply(n)) { next = Some(n); } else { next = None(); } } } @Override public T next() { if (!next.isDefined()) { throw new NoSuchElementException(); } T ret = next.get(); readNext(); return ret; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } }; } public static <T> Iterable<T> dropWhile(T[] elements, Apply<? super T, Boolean> predicate) { return dropWhile(Arrays.asList(elements), predicate); } public static <T> Iterable<T> dropWhile(final Iterable<T> elements, final Apply<? super T, Boolean> predicate) { return new Iterable<T>() { @Override public Iterator<T> iterator() { return new Iterator<T>() { private boolean dropping = true; private Iterator<T> source = elements.iterator(); private Option<T> next; { readNext(); } @Override public boolean hasNext() { return next.isDefined(); } private void readNext() { next = source.hasNext() ? Some(source.next()) : Option.<T>None(); while (dropping && next.isDefined() && predicate.apply(next.get())) { next = source.hasNext() ? 
Some(source.next()) : Option.<T>None(); } dropping = false; } @Override public T next() { if (!next.isDefined()) { throw new NoSuchElementException(); } T ret = next.get(); readNext(); return ret; } @Override public void remove() { throw new UnsupportedOperationException(); } }; } }; } public static <T> Pair<Iterable<T>, Iterable<T>> span(final Iterable<T> elements, final Apply<? super T, Boolean> predicate) { // TODO: a more efficient implementation return Pair.of(takeWhile(elements, predicate), dropWhile(elements, predicate)); } public static boolean isEmpty(Iterable<?> elements) { return !elements.iterator().hasNext(); } public static int size(Iterable<?> elements) { return Iterables.resolveSize.apply(elements).getOrElse(Collections.newList(elements).size()); } public static <T> boolean contains(T[] elements, T element) { return contains(Arrays.asList(elements), element); } public static <T,E> boolean contains(Iterable<T> elements, T element) { return exists(elements, Predicates.equalTo(element)); } public static <T> boolean exists(T[] elements, Apply<T, Boolean> filter) { return exists(Arrays.asList(elements), filter); } public static <T> boolean exists(Iterable<T> elements, Apply<? super T, Boolean> filter) { return !isEmpty(filter(elements, filter)); } public static <T> boolean forAll(T[] elements, Apply<? super T, Boolean> filter) { return forAll(Arrays.asList(elements), filter); } public static <T> boolean forAll(Iterable<T> elements, Apply<? super T, Boolean> filter) { return !exists(elements, Predicates.not(filter)); } @SuppressWarnings("unchecked") public static <T> Iterable<T> cons(T element, Iterable<? extends T> elements) { return concat(Arrays.asList(element), elements); } @SuppressWarnings("unchecked") public static <T> Iterable<T> cons(T element, T[] elements) { return concat(Arrays.asList(element), elements); } public static <T> Iterable<T> concat(T[] elements1, Iterable<? 
extends T> elements2) { return concat(Arrays.asList(elements1), elements2); } public static <T> Iterable<T> concat(Iterable<? extends T> elements1, T[] elements2) { return concat(elements1, Arrays.asList(elements2)); } public static <T> Iterable<T> concat(T[] elements1, T[] elements2) { return concat(Arrays.asList(elements1), Arrays.asList(elements2)); } public static <T> Iterable<T> concat(Iterable<? extends T> elements1, Iterable<? extends T> elements2) { return new ConcatenatingIterable<T>(Collections.newList(elements1, elements2)); } public static <T> Iterable<T> concat(final Iterable<? extends T> elements1, final Iterable<? extends T> elements2, final Iterable<? extends T> elements3) { return concat(elements1, concat(elements2, elements3)); } public static <T> Iterable<T> concat(final Iterable<? extends T> elements1, final Iterable<? extends T> elements2, final Iterable<? extends T> elements3, final Iterable<? extends T> elements4) { return concat(elements1, concat(elements2, elements3, elements4)); } public static <T> Iterable<T> concat(final Iterable<? extends T> elements1, final Iterable<? extends T> elements2, final Iterable<? extends T> elements3, final Iterable<? extends T> elements4, final Iterable<? extends T> elements5, final Iterable<? extends T>... rest) { return concat(elements1, concat(elements2, concat(elements3, elements4, elements5, flatten(rest)))); } public static <T extends Comparable<T>> Iterable<T> sort(final T[] elements) { return sort(Arrays.asList(elements)); } public static <T extends Comparable<T>> Iterable<T> sort(final Iterable<T> elements) { if (isEmpty(elements)) { return newSet(); } return sort(elements, Ordering.Natural()); } public static <T> Iterable<T> sort(T[] elements, Comparator<? super T> comparator) { return sort(Arrays.asList(elements), comparator); } public static <T> Iterable<T> sort(final Iterable<T> elements, final Comparator<? 
super T> comparator) { return new Iterables.SortingIterable<T>(elements, comparator); } public static <T extends SemiGroup<T>> T reduce(T e1) { return e1; } public static <T extends SemiGroup<T>> T reduce(T e1, T e2) { return reduce(newList(e1, e2)).get(); } public static <T extends SemiGroup<T>> T reduce(T e1, T e2, T e3) { return reduce(newList(e1, e2, e3)).get(); } public static <T extends SemiGroup<T>> T reduce(T e1, T e2, T e3, T e4) { return reduce(newList(e1, e2, e3, e4)).get(); } public static <T extends SemiGroup<T>> T reduce(T e1, T e2, T e3, T e4, T... elements) { return reduce(concat(newList(e1, e2, e3, e4), elements)).get(); } public static <T extends SemiGroup<T>> Option<T> reduce(T[] elements) { return reduce(Arrays.asList(elements)); } public static <T extends SemiGroup<T>> Option<T> reduce(Iterable<? extends T> elements) { if (isEmpty(elements)) { return None(); } return fold(elements, head(elements)); } public static <T> T reduce(T[] elements, Monoid<T> m) { return reduce(Arrays.asList(elements), m); } public static <T> T reduce(Iterable<? extends T> elements, Monoid<T> m) { return fold(cons(m.zero(), elements), m).get(); } /** * @return <i>None</i> if <i>elements</i> is empty */ public static <T> Option<T> fold(T[] elements, Apply<Tuple2<T,T>, T> f) { return fold(Arrays.asList(elements), f); } /** * @return <i>None</i> if <i>elements</i> is empty */ public static <T> Option<T> fold(Iterable<? extends T> elements, Apply<Tuple2<T,T>, T> f) { Option<? extends T> h = headOption(elements); if (h.isDefined()) { T ret = h.get(); for (T t : drop(elements, 1)) { ret = f.apply(Tuple.of(ret, t)); } return Some(ret); } return None(); } public static long sum(int e1) { return sum(Arrays.asList(e1)); } public static long sum(int e1, int e2) { return sum(Arrays.asList(e1, e2)); } public static long sum(int e1, int e2, int e3) { return sum(Arrays.asList(e1, e2, e3)); } public static long sum(int e1, int e2, int e3, Integer... 
rest) { return sum(concat(Arrays.asList(e1, e2, e3), rest)); } public static long sum(Iterable<Integer> elements) { return reduce(map(elements, Transformers.int2long), Monoids.longSum); } public static long product(int e1) { return product(Arrays.asList(e1)); } public static long product(int e1, int e2) { return product(Arrays.asList(e1, e2)); } public static long product(int e1, int e2, int e3) { return product(Arrays.asList(e1, e2, e3)); } public static long product(int e1, int e2, int e3, Integer... rest) { return product(concat(Arrays.asList(e1, e2, e3), rest)); } public static long product(Iterable<Integer> elements) { return reduce(map(elements, Transformers.int2long), Monoids.longProduct); } @SuppressWarnings("unchecked") public static <T extends Comparable<T>> T min(T e1, T... elements) { return min(concat(Arrays.asList(e1), elements)).get(); } public static <T extends Comparable<T>> Option<T> min(Iterable<T> elements) { return headOption(sort(elements)); } @SuppressWarnings("unchecked") public static <T extends Comparable<T>> T max(T e1, T... 
elements) { return max(concat(Arrays.asList(e1), elements)).get(); } public static <T extends Comparable<T>> Option<T> max(Iterable<T> elements) { return headOption(sort(elements, Ordering.Natural().reverse())); } public static <A,B> Iterable<Tuple2<A, B>> zip(A[] a, B[] b) { return zip(Arrays.asList(a), Arrays.asList(b)); } public static <A,B> Iterable<Tuple2<A, B>> zip(A[] a, Iterable<B> b) { return zip(Arrays.asList(a), b); } public static <A,B> Iterable<Tuple2<A, B>> zip(Iterable<A> a, B[] b) { return zip(a, Arrays.asList(b)); } public static <A,B> Iterable<Tuple2<A, B>> zip(Iterable<A> a, Iterable<B> b) { return new ZippingIterable<A,B>(a, b); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(A[] a, B[] b, C[] c) { return zip(Arrays.asList(a), Arrays.asList(b), Arrays.asList(c)); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(A[] a, Iterable<B> b, Iterable<C> c) { return zip(Arrays.asList(a), b, c); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(Iterable<A> a, B[] b, Iterable<C> c) { return zip(a, Arrays.asList(b), c); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(Iterable<A> a, Iterable<B> b, C[] c) { return zip(a, b, Arrays.asList(c)); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(A[] a, B[] b, Iterable<C> c) { return zip(Arrays.asList(a), Arrays.asList(b), c); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(Iterable<A> a, B[] b, C[] c) { return zip(a, Arrays.asList(b), Arrays.asList(c)); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(A[] a, Iterable<B> b, C[] c) { return zip(Arrays.asList(a), b, Arrays.asList(c)); } public static <A,B,C> Iterable<Tuple3<A, B, C>> zip(Iterable<A> a, Iterable<B> b, Iterable<C> c) { return map(zip(zip(a, b), c), new Transformer<Tuple2<Tuple2<A, B>, C>, Tuple3<A, B, C>>() { @Override public Tuple3<A, B, C> transform(Tuple2<Tuple2<A, B>, C> source) { return source._1.append(source._2); } }); } public static <A> Iterable<Tuple2<Integer, A>> zipWithIndex(Iterable<A> a) { return new 
ZippingIterable<Integer,A>(new RangeIterable(0), a); } public static <A> Iterable<Tuple2<Integer, A>> zipWithIndex(A[] a) { return new ZippingIterable<Integer,A>(new RangeIterable(0), Arrays.asList(a)); } public static Iterable<Integer> range(int from) { return new RangeIterable(from); } public static Iterable<Integer> range(int from, int toInclusive) { return new RangeIterable(from, toInclusive); } public static <T> Iterable<T> repeat(T value) { return new RepeatingIterable<T>(value); } public static <T> Iterable<T> repeat(T value, int amount) { return new RepeatingIterable<T>(value, amount); } public static String mkString(Iterable<Character> elements) { return mkString("", map(elements, Transformers.toString)); } public static String mkString(String delim, String[] elements) { return mkString(delim, Arrays.asList(elements)); } public static String mkString(String delim, Iterable<String> elements) { StringBuilder sb = new StringBuilder(); for (String s: elements) { if (sb.length() > 0) { sb.append(delim); } sb.append(s); } return sb.toString(); } public static <T> Iterable<T> reverse(Iterable<T> elements) { return new Iterables.ReversingIterable<T>(elements); } public static <T,R> Iterable<R> sequence(Iterable<? extends Apply<? super T,? extends R>> elements, final T t) { return map(elements, new Transformer<Apply<? super T,? extends R>, R>() { @Override public R transform(Apply<? super T,? extends R> source) { return source.apply(t); } }); } public static <T> Iterable<Iterable<T>> transpose(Iterable<? 
extends Iterable<T>> elements) { return new TransposingIterable<T>(elements); } public static <T> Iterable<Iterable<T>> transpose2(Iterable<T[]> elements) { return transpose(map(elements, new Transformer<T[], Iterable<T>>() { @Override public Iterable<T> transform(T[] source) { return Arrays.asList(source); } })); } private static final String LINE_SEP = System.getProperty("line.separator"); public static final String unlines(Iterable<String> elements) { return mkString(LINE_SEP, elements); } }
small refactoring
src/main/java/fi/solita/utils/functional/Functional.java
small refactoring
<ide><path>rc/main/java/fi/solita/utils/functional/Functional.java <ide> public static <T> T reduce(T[] elements, Monoid<T> m) { <ide> return reduce(Arrays.asList(elements), m); <ide> } <del> <add> <ide> public static <T> T reduce(Iterable<? extends T> elements, Monoid<T> m) { <ide> return fold(cons(m.zero(), elements), m).get(); <ide> } <ide> return None(); <ide> } <ide> <del> public static long sum(int e1) { <del> return sum(Arrays.asList(e1)); <del> } <del> <del> public static long sum(int e1, int e2) { <del> return sum(Arrays.asList(e1, e2)); <del> } <del> <del> public static long sum(int e1, int e2, int e3) { <del> return sum(Arrays.asList(e1, e2, e3)); <del> } <del> <del> public static long sum(int e1, int e2, int e3, Integer... rest) { <del> return sum(concat(Arrays.asList(e1, e2, e3), rest)); <add> public static long sum(Integer... elements) { <add> return sum(Arrays.asList(elements)); <ide> } <ide> <ide> public static long sum(Iterable<Integer> elements) { <ide> return reduce(map(elements, Transformers.int2long), Monoids.longSum); <ide> } <ide> <del> public static long product(int e1) { <del> return product(Arrays.asList(e1)); <del> } <del> <del> public static long product(int e1, int e2) { <del> return product(Arrays.asList(e1, e2)); <del> } <del> <del> public static long product(int e1, int e2, int e3) { <del> return product(Arrays.asList(e1, e2, e3)); <del> } <del> <del> public static long product(int e1, int e2, int e3, Integer... rest) { <del> return product(concat(Arrays.asList(e1, e2, e3), rest)); <add> public static long product(Integer... elements) { <add> return product(Arrays.asList(elements)); <ide> } <ide> <ide> public static long product(Iterable<Integer> elements) { <ide> return reduce(map(elements, Transformers.int2long), Monoids.longProduct); <ide> } <ide> <del> @SuppressWarnings("unchecked") <ide> public static <T extends Comparable<T>> T min(T e1, T... 
elements) { <del> return min(concat(Arrays.asList(e1), elements)).get(); <del> } <del> <add> return min(cons(e1, elements)).get(); <add> } <add> <add> public static <T extends Comparable<T>> Option<T> min(T[] elements) { <add> return min(Arrays.asList(elements)); <add> } <add> <ide> public static <T extends Comparable<T>> Option<T> min(Iterable<T> elements) { <ide> return headOption(sort(elements)); <ide> } <ide> <del> @SuppressWarnings("unchecked") <ide> public static <T extends Comparable<T>> T max(T e1, T... elements) { <del> return max(concat(Arrays.asList(e1), elements)).get(); <add> return max(cons(e1, elements)).get(); <add> } <add> <add> public static <T extends Comparable<T>> Option<T> max(T[] elements) { <add> return max(Arrays.asList(elements)); <ide> } <ide> <ide> public static <T extends Comparable<T>> Option<T> max(Iterable<T> elements) {
JavaScript
mit
4ed6f0e005d6a6ef7eb5d7bd515f15418bc1433a
0
siimple/siimple-elements
import h from '../hyperscript.js'; //Heading class export default class SiimpleHeading extends React.Component { //Constructor constructor(props) { //Call super super(props); //Initialize the state object this.state = { type: '1' }; //Parse the initial properties this.componentWillReceiveProps(props); } //Parse new props componentWillReceiveProps(props) { //Parse the heading type value if(typeof props.type !== 'undefined' && props.type !== this.state.type) { //Save the heading type this.state.type = props.type; } } //Render the heading element render() { //Get the header class var class_name = (typeof this.state.type === 'string') ? 'siimple-h' + this.state.type.trim() : null; //Return the heading element return h.div({ className: class_name }, this.props.children); } }
src/typography/heading.js
import h from './hyperscript.js'; //Heading class export default class SiimpleHeading extends React.Component { //Constructor constructor(props) { //Call super super(props); //Initialize the state object this.state = { type: '1' }; //Parse the initial properties this.componentWillReceiveProps(props); } //Parse new props componentWillReceiveProps(props) { //Parse the heading type value if(typeof props.type !== 'undefined' && props.type !== this.state.type) { //Save the heading type this.state.type = props.type; } } //Render the heading element render() { //Get the header class var class_name = (typeof this.state.type === 'string') ? 'siimple-h' + this.state.type.trim() : null; //Return the heading element return h.div({ className: class_name }, this.props.children); } }
src/typography/heading.js: fixed dependencies
src/typography/heading.js
src/typography/heading.js: fixed dependencies
<ide><path>rc/typography/heading.js <del>import h from './hyperscript.js'; <add>import h from '../hyperscript.js'; <ide> <ide> //Heading class <ide> export default class SiimpleHeading extends React.Component
Java
bsd-2-clause
492c35aa2a58d7569e857afa44bb9f683d7ba9d8
0
JFormDesigner/RichTextFX,TomasMikula/RichTextFX,afester/RichTextFX,JordanMartinez/RichTextFX,afester/RichTextFX,TomasMikula/RichTextFX,FXMisc/RichTextFX,FXMisc/RichTextFX,JordanMartinez/RichTextFX,JFormDesigner/RichTextFX
package org.fxmisc.richtext; import javafx.beans.value.ObservableValue; import javafx.geometry.Bounds; import javafx.scene.control.IndexRange; import org.fxmisc.richtext.model.StyledDocument; import org.reactfx.EventStream; import org.reactfx.Subscription; import org.reactfx.Suspendable; import org.reactfx.SuspendableNo; import org.reactfx.util.Tuple3; import org.reactfx.util.Tuples; import org.reactfx.value.SuspendableVal; import org.reactfx.value.Val; import org.reactfx.value.Var; import java.util.Optional; public class BoundedSelectionImpl<PS, SEG, S> implements BoundedSelection<PS, SEG, S> { private final UnboundedSelection<PS, SEG, S> delegate; @Override public ObservableValue<IndexRange> rangeProperty() { return delegate.rangeProperty(); } @Override public IndexRange getRange() { return delegate.getRange(); } @Override public ObservableValue<Integer> lengthProperty() { return delegate.lengthProperty(); } @Override public int getLength() { return delegate.getLength(); } @Override public ObservableValue<Integer> paragraphSpanProperty() { return delegate.paragraphSpanProperty(); } @Override public int getParagraphSpan() { return delegate.getParagraphSpan(); } @Override public final ObservableValue<StyledDocument<PS, SEG, S>> selectedDocumentProperty() { return delegate.selectedDocumentProperty(); } @Override public final StyledDocument<PS, SEG, S> getSelectedDocument() { return delegate.getSelectedDocument(); } @Override public ObservableValue<String> selectedTextProperty() { return delegate.selectedTextProperty(); } @Override public String getSelectedText() { return delegate.getSelectedText(); } @Override public ObservableValue<Integer> startPositionProperty() { return delegate.startPositionProperty(); } @Override public int getStartPosition() { return delegate.getStartPosition(); } @Override public ObservableValue<Integer> startParagraphIndexProperty() { return delegate.startParagraphIndexProperty(); } @Override public int getStartParagraphIndex() { return 
delegate.getStartParagraphIndex(); } @Override public ObservableValue<Integer> startColumnPositionProperty() { return delegate.startColumnPositionProperty(); } @Override public int getStartColumnPosition() { return delegate.getStartColumnPosition(); } @Override public ObservableValue<Integer> endPositionProperty() { return delegate.endPositionProperty(); } @Override public int getEndPosition() { return delegate.getEndPosition(); } @Override public ObservableValue<Integer> endPararagraphIndexProperty() { return delegate.endPararagraphIndexProperty(); } @Override public int getEndPararagraphIndex() { return delegate.getEndPararagraphIndex(); } @Override public ObservableValue<Integer> endColumnPositionProperty() { return delegate.endColumnPositionProperty(); } @Override public int getEndColumnPosition() { return delegate.getEndColumnPosition(); } private final Val<Integer> anchorPosition; @Override public int getAnchorPosition() { return anchorPosition.getValue(); } @Override public ObservableValue<Integer> anchorPositionProperty() { return anchorPosition; } private final Val<Integer> anchorParIndex; @Override public int getAnchorParIndex() { return anchorParIndex.getValue(); } @Override public ObservableValue<Integer> anchorParIndexProperty() { return anchorParIndex; } private final Val<Integer> anchorColPosition; @Override public int getAnchorColPosition() { return anchorColPosition.getValue(); } @Override public ObservableValue<Integer> anchorColPositionProperty() { return anchorColPosition; } @Override public ObservableValue<Optional<Bounds>> boundsProperty() { return delegate.boundsProperty(); } @Override public Optional<Bounds> getBounds() { return delegate.getBounds(); } @Override public EventStream<?> dirtyEvents() { return delegate.dirtyEvents(); } private final SuspendableNo beingUpdated = new SuspendableNo(); public final boolean isBeingUpdated() { return beingUpdated.get(); } public final ObservableValue<Boolean> beingUpdatedProperty() { return 
beingUpdated; } private final Var<Boolean> internalStartedByAnchor = Var.newSimpleVar(true); private final SuspendableVal<Boolean> startedByAnchor = internalStartedByAnchor.suspendable(); private boolean anchorIsStart() { return startedByAnchor.getValue(); } private final GenericStyledArea<PS, SEG, S> area; private final Caret caret; private Subscription subscription = () -> {}; BoundedSelectionImpl(GenericStyledArea<PS, SEG, S> area) { this(area, area.getMainCaret()); } BoundedSelectionImpl(GenericStyledArea<PS, SEG, S> area, Caret caret) { this(area, caret, new IndexRange(0, 0)); } BoundedSelectionImpl(GenericStyledArea<PS, SEG, S> area, Caret caret, IndexRange startingRange) { this.area = area; this.caret = caret; SuspendableNo delegateUpdater = new SuspendableNo(); delegate = new UnboundedSelectionImpl<>(area, delegateUpdater, startingRange); Val<Tuple3<Integer, Integer, Integer>> anchorPositions = startedByAnchor.flatMap(b -> b ? Val.constant(Tuples.t(getStartPosition(), getStartParagraphIndex(), getStartColumnPosition())) : Val.constant(Tuples.t(getEndPosition(), getEndPararagraphIndex(), getEndColumnPosition())) ); anchorPosition = anchorPositions.map(Tuple3::get1); anchorParIndex = anchorPositions.map(Tuple3::get2); anchorColPosition = anchorPositions.map(Tuple3::get3); Suspendable omniSuspendable = Suspendable.combine( // first, so it's released last beingUpdated, startedByAnchor, // last, so it's released before startedByAnchor, so that anchor's values are correct delegateUpdater ); subscription = omniSuspendable.suspendWhen(area.beingUpdatedProperty()); } @Override public void selectRange(int anchorParagraph, int anchorColumn, int caretParagraph, int caretColumn) { selectRange(textPosition(anchorParagraph, anchorColumn), textPosition(caretParagraph, caretColumn)); } @Override public void selectRange(int anchorPosition, int caretPosition) { if (anchorPosition <= caretPosition) { doSelect(anchorPosition, caretPosition, true); } else { 
doSelect(caretPosition, anchorPosition, false); } } @Override public void selectRange0(int startPosition, int endPosition) { doSelect(startPosition, endPosition, anchorIsStart()); } @Override public void selectRange0(int startParagraphIndex, int startColPosition, int endParagraphIndex, int endColPosition) { selectRange0(textPosition(startParagraphIndex, startColPosition), textPosition(endParagraphIndex, endColPosition)); } @Override public void moveStartBy(int amount, Direction direction) { int updatedStart = direction == Direction.LEFT ? getStartPosition() - amount : getStartPosition() + amount; selectRange0(updatedStart, getEndPosition()); } @Override public void moveEndBy(int amount, Direction direction) { int updatedEnd = direction == Direction.LEFT ? getEndPosition() - amount : getEndPosition() + amount; selectRange0(getStartPosition(), updatedEnd); } @Override public void moveStartTo(int position) { selectRange0(position, getEndPosition()); } @Override public void moveStartTo(int paragraphIndex, int columnPosition) { moveStartTo(textPosition(paragraphIndex, columnPosition)); } @Override public void moveEndTo(int position) { selectRange0(getStartPosition(), position); } @Override public void moveEndTo(int paragraphIndex, int columnPosition) { moveEndTo(textPosition(paragraphIndex, columnPosition)); } @Override public void dispose() { subscription.unsubscribe(); } private void doSelect(int startPosition, int endPosition, boolean anchorIsStart) { Runnable updateRange = () -> { delegate.selectRange0(startPosition, endPosition); internalStartedByAnchor.setValue(anchorIsStart); caret.moveTo(anchorIsStart ? endPosition : startPosition); }; if (area.isBeingUpdated()) { updateRange.run(); } else { area.beingUpdatedProperty().suspendWhile(updateRange); } } private int textPosition(int paragraphIndex, int columnPosition) { return area.position(paragraphIndex, columnPosition).toOffset(); } }
richtextfx/src/main/java/org/fxmisc/richtext/BoundedSelectionImpl.java
package org.fxmisc.richtext; import javafx.beans.value.ObservableValue; import javafx.geometry.Bounds; import javafx.scene.control.IndexRange; import org.fxmisc.richtext.model.StyledDocument; import org.reactfx.EventStream; import org.reactfx.Subscription; import org.reactfx.Suspendable; import org.reactfx.SuspendableNo; import org.reactfx.util.Tuple3; import org.reactfx.util.Tuples; import org.reactfx.value.SuspendableVal; import org.reactfx.value.Val; import org.reactfx.value.Var; import java.util.Optional; public class BoundedSelectionImpl<PS, SEG, S> implements BoundedSelection<PS, SEG, S> { private final UnboundedSelection<PS, SEG, S> delegate; @Override public ObservableValue<IndexRange> rangeProperty() { return delegate.rangeProperty(); } @Override public IndexRange getRange() { return delegate.getRange(); } @Override public ObservableValue<Integer> lengthProperty() { return delegate.lengthProperty(); } @Override public int getLength() { return delegate.getLength(); } @Override public ObservableValue<Integer> paragraphSpanProperty() { return delegate.paragraphSpanProperty(); } @Override public int getParagraphSpan() { return delegate.getParagraphSpan(); } @Override public final ObservableValue<StyledDocument<PS, SEG, S>> selectedDocumentProperty() { return delegate.selectedDocumentProperty(); } @Override public final StyledDocument<PS, SEG, S> getSelectedDocument() { return delegate.getSelectedDocument(); } @Override public ObservableValue<String> selectedTextProperty() { return delegate.selectedTextProperty(); } @Override public String getSelectedText() { return delegate.getSelectedText(); } @Override public ObservableValue<Integer> startPositionProperty() { return delegate.startPositionProperty(); } @Override public int getStartPosition() { return delegate.getStartPosition(); } @Override public ObservableValue<Integer> startParagraphIndexProperty() { return delegate.startParagraphIndexProperty(); } @Override public int getStartParagraphIndex() { return 
delegate.getStartParagraphIndex(); } @Override public ObservableValue<Integer> startColumnPositionProperty() { return delegate.startColumnPositionProperty(); } @Override public int getStartColumnPosition() { return delegate.getStartColumnPosition(); } @Override public ObservableValue<Integer> endPositionProperty() { return delegate.endPositionProperty(); } @Override public int getEndPosition() { return delegate.getEndPosition(); } @Override public ObservableValue<Integer> endPararagraphIndexProperty() { return delegate.endPararagraphIndexProperty(); } @Override public int getEndPararagraphIndex() { return delegate.getEndPararagraphIndex(); } @Override public ObservableValue<Integer> endColumnPositionProperty() { return delegate.endColumnPositionProperty(); } @Override public int getEndColumnPosition() { return delegate.getEndColumnPosition(); } private final Val<Integer> anchorPosition; @Override public int getAnchorPosition() { return anchorPosition.getValue(); } @Override public ObservableValue<Integer> anchorPositionProperty() { return anchorPosition; } private final Val<Integer> anchorParIndex; @Override public int getAnchorParIndex() { return anchorParIndex.getValue(); } @Override public ObservableValue<Integer> anchorParIndexProperty() { return anchorParIndex; } private final Val<Integer> anchorColPosition; @Override public int getAnchorColPosition() { return anchorColPosition.getValue(); } @Override public ObservableValue<Integer> anchorColPositionProperty() { return anchorColPosition; } @Override public ObservableValue<Optional<Bounds>> boundsProperty() { return delegate.boundsProperty(); } @Override public Optional<Bounds> getBounds() { return delegate.getBounds(); } @Override public EventStream<?> dirtyEvents() { return delegate.dirtyEvents(); } private final SuspendableNo beingupdated = new SuspendableNo(); public final boolean isBeingUpdated() { return beingupdated.get(); } public final ObservableValue<Boolean> beingUpdatedProperty() { return 
beingupdated; } private final Var<Boolean> internalStartedByAnchor = Var.newSimpleVar(true); private final SuspendableVal<Boolean> startedByAnchor = internalStartedByAnchor.suspendable(); private boolean anchorIsStart() { return startedByAnchor.getValue(); } private final GenericStyledArea<PS, SEG, S> area; private final Caret caret; private Subscription subscription = () -> {}; BoundedSelectionImpl(GenericStyledArea<PS, SEG, S> area) { this(area, area.getMainCaret()); } BoundedSelectionImpl(GenericStyledArea<PS, SEG, S> area, Caret caret) { this(area, caret, new IndexRange(0, 0)); } BoundedSelectionImpl(GenericStyledArea<PS, SEG, S> area, Caret caret, IndexRange startingRange) { this.area = area; this.caret = caret; SuspendableNo delegateUpdater = new SuspendableNo(); delegate = new UnboundedSelectionImpl<>(area, delegateUpdater, startingRange); Val<Tuple3<Integer, Integer, Integer>> anchorPositions = startedByAnchor.flatMap(b -> b ? Val.constant(Tuples.t(getStartPosition(), getStartParagraphIndex(), getStartColumnPosition())) : Val.constant(Tuples.t(getEndPosition(), getEndPararagraphIndex(), getEndColumnPosition())) ); anchorPosition = anchorPositions.map(Tuple3::get1); anchorParIndex = anchorPositions.map(Tuple3::get2); anchorColPosition = anchorPositions.map(Tuple3::get3); Suspendable omniSuspendable = Suspendable.combine( // first, so it's released last beingupdated, startedByAnchor, // last, so it's released before startedByAnchor, so that anchor's values are correct delegateUpdater ); subscription = omniSuspendable.suspendWhen(area.beingUpdatedProperty()); } @Override public void selectRange(int anchorParagraph, int anchorColumn, int caretParagraph, int caretColumn) { selectRange(textPosition(anchorParagraph, anchorColumn), textPosition(caretParagraph, caretColumn)); } @Override public void selectRange(int anchorPosition, int caretPosition) { if (anchorPosition <= caretPosition) { doSelect(anchorPosition, caretPosition, true); } else { 
doSelect(caretPosition, anchorPosition, false); } } @Override public void selectRange0(int startPosition, int endPosition) { doSelect(startPosition, endPosition, anchorIsStart()); } @Override public void selectRange0(int startParagraphIndex, int startColPosition, int endParagraphIndex, int endColPosition) { selectRange0(textPosition(startParagraphIndex, startColPosition), textPosition(endParagraphIndex, endColPosition)); } @Override public void moveStartBy(int amount, Direction direction) { int updatedStart = direction == Direction.LEFT ? getStartPosition() - amount : getStartPosition() + amount; selectRange0(updatedStart, getEndPosition()); } @Override public void moveEndBy(int amount, Direction direction) { int updatedEnd = direction == Direction.LEFT ? getEndPosition() - amount : getEndPosition() + amount; selectRange0(getStartPosition(), updatedEnd); } @Override public void moveStartTo(int position) { selectRange0(position, getEndPosition()); } @Override public void moveStartTo(int paragraphIndex, int columnPosition) { moveStartTo(textPosition(paragraphIndex, columnPosition)); } @Override public void moveEndTo(int position) { selectRange0(getStartPosition(), position); } @Override public void moveEndTo(int paragraphIndex, int columnPosition) { moveEndTo(textPosition(paragraphIndex, columnPosition)); } @Override public void dispose() { subscription.unsubscribe(); } private void doSelect(int startPosition, int endPosition, boolean anchorIsStart) { Runnable updateRange = () -> { delegate.selectRange0(startPosition, endPosition); internalStartedByAnchor.setValue(anchorIsStart); caret.moveTo(anchorIsStart ? endPosition : startPosition); }; if (area.isBeingUpdated()) { updateRange.run(); } else { area.beingUpdatedProperty().suspendWhile(updateRange); } } private int textPosition(int paragraphIndex, int columnPosition) { return area.position(paragraphIndex, columnPosition).toOffset(); } }
Fix typo: use camel case
richtextfx/src/main/java/org/fxmisc/richtext/BoundedSelectionImpl.java
Fix typo: use camel case
<ide><path>ichtextfx/src/main/java/org/fxmisc/richtext/BoundedSelectionImpl.java <ide> <ide> @Override public EventStream<?> dirtyEvents() { return delegate.dirtyEvents(); } <ide> <del> private final SuspendableNo beingupdated = new SuspendableNo(); <del> public final boolean isBeingUpdated() { return beingupdated.get(); } <del> public final ObservableValue<Boolean> beingUpdatedProperty() { return beingupdated; } <add> private final SuspendableNo beingUpdated = new SuspendableNo(); <add> public final boolean isBeingUpdated() { return beingUpdated.get(); } <add> public final ObservableValue<Boolean> beingUpdatedProperty() { return beingUpdated; } <ide> <ide> private final Var<Boolean> internalStartedByAnchor = Var.newSimpleVar(true); <ide> private final SuspendableVal<Boolean> startedByAnchor = internalStartedByAnchor.suspendable(); <ide> <ide> Suspendable omniSuspendable = Suspendable.combine( <ide> // first, so it's released last <del> beingupdated, <add> beingUpdated, <ide> <ide> startedByAnchor, <ide>
Java
mit
error: pathspec 'thirty_days_of_code/Interfaces.java' did not match any file(s) known to git
658bdcc9bddd710aa93013a06fa9fb93e8beff54
1
RCoon/HackerRank,RCoon/HackerRank,RCoon/HackerRank
package thirty_days_of_code; import java.util.Scanner; /* * Problem Statement: * Here you are given an interface AdvancedArithmetic which contains a method * signature * * int divisorSum(int n). (The divisorSum function just takes an integer as * input and return the sum of all its divisors.) * Your only task is to write a class Calculator which implements the interface. * * Note : The class Calculator shouldn't be public. * * Input Format: * Only one line containing integer n * * Constraints: * 1 <= n <= 1000 * * Output Format: * In the first line print "I implemented: AdvancedArithmetic" without quotes. * In the next line print the sum of divisors of n as given in problem * statement. * * Sample Input: * 6 * * Sample Output: * I implemented: AdvancedArithmetic * 12 */ public class Interfaces { public static void main(String[] args) { Scanner sc=new Scanner(System.in); int n=sc.nextInt(); AdvancedArithmetic myCalculator=new DivisorCalculator(); int sum=myCalculator.divisorSum(n); System.out.println("I implemented: AdvancedArithmetic\n"+sum); sc.close(); } } class DivisorCalculator implements AdvancedArithmetic { public int divisorSum(int n) { int maxDivisor = (int) Math.sqrt(n); int sum = 0; for (int i = 1; i <= maxDivisor; i++) { if (n % i == 0) { sum += i; int d = n / i; if (d != i) { sum += d; } } } return sum; } } interface AdvancedArithmetic { int divisorSum(int n); }
thirty_days_of_code/Interfaces.java
Add Interfaces
thirty_days_of_code/Interfaces.java
Add Interfaces
<ide><path>hirty_days_of_code/Interfaces.java <add>package thirty_days_of_code; <add> <add>import java.util.Scanner; <add> <add>/* <add> * Problem Statement: <add> * Here you are given an interface AdvancedArithmetic which contains a method <add> * signature <add> * <add> * int divisorSum(int n). (The divisorSum function just takes an integer as <add> * input and return the sum of all its divisors.) <add> * Your only task is to write a class Calculator which implements the interface. <add> * <add> * Note : The class Calculator shouldn't be public. <add> * <add> * Input Format: <add> * Only one line containing integer n <add> * <add> * Constraints: <add> * 1 <= n <= 1000 <add> * <add> * Output Format: <add> * In the first line print "I implemented: AdvancedArithmetic" without quotes. <add> * In the next line print the sum of divisors of n as given in problem <add> * statement. <add> * <add> * Sample Input: <add> * 6 <add> * <add> * Sample Output: <add> * I implemented: AdvancedArithmetic <add> * 12 <add> */ <add>public class Interfaces { <add> <add> public static void main(String[] args) { <add> Scanner sc=new Scanner(System.in); <add> int n=sc.nextInt(); <add> AdvancedArithmetic myCalculator=new DivisorCalculator(); <add> int sum=myCalculator.divisorSum(n); <add> System.out.println("I implemented: AdvancedArithmetic\n"+sum); <add> sc.close(); <add> } <add>} <add> <add>class DivisorCalculator implements AdvancedArithmetic { <add> <add> public int divisorSum(int n) { <add> <add> int maxDivisor = (int) Math.sqrt(n); <add> int sum = 0; <add> <add> for (int i = 1; i <= maxDivisor; i++) { <add> if (n % i == 0) { <add> sum += i; <add> <add> int d = n / i; <add> if (d != i) { <add> sum += d; <add> } <add> } <add> } <add> return sum; <add> } <add>} <add> <add>interface AdvancedArithmetic { <add> int divisorSum(int n); <add>}
JavaScript
mit
ed6ac1dbb767061cc0b0e5c447f37dc0ef6fc7e1
0
hansolo669/iitc-tweaks,hansolo669/iitc-tweaks
// ==UserScript== // @id iitc-plugin-region-score-lead@hansolo669 // @name IITC plugin: region score lead // @category Tweaks // @version 0.0.3 // @namespace https://github.com/hansolo669/iitc-tweaks // @updateURL http://www.reallyawesomedomain.com/iitc-tweaks/region-score-lead.meta.js // @downloadURL http://www.reallyawesomedomain.com/iitc-tweaks/region-score-lead.user.js // @description Small modification to the region scores to show the current mu lead. // @include https://www.ingress.com/intel* // @include http://www.ingress.com/intel* // @match https://www.ingress.com/intel* // @match http://www.ingress.com/intel* // @include https://www.ingress.com/mission/* // @include http://www.ingress.com/mission/* // @match https://www.ingress.com/mission/* // @match http://www.ingress.com/mission/* // @grant none // ==/UserScript== function wrapper(plugin_info) { // ensure plugin framework is there, even if iitc is not yet loaded if(typeof window.plugin !== 'function') window.plugin = function() {}; // PLUGIN START //////////////////////////////////////////////////////// var setup = function() { // include the S2 functions if they don't exist if (!window.S2) { (function() { window.S2 = {}; var LatLngToXYZ = function(latLng) { var d2r = Math.PI/180.0; var phi = latLng.lat*d2r; var theta = latLng.lng*d2r; var cosphi = Math.cos(phi); return [Math.cos(theta)*cosphi, Math.sin(theta)*cosphi, Math.sin(phi)]; }; var XYZToLatLng = function(xyz) { var r2d = 180.0/Math.PI; var lat = Math.atan2(xyz[2], Math.sqrt(xyz[0]*xyz[0]+xyz[1]*xyz[1])); var lng = Math.atan2(xyz[1], xyz[0]); return L.latLng(lat*r2d, lng*r2d); }; var largestAbsComponent = function(xyz) { var temp = [Math.abs(xyz[0]), Math.abs(xyz[1]), Math.abs(xyz[2])]; if (temp[0] > temp[1]) { if (temp[0] > temp[2]) { return 0; } else { return 2; } } else { if (temp[1] > temp[2]) { return 1; } else { return 2; } } }; var faceXYZToUV = function(face,xyz) { var u,v; switch (face) { case 0: u = xyz[1]/xyz[0]; v = xyz[2]/xyz[0]; 
break; case 1: u = -xyz[0]/xyz[1]; v = xyz[2]/xyz[1]; break; case 2: u = -xyz[0]/xyz[2]; v = -xyz[1]/xyz[2]; break; case 3: u = xyz[2]/xyz[0]; v = xyz[1]/xyz[0]; break; case 4: u = xyz[2]/xyz[1]; v = -xyz[0]/xyz[1]; break; case 5: u = -xyz[1]/xyz[2]; v = -xyz[0]/xyz[2]; break; default: throw {error: 'Invalid face'}; break; } return [u,v]; } var XYZToFaceUV = function(xyz) { var face = largestAbsComponent(xyz); if (xyz[face] < 0) { face += 3; } uv = faceXYZToUV (face,xyz); return [face, uv]; }; var FaceUVToXYZ = function(face,uv) { var u = uv[0]; var v = uv[1]; switch (face) { case 0: return [ 1, u, v]; case 1: return [-u, 1, v]; case 2: return [-u,-v, 1]; case 3: return [-1,-v,-u]; case 4: return [ v,-1,-u]; case 5: return [ v, u,-1]; default: throw {error: 'Invalid face'}; } }; var STToUV = function(st) { var singleSTtoUV = function(st) { if (st >= 0.5) { return (1/3.0) * (4*st*st - 1); } else { return (1/3.0) * (1 - (4*(1-st)*(1-st))); } } return [singleSTtoUV(st[0]), singleSTtoUV(st[1])]; }; var UVToST = function(uv) { var singleUVtoST = function(uv) { if (uv >= 0) { return 0.5 * Math.sqrt (1 + 3*uv); } else { return 1 - 0.5 * Math.sqrt (1 - 3*uv); } } return [singleUVtoST(uv[0]), singleUVtoST(uv[1])]; }; var STToIJ = function(st,order) { var maxSize = (1<<order); var singleSTtoIJ = function(st) { var ij = Math.floor(st * maxSize); return Math.max(0, Math.min(maxSize-1, ij)); }; return [singleSTtoIJ(st[0]), singleSTtoIJ(st[1])]; }; var IJToST = function(ij,order,offsets) { var maxSize = (1<<order); return [ (ij[0]+offsets[0])/maxSize, (ij[1]+offsets[1])/maxSize ]; } // hilbert space-filling curve // based on http://blog.notdot.net/2009/11/Damn-Cool-Algorithms-Spatial-indexing-with-Quadtrees-and-Hilbert-Curves // note: rather then calculating the final integer hilbert position, we just return the list of quads // this ensures no precision issues whth large orders (S3 cell IDs use up to 30), and is more // convenient for pulling out the individual bits as needed 
later var pointToHilbertQuadList = function(x,y,order) { var hilbertMap = { 'a': [ [0,'d'], [1,'a'], [3,'b'], [2,'a'] ], 'b': [ [2,'b'], [1,'b'], [3,'a'], [0,'c'] ], 'c': [ [2,'c'], [3,'d'], [1,'c'], [0,'b'] ], 'd': [ [0,'a'], [3,'c'], [1,'d'], [2,'d'] ] }; var currentSquare='a'; var positions = []; for (var i=order-1; i>=0; i--) { var mask = 1<<i; var quad_x = x&mask ? 1 : 0; var quad_y = y&mask ? 1 : 0; var t = hilbertMap[currentSquare][quad_x*2+quad_y]; positions.push(t[0]); currentSquare = t[1]; } return positions; }; // S2Cell class S2.S2Cell = function(){}; //static method to construct S2.S2Cell.FromLatLng = function(latLng,level) { var xyz = LatLngToXYZ(latLng); var faceuv = XYZToFaceUV(xyz); var st = UVToST(faceuv[1]); var ij = STToIJ(st,level); return S2.S2Cell.FromFaceIJ (faceuv[0], ij, level); }; S2.S2Cell.FromFaceIJ = function(face,ij,level) { var cell = new S2.S2Cell(); cell.face = face; cell.ij = ij; cell.level = level; return cell; }; S2.S2Cell.prototype.toString = function() { return 'F'+this.face+'ij['+this.ij[0]+','+this.ij[1]+']@'+this.level; }; S2.S2Cell.prototype.getLatLng = function() { var st = IJToST(this.ij,this.level, [0.5,0.5]); var uv = STToUV(st); var xyz = FaceUVToXYZ(this.face, uv); return XYZToLatLng(xyz); }; S2.S2Cell.prototype.getCornerLatLngs = function() { var result = []; var offsets = [ [ 0.0, 0.0 ], [ 0.0, 1.0 ], [ 1.0, 1.0 ], [ 1.0, 0.0 ] ]; for (var i=0; i<4; i++) { var st = IJToST(this.ij, this.level, offsets[i]); var uv = STToUV(st); var xyz = FaceUVToXYZ(this.face, uv); result.push ( XYZToLatLng(xyz) ); } return result; }; S2.S2Cell.prototype.getFaceAndQuads = function() { var quads = pointToHilbertQuadList(this.ij[0], this.ij[1], this.level); return [this.face,quads]; }; S2.S2Cell.prototype.getNeighbors = function() { var fromFaceIJWrap = function(face,ij,level) { var maxSize = (1<<level); if (ij[0]>=0 && ij[1]>=0 && ij[0]<maxSize && ij[1]<maxSize) { // no wrapping out of bounds return 
S2.S2Cell.FromFaceIJ(face,ij,level); } else { // the new i,j are out of range. // with the assumption that they're only a little past the borders we can just take the points as // just beyond the cube face, project to XYZ, then re-create FaceUV from the XYZ vector var st = IJToST(ij,level,[0.5,0.5]); var uv = STToUV(st); var xyz = FaceUVToXYZ(face,uv); var faceuv = XYZToFaceUV(xyz); face = faceuv[0]; uv = faceuv[1]; st = UVToST(uv); ij = STToIJ(st,level); return S2.S2Cell.FromFaceIJ (face, ij, level); } }; var face = this.face; var i = this.ij[0]; var j = this.ij[1]; var level = this.level; return [ fromFaceIJWrap(face, [i-1,j], level), fromFaceIJWrap(face, [i,j-1], level), fromFaceIJWrap(face, [i+1,j], level), fromFaceIJWrap(face, [i,j+1], level) ]; }; })(); } // we stick all this stuff in the setup function to ensure it loads *after* IITC has booted window.regionScoreboard = function() { var latlng = map.getCenter(); window.requestRegionScores(latlng); } window.regionScoresAtRegion = function(region) { var latlng = regionToLatLong(region); window.requestRegionScores(latlng); }; window.nextCheckpoint = function() { var checkpoint = 5*60*60; var now = Date.now(); return (Math.floor(now / (checkpoint*1000)) * (checkpoint*1000)) + checkpoint*1000; }; // apprently this doesn't exist sometimes? // I have no idea, but I kinda need this // borrowed from hooks.js if (!window.removeHook) { // callback must the SAME function to be unregistered. 
window.removeHook = function(event, callback) { if (typeof callback !== 'function') throw('Callback must be a function.'); if (window._hooks[event]) { var index = window._hooks[event].indexOf(callback); if(index == -1) console.warn('Callback wasn\'t registered for this event.'); else window._hooks[event].splice(index, 1); } } } window.formattedTimeToCheckpoint = function(checkpointms) { var now = Date.now(); var hours = Math.floor(((((checkpointms-now)/1000)/60)/60)%24); var mins = Math.floor((((checkpointms-now)/1000)/60)%60); var sec = Math.floor(((checkpointms-now)/1000)%60); return 'Next Checkpoint in: ' + hours + 'h ' + mins + 'm ' + sec + 's'; }; // global time to next checkpoint var currentcheckpoint = nextCheckpoint(); setInterval(function() { var nextcheckpoint = nextCheckpoint(); var now = Date.now(); $('.time-to-checkpoint').each(function(i, elem) { if (now > currentcheckpoint) { currentcheckpoint = nextcheckpoint; runHooks('pluginRegionScores', {event:'checkpoint'}); } elem.innerHTML = formattedTimeToCheckpoint(nextCheckpoint()); }); }, 1000); pluginCreateHook('pluginRegionScores'); window.requestRegionScores = function(latlng, existingdlg) { var latE6 = Math.round(latlng.lat*1E6); var lngE6 = Math.round(latlng.lng*1E6); var dlg = null; if (existingdlg) { dlg = existingdlg; dlg.html('Loading regional scores...'); } else { dlg = dialog({ title:'Region scores', html:'Loading regional scores...', width:450, minHeight:345, create: function() { this.currentRegionLatLong = latlng; var _this = this; this.refreshRegionScores = function() { window.requestRegionScores(_this.currentRegionLatLong, _this.currentdlg); }; window.addHook('pluginRegionScores', this.refreshRegionScores); }, closeCallback: function() { window.removeHook('pluginRegionScores', this.refreshRegionScores); } }); } dlg[0].currentdlg = dlg; window.postAjax('getRegionScoreDetails', {latE6:latE6,lngE6:lngE6}, function(res){regionScoreboardSuccess(res,dlg);}, 
function(){regionScoreboardFailure(dlg);}); }; window.requestRegionScoresRetry = function(ev) { window.requestRegionScores(ev.target.parentNode.currentRegionLatLong, ev.target.parentNode.currentdlg); }; function regionScoreboardFailure(dlg) { dlg.html('Failed to load region scores - <a onclick="window.requestRegionScoresRetry(event)">try again</a>'); } // function to compute a simple linear regression for both teams function simpleLinearRegression(items) { var len = items.length-1; var sumx = 0; var sumxsq = 0; var sumyres = 0; var sumyenl = 0; var sumprodres = 0; var sumprodenl = 0; for (var i = 1; i < items.length; i++) { sumyenl += parseInt(items[i][0]); sumyres += parseInt(items[i][1]); sumprodenl += i * parseInt(items[i][0]); sumprodres += i * parseInt(items[i][1]); sumx += i; sumxsq += i*i; } // a = (sum_of_products - (sum_x * sum_y) / length) / (sum_x_squared - ((sum_x ** 2) / length)) // b = (sum_y - a * sum_x) / length // return a, b var a = function(sxs, sx, sy, sp, len) { return (sp - (sx*sy)/len)/(sxs-((sx*sx)/len)); }; var b = function(sy, sx, alpha, len) { return (sy - alpha * sx)/len; }; return {enl: [ a(sumxsq, sumx, sumyenl, sumprodenl, len), b(sumyenl, sumx, a(sumxsq, sumx, sumyenl, sumprodenl, len), len)], res: [ a(sumxsq, sumx, sumyres,sumprodres, len), b(sumyres, sumx, a(sumxsq, sumx, sumyres,sumprodres, len), len) ]}; } function regionScoreboardScoreHistoryChart(result, logscale) { // svg area 400x130. graph area 350x100, offset to 40,10 if(!Math.log10) Math.log10 = function(x) { return Math.log(x) / Math.LN10; }; var max = Math.max(result.gameScore[0],result.gameScore[1],10); //NOTE: ensure a min of 10 for the graph var items = []; //we'll copy the items to an array indexed by checkpoint number - easier to access! 
for (var i=0; i<result.scoreHistory.length; i++) { max = Math.max(max, result.scoreHistory[i][1], result.scoreHistory[i][2]); //note: index 0 is the checkpoint number here items[result.scoreHistory[i][0]] = [result.scoreHistory[i][1], result.scoreHistory[i][2]]; } // scale up maximum a little, so graph isn't squashed right against upper edge max *= 1.09; // 0 cannot be displayed on a log scale, so we set the minimum to 0.001 and divide by lg(0.001)=-3 var scale = logscale ? function(y) { return 10 - Math.log10(Math.max(0.001,y/max)) / 3 * 100; } : function(y) { return 110-y/max*100; }; // calculate and generate linear regressions var linregs = simpleLinearRegression(items); var resy1 = linregs.res[0] * 1 + linregs.res[1]; var resy2 = linregs.res[0] * (items.length-1) + linregs.res[1]; var enly1 = linregs.enl[0] * 1 + linregs.enl[1]; var enly2 = linregs.enl[0] * (items.length-1) + linregs.enl[1]; var x1 = 0*10+40 var x2 = items.length*10+40; var regressions = '<line x1="'+x1+'" y1="'+scale(resy1)+'" x2="'+x2+'" y2="'+scale(resy2)+'" stroke="'+COLORS[TEAM_RES]+'" stroke-width="1" />' +'<line x1="'+x1+'" y1="'+scale(enly1)+'" x2="'+x2+'" y2="'+scale(enly2)+'" stroke="'+COLORS[TEAM_ENL]+'" stroke-width="1" />' var teamPaths = [[],[]]; var otherSvg = []; for (var i=0; i<items.length; i++) { var x=i*10+40; if (items[i] !== undefined) { // paths if (i>0 && items[i-1] !== undefined) { for (var t=0; t<2; t++) { teamPaths[t].push('M'+(x-10)+','+scale(items[i-1][t])+' L'+x+','+scale(items[i][t])); } } // markers otherSvg.push('<g title="test" class="checkpoint" data-cp="'+i+'" data-enl="'+items[i][0]+'" data-res="'+items[i][1]+'">'); otherSvg.push('<rect x="'+(i*10+35)+'" y="10" width="10" height="100" fill="black" fill-opacity="0" />'); for (var t=0; t<2; t++) { var col = t==0 ? 
COLORS[TEAM_ENL] : COLORS[TEAM_RES]; otherSvg.push('<circle cx="'+x+'" cy="'+scale(items[i][t])+'" r="3" stroke-width="1" stroke="'+col+'" fill="'+col+'" fill-opacity="0.5" />'); } otherSvg.push('</g>'); } } var paths = '<path d="M40,110 L40,10 M40,110 L390,110" stroke="#fff" />'; // graph tickmarks - horizontal var ticks = []; for (var i=5; i<=35; i+=5) { var x=i*10+40; ticks.push('M'+x+',10 L'+x+',110'); otherSvg.push('<text x="'+x+'" y="125" font-size="12" font-family="Roboto, Helvetica, sans-serif" text-anchor="middle" fill="#fff">'+i+'</text>'); } // vertical // first we calculate the power of 10 that is smaller than the max limit var vtickStep = Math.pow(10,Math.floor(Math.log10(max))); var vticks = []; if(logscale) { for(var i=0;i<4;i++) { vticks.push(vtickStep); vtickStep /= 10; } } else { // this could be between 1 and 10 grid lines - so we adjust to give nicer spacings if (vtickStep < (max/5)) { vtickStep *= 2; } else if (vtickStep > (max/2)) { vtickStep /= 2; } for (var i=vtickStep; i<=max; i+=vtickStep) { vticks.push(i); } } vticks.forEach(function(i) { var y = scale(i); ticks.push('M40,'+y+' L390,'+y); var istr = i>=1000000000 ? i/1000000000+'B' : i>=1000000 ? i/1000000+'M' : i>=1000 ? i/1000+'k' : i; otherSvg.push('<text x="35" y="'+y+'" font-size="12" font-family="Roboto, Helvetica, sans-serif" text-anchor="end" fill="#fff">'+istr+'</text>'); }); paths += '<path d="'+ticks.join(' ')+'" stroke="#fff" opacity="0.3" />;' for (var t=0; t<2; t++) { var col = t==0 ? 
COLORS[TEAM_ENL] : COLORS[TEAM_RES]; if (teamPaths[t].length > 0) { paths += '<path d="'+teamPaths[t].join(' ')+'" stroke="'+col+'" />'; } var y = scale(result.gameScore[t]); paths += '<path d="M40,'+y+' L390,'+y+'" stroke="'+col+'" stroke-dasharray="3,2" opacity="0.8" />'; } var svg = '<div><svg width="400" height="130">' +'<rect x="0" y="0" width="400" height="130" stroke="#FFCE00" fill="#08304E" />' +paths +otherSvg.join('') +regressions +'<foreignObject height="18" width="45" y="111" x="0" class="node"><label title="Logarithmic scale">' +'<input type="checkbox" class="logscale" style="height:auto;padding:0;vertical-align:middle"'+(logscale?' checked':'')+'/>' +'log</label></foreignObject>' +'</svg></div>'; return svg; } function regionScoreboardScoreHistoryTable(result) { var history = result.scoreHistory; var table = '<table class="checkpoint_table" style="width: 370px;"> \ <thead><tr><th>Checkpoint</th><th>Enlightened</th><th>Resistance</th> \ <th>Lead</th></tr></thead>'; var lead = 0; var rows = ''; for(var i=history.length-1; i >= 0; i--) { lead += history[i][1] - history[i][2]; var checkpoint_lead = lead < 0?'res: ' + digits(Math.abs(lead)):'enl: ' + digits(lead); rows = '<tr><td>' + history[i][0] + '</td><td>' + digits(history[i][1]) + '</td><td>' + digits(history[i][2]) + '</td><td class="' + (lead < 0?'res':'enl') + '" style="text-align: left;" >' + checkpoint_lead + '</td></tr>' + rows; } return table += rows +'</table>'; } // start crazy region code // facenames and codewords var facenames = [ 'AF', 'AS', 'NR', 'PA', 'AM', 'ST' ]; var codewords = [ 'ALPHA', 'BRAVO', 'CHARLIE', 'DELTA', 'ECHO', 'FOXTROT', 'GOLF', 'HOTEL', 'JULIET', 'KILO', 'LIMA', 'MIKE', 'NOVEMBER', 'PAPA', 'ROMEO', 'SIERRA', ]; var regionToLatLong = function(region) { // rot, d2xy, facenames, and codewords taken from regions.user.js var rot = function(n, x, y, rx, ry) { if(ry == 0) { if(rx == 1) { x = n-1 - x; y = n-1 - y; } return [y, x]; } return [x, y]; } var d2xy = function(n, d) 
{ var rx, ry, s, t = d, xy = [0, 0]; for(s=1; s<n; s*=2) { rx = 1 & (t/2); ry = 1 & (t ^ rx); xy = rot(s, xy[0], xy[1], rx, ry); xy[0] += s * rx; xy[1] += s * ry; t /= 4; } return xy; } // inspired by regions.user.js getSearchResult region = region.split("-"); var faceId = facenames.indexOf(region[0].slice(0, 2)); var regionI = parseInt(region[0].slice(2)) - 1; var regionJ = codewords.indexOf(region[1]); var xy = d2xy(4, parseInt(region[2])); regionI = (regionI << 2) + xy[0]; regionJ = (regionJ << 2) + xy[1]; var cell = (faceId % 2 == 1) ? S2.S2Cell.FromFaceIJ(faceId, [regionJ,regionI], 6) : S2.S2Cell.FromFaceIJ(faceId, [regionI,regionJ], 6); return cell.getLatLng(); } // borrowed from the "regions" plugin function regionName(cell) { // ingress does some odd things with the naming. for some faces, the i and j coords are flipped when converting // (and not only the names - but the full quad coords too!). easiest fix is to create a temporary cell with the coords // swapped if (cell.face == 1 || cell.face == 3 || cell.face == 5) { cell = S2.S2Cell.FromFaceIJ ( cell.face, [cell.ij[1], cell.ij[0]], cell.level ); } // first component of the name is the face var name = facenames[cell.face]; if (cell.level >= 4) { // next two components are from the most signifitant four bits of the cell I/J var regionI = cell.ij[0] >> (cell.level-4); var regionJ = cell.ij[1] >> (cell.level-4); name += zeroPad(regionI+1,2)+'-'+codewords[regionJ]; } if (cell.level >= 6) { // the final component is based on the hibbert curve for the relevant cell var facequads = cell.getFaceAndQuads(); var number = facequads[1][4]*4+facequads[1][5]; name += '-'+zeroPad(number,2); } return name; } window.regionSearch = function(ev) { if (ev.target.name === "regionsearch") { var search = ev.target.value.toUpperCase(); var latlng = map.getCenter(); var currentregion = regionName(S2.S2Cell.FromLatLng(latlng, 6)).split("-"); // Borrwed from the regions plugin and modified to allow spaces OR dashes OR nothing 
between keywords. // It's a good enough regex for it's purpose. // This regexp is quite forgiving. Dashes are allowed between all components, each dash and leading zero is optional. // All whitespace is removed in onSearch(). If the first or both the first and second component are omitted, they are // replaced with the current cell's coordinates (=the cell which contains the center point of the map). If the last // component is ommited, the 4x4 cell group is used. var reparse = new RegExp('^(?:(?:(' + facenames.join('|') + ')(?:\\s?|-?))?((?:1[0-6])|(?:0?[1-9]))(?:\\s?|-?))?(' + codewords.join('|') + ')(?:(?:\\s?|-?)((?:1[0-5])|(?:0?\\d)))?$', 'i'); var matches = search.match(reparse); var result = ""; if (matches === null) { var searches = search.match(/^(\w{1,2})(\d{1,2})$/i); if (facenames.includes(search)) { for (var i = 1; i <= 16; i++) { result += '<span>' + search + (i>=10?i:('0' + i)) + '</span><br>'; } } else if (searches !== null) { var num = searches[2]?parseInt(searches[2]):''; var word = searches[1]?searches[1]:''; for (var i = 0; i < codewords.length; i++) { result += '<span>' + word + (num>=10?num:'0'+num) + '-' + codewords[i] + '</span><br>'; } } } else { var face = !matches[1]?currentregion[0]:matches[1]; var facenum = matches[2]?parseInt(matches[2]):false; var region = !matches[3]?currentregion[1]:matches[3]; var regionnum = matches[4]?parseInt(matches[4]):false; if (!regionnum) { for (var i = 0; i < 16; i++) { var res = face + (facenum?(facenum>=10?facenum:'0'+facenum):'') + '-' + region + '-' + (i>=10?i:('0'+i)); result += '<a onclick="window.regionScoresAtRegion(\'' + res + '\')">' + res + '</a><br>'; } } else { var res = face + (facenum?(facenum>=10?facenum:'0'+facenum):'') + '-' + region + '-' + (regionnum>=10?regionnum:('0' + regionnum)); result = '<a onclick="window.regionScoresAtRegion(\'' + res + '\')">' + res + '</a><br>' } } $('.regionresults').html(result); } } window.regionSelector = function() { var selectorhtml = '<input 
style="width:100%;" type="text" name="regionsearch" placeholder="search" onkeyup="window.regionSearch(event)"/>' +'<div style="overflow-y: scroll; height: 241px; padding-top: 5px;">' +'<div class="regionresults"></div>' +'<div> possible regions: ' +codewords.reduce(function(html, word) { return html += ', ' + word; }) +'</div></div>'; var dlg = dialog({title:'Region selector',html:selectorhtml,width:300,minHeight:345}); }; var handleRegionClick = function(e) { $('.leaflet-container')[0].style.cursor = ''; this.textContent = 'Select region from map'; requestRegionScores(e.latlng); map.off('click', handleRegionClick, this); }; window.regionClickSelector = function(ev) { var target = ev.target; if ($('.leaflet-container')[0].style.cursor === 'crosshair') { ev.target.textContent = 'Select region from map'; $('.leaflet-container')[0].style.cursor = ''; map.off('click', handleRegionClick, target); return; } ev.target.textContent = 'click to cancel select from map'; $('.leaflet-container')[0].style.cursor = 'crosshair'; map.on('click', handleRegionClick, target); }; // end of crazy region code function regionScoreboardSuccess(data,dlg,logscale) { if (data.result === undefined) { return regionScoreboardFailure(dlg); } var agentTable = '<table><tr><th>#</th><th>Agent</th></tr>'; for (var i=0; i<data.result.topAgents.length; i++) { var agent = data.result.topAgents[i]; agentTable += '<tr><td>'+(i+1)+'</td><td class="nickname '+(agent.team=='RESISTANCE'?'res':'enl')+'">'+agent.nick+'</td></tr>'; } if (data.result.topAgents.length === 0) { agentTable += '<tr><td colspan="2"><i>no top agents</i></td></tr>'; } agentTable += '</table>'; var maxAverage = Math.max(data.result.gameScore[0], data.result.gameScore[1], 1); var teamRow = []; for (var t=0; t<2; t++) { var team = t===0 ? 'Enlightened' : 'Resistance'; var teamClass = t===0 ? 'enl' : 'res'; var teamCol = t===0 ? 
COLORS[TEAM_ENL] : COLORS[TEAM_RES]; var barSize = Math.round(data.result.gameScore[t]/maxAverage*200); teamRow[t] = '<tr><th class="'+teamClass+'">' +team+'</th><td class="'+teamClass+'">' +digits(data.result.gameScore[t])+'</td><td><div style="background:' +teamCol+'; width: '+barSize+'px; height: 1.3ex; border: 2px outset ' +teamCol+'"> </td></tr>'; } var history = data.result.scoreHistory; // the lead is the sum of the difference of each checkpoint var lead = history.map(function(cp) { return cp[1] - cp[2] }).reduce(function(acc, diff) { return acc + diff }, 0); var leadinfo = '<div style="padding-left: 5px;">'; // res lead when we sum to a negative value if (lead < 0) { leadinfo += '<span class="res">res lead: ' + digits(Math.abs(lead)) + 'mu</span></div>'; } else { leadinfo += '<span class="enl">enl lead: ' + digits(lead) + 'mu</span></div>'; } var first = PLAYER.team == 'RESISTANCE' ? 1 : 0; // we need some divs to make the accordion work properly dlg.html('<div class="cellscore">' +'<b>Region scores for '+data.result.regionName+'</b>' +'<div><a title="Search region" onclick="window.regionSelector()">Search region</a> OR '// lets add the ability to select another region +'<a title="Click to select region" onclick="window.regionClickSelector(event)">Select region from map</a>' +'<table>'+teamRow[first]+teamRow[1-first]+'</table>' +leadinfo // stick our info under the score bars +regionScoreboardScoreHistoryChart(data.result, logscale) +'<div class="time-to-checkpoint">'+ formattedTimeToCheckpoint(nextCheckpoint()) +'</div></div>' +'<b>Checkpoint overview</b>' +'<div>'+regionScoreboardScoreHistoryTable(data.result)+'</div>' +'<b>Top agents</b>' +'<div>'+agentTable+'</div>' +'</div>'); $('g.checkpoint', dlg).each(function(i, elem) { elem = $(elem); var tooltip = 'CP:\t'+elem.attr('data-cp') + '\nEnl:\t' + digits(elem.attr('data-enl')) + '\nRes:\t' + digits(elem.attr('data-res')) + '\nDiff:\t' + digits(Math.abs(elem.attr('data-res')-elem.attr('data-enl'))); 
elem.tooltip({ content: convertTextToTableMagic(tooltip), position: {my: "center bottom", at: "center top-10"} }); }); $('.cellscore', dlg).accordion({ header: 'b', heightStyle: "fill", }); $('input.logscale', dlg).change(function(){ var input = $(this); regionScoreboardSuccess(data, dlg, input.prop('checked')); }); } } // PLUGIN END ////////////////////////////////////////////////////////// setup.info = plugin_info; //add the script info data to the function as a property if(!window.bootPlugins) window.bootPlugins = []; window.bootPlugins.push(setup); // if IITC has already booted, immediately run the 'setup' function if(window.iitcLoaded && typeof setup === 'function') setup(); } // wrapper end // inject code into site context var script = document.createElement('script'); var info = {}; if (typeof GM_info !== 'undefined' && GM_info && GM_info.script) info.script = { version: GM_info.script.version, name: GM_info.script.name, description: GM_info.script.description }; script.appendChild(document.createTextNode('('+ wrapper +')('+JSON.stringify(info)+');')); (document.body || document.head || document.documentElement).appendChild(script);
region-score-lead.user.js
// ==UserScript== // @id iitc-plugin-region-score-lead@hansolo669 // @name IITC plugin: region score lead // @category Tweaks // @version 0.0.3 // @namespace https://github.com/hansolo669/iitc-tweaks // @updateURL http://www.reallyawesomedomain.com/iitc-tweaks/region-score-lead.meta.js // @downloadURL http://www.reallyawesomedomain.com/iitc-tweaks/region-score-lead.user.js // @description Small modification to the region scores to show the current mu lead. // @include https://www.ingress.com/intel* // @include http://www.ingress.com/intel* // @match https://www.ingress.com/intel* // @match http://www.ingress.com/intel* // @include https://www.ingress.com/mission/* // @include http://www.ingress.com/mission/* // @match https://www.ingress.com/mission/* // @match http://www.ingress.com/mission/* // @grant none // ==/UserScript== function wrapper(plugin_info) { // ensure plugin framework is there, even if iitc is not yet loaded if(typeof window.plugin !== 'function') window.plugin = function() {}; // PLUGIN START //////////////////////////////////////////////////////// var setup = function() { // we stick all this stuff in the setup function to ensure it loads *after* IITC has booted window.regionScoreboard = function() { var latlng = map.getCenter(); window.requestRegionScores(latlng); } window.regionScoresAtRegion = function(region) { var latlng = regionToLatLong(region); window.requestRegionScores(latlng); }; window.nextCheckpoint = function() { var checkpoint = 5*60*60; var now = Date.now(); return (Math.floor(now / (checkpoint*1000)) * (checkpoint*1000)) + checkpoint*1000; }; // apprently this doesn't exist sometimes? // I have no idea, but I kinda need this // borrowed from hooks.js if (!window.removeHook) { // callback must the SAME function to be unregistered. 
window.removeHook = function(event, callback) { if (typeof callback !== 'function') throw('Callback must be a function.'); if (window._hooks[event]) { var index = window._hooks[event].indexOf(callback); if(index == -1) console.warn('Callback wasn\'t registered for this event.'); else window._hooks[event].splice(index, 1); } } } window.formattedTimeToCheckpoint = function(checkpointms) { var now = Date.now(); var hours = Math.floor(((((checkpointms-now)/1000)/60)/60)%24); var mins = Math.floor((((checkpointms-now)/1000)/60)%60); var sec = Math.floor(((checkpointms-now)/1000)%60); return 'Next Checkpoint in: ' + hours + 'h ' + mins + 'm ' + sec + 's'; }; // global time to next checkpoint var currentcheckpoint = nextCheckpoint(); setInterval(function() { var nextcheckpoint = nextCheckpoint(); var now = Date.now(); $('.time-to-checkpoint').each(function(i, elem) { if (now > currentcheckpoint) { currentcheckpoint = nextcheckpoint; runHooks('pluginRegionScores', {event:'checkpoint'}); } elem.innerHTML = formattedTimeToCheckpoint(nextCheckpoint()); }); }, 1000); pluginCreateHook('pluginRegionScores'); window.requestRegionScores = function(latlng, existingdlg) { var latE6 = Math.round(latlng.lat*1E6); var lngE6 = Math.round(latlng.lng*1E6); var dlg = null; if (existingdlg) { dlg = existingdlg; dlg.html('Loading regional scores...'); } else { dlg = dialog({ title:'Region scores', html:'Loading regional scores...', width:450, minHeight:345, create: function() { this.currentRegionLatLong = latlng; var _this = this; this.refreshRegionScores = function() { window.requestRegionScores(_this.currentRegionLatLong, _this.currentdlg); }; window.addHook('pluginRegionScores', this.refreshRegionScores); }, closeCallback: function() { window.removeHook('pluginRegionScores', this.refreshRegionScores); } }); } dlg[0].currentdlg = dlg; window.postAjax('getRegionScoreDetails', {latE6:latE6,lngE6:lngE6}, function(res){regionScoreboardSuccess(res,dlg);}, 
function(){regionScoreboardFailure(dlg);}); }; window.requestRegionScoresRetry = function(ev) { window.requestRegionScores(ev.target.parentNode.currentRegionLatLong, ev.target.parentNode.currentdlg); }; function regionScoreboardFailure(dlg) { dlg.html('Failed to load region scores - <a onclick="window.requestRegionScoresRetry(event)">try again</a>'); } // function to compute a simple linear regression for both teams function simpleLinearRegression(items) { var len = items.length-1; var sumx = 0; var sumxsq = 0; var sumyres = 0; var sumyenl = 0; var sumprodres = 0; var sumprodenl = 0; for (var i = 1; i < items.length; i++) { sumyenl += parseInt(items[i][0]); sumyres += parseInt(items[i][1]); sumprodenl += i * parseInt(items[i][0]); sumprodres += i * parseInt(items[i][1]); sumx += i; sumxsq += i*i; } // a = (sum_of_products - (sum_x * sum_y) / length) / (sum_x_squared - ((sum_x ** 2) / length)) // b = (sum_y - a * sum_x) / length // return a, b var a = function(sxs, sx, sy, sp, len) { return (sp - (sx*sy)/len)/(sxs-((sx*sx)/len)); }; var b = function(sy, sx, alpha, len) { return (sy - alpha * sx)/len; }; return {enl: [ a(sumxsq, sumx, sumyenl, sumprodenl, len), b(sumyenl, sumx, a(sumxsq, sumx, sumyenl, sumprodenl, len), len)], res: [ a(sumxsq, sumx, sumyres,sumprodres, len), b(sumyres, sumx, a(sumxsq, sumx, sumyres,sumprodres, len), len) ]}; } function regionScoreboardScoreHistoryChart(result, logscale) { // svg area 400x130. graph area 350x100, offset to 40,10 if(!Math.log10) Math.log10 = function(x) { return Math.log(x) / Math.LN10; }; var max = Math.max(result.gameScore[0],result.gameScore[1],10); //NOTE: ensure a min of 10 for the graph var items = []; //we'll copy the items to an array indexed by checkpoint number - easier to access! 
for (var i=0; i<result.scoreHistory.length; i++) { max = Math.max(max, result.scoreHistory[i][1], result.scoreHistory[i][2]); //note: index 0 is the checkpoint number here items[result.scoreHistory[i][0]] = [result.scoreHistory[i][1], result.scoreHistory[i][2]]; } // scale up maximum a little, so graph isn't squashed right against upper edge max *= 1.09; // 0 cannot be displayed on a log scale, so we set the minimum to 0.001 and divide by lg(0.001)=-3 var scale = logscale ? function(y) { return 10 - Math.log10(Math.max(0.001,y/max)) / 3 * 100; } : function(y) { return 110-y/max*100; }; // calculate and generate linear regressions var linregs = simpleLinearRegression(items); var resy1 = linregs.res[0] * 1 + linregs.res[1]; var resy2 = linregs.res[0] * (items.length-1) + linregs.res[1]; var enly1 = linregs.enl[0] * 1 + linregs.enl[1]; var enly2 = linregs.enl[0] * (items.length-1) + linregs.enl[1]; var x1 = 0*10+40 var x2 = items.length*10+40; var regressions = '<line x1="'+x1+'" y1="'+scale(resy1)+'" x2="'+x2+'" y2="'+scale(resy2)+'" stroke="'+COLORS[TEAM_RES]+'" stroke-width="1" />' +'<line x1="'+x1+'" y1="'+scale(enly1)+'" x2="'+x2+'" y2="'+scale(enly2)+'" stroke="'+COLORS[TEAM_ENL]+'" stroke-width="1" />' var teamPaths = [[],[]]; var otherSvg = []; for (var i=0; i<items.length; i++) { var x=i*10+40; if (items[i] !== undefined) { // paths if (i>0 && items[i-1] !== undefined) { for (var t=0; t<2; t++) { teamPaths[t].push('M'+(x-10)+','+scale(items[i-1][t])+' L'+x+','+scale(items[i][t])); } } // markers otherSvg.push('<g title="test" class="checkpoint" data-cp="'+i+'" data-enl="'+items[i][0]+'" data-res="'+items[i][1]+'">'); otherSvg.push('<rect x="'+(i*10+35)+'" y="10" width="10" height="100" fill="black" fill-opacity="0" />'); for (var t=0; t<2; t++) { var col = t==0 ? 
COLORS[TEAM_ENL] : COLORS[TEAM_RES]; otherSvg.push('<circle cx="'+x+'" cy="'+scale(items[i][t])+'" r="3" stroke-width="1" stroke="'+col+'" fill="'+col+'" fill-opacity="0.5" />'); } otherSvg.push('</g>'); } } var paths = '<path d="M40,110 L40,10 M40,110 L390,110" stroke="#fff" />'; // graph tickmarks - horizontal var ticks = []; for (var i=5; i<=35; i+=5) { var x=i*10+40; ticks.push('M'+x+',10 L'+x+',110'); otherSvg.push('<text x="'+x+'" y="125" font-size="12" font-family="Roboto, Helvetica, sans-serif" text-anchor="middle" fill="#fff">'+i+'</text>'); } // vertical // first we calculate the power of 10 that is smaller than the max limit var vtickStep = Math.pow(10,Math.floor(Math.log10(max))); var vticks = []; if(logscale) { for(var i=0;i<4;i++) { vticks.push(vtickStep); vtickStep /= 10; } } else { // this could be between 1 and 10 grid lines - so we adjust to give nicer spacings if (vtickStep < (max/5)) { vtickStep *= 2; } else if (vtickStep > (max/2)) { vtickStep /= 2; } for (var i=vtickStep; i<=max; i+=vtickStep) { vticks.push(i); } } vticks.forEach(function(i) { var y = scale(i); ticks.push('M40,'+y+' L390,'+y); var istr = i>=1000000000 ? i/1000000000+'B' : i>=1000000 ? i/1000000+'M' : i>=1000 ? i/1000+'k' : i; otherSvg.push('<text x="35" y="'+y+'" font-size="12" font-family="Roboto, Helvetica, sans-serif" text-anchor="end" fill="#fff">'+istr+'</text>'); }); paths += '<path d="'+ticks.join(' ')+'" stroke="#fff" opacity="0.3" />;' for (var t=0; t<2; t++) { var col = t==0 ? 
COLORS[TEAM_ENL] : COLORS[TEAM_RES]; if (teamPaths[t].length > 0) { paths += '<path d="'+teamPaths[t].join(' ')+'" stroke="'+col+'" />'; } var y = scale(result.gameScore[t]); paths += '<path d="M40,'+y+' L390,'+y+'" stroke="'+col+'" stroke-dasharray="3,2" opacity="0.8" />'; } var svg = '<div><svg width="400" height="130">' +'<rect x="0" y="0" width="400" height="130" stroke="#FFCE00" fill="#08304E" />' +paths +otherSvg.join('') +regressions +'<foreignObject height="18" width="45" y="111" x="0" class="node"><label title="Logarithmic scale">' +'<input type="checkbox" class="logscale" style="height:auto;padding:0;vertical-align:middle"'+(logscale?' checked':'')+'/>' +'log</label></foreignObject>' +'</svg></div>'; return svg; } function regionScoreboardScoreHistoryTable(result) { var history = result.scoreHistory; var table = '<table class="checkpoint_table" style="width: 370px;"> \ <thead><tr><th>Checkpoint</th><th>Enlightened</th><th>Resistance</th> \ <th>Lead</th></tr></thead>'; var lead = 0; var rows = ''; for(var i=history.length-1; i >= 0; i--) { lead += history[i][1] - history[i][2]; var checkpoint_lead = lead < 0?'res: ' + digits(Math.abs(lead)):'enl: ' + digits(lead); rows = '<tr><td>' + history[i][0] + '</td><td>' + digits(history[i][1]) + '</td><td>' + digits(history[i][2]) + '</td><td class="' + (lead < 0?'res':'enl') + '" style="text-align: left;" >' + checkpoint_lead + '</td></tr>' + rows; } return table += rows +'</table>'; } // start crazy region code // facenames and codewords var facenames = [ 'AF', 'AS', 'NR', 'PA', 'AM', 'ST' ]; var codewords = [ 'ALPHA', 'BRAVO', 'CHARLIE', 'DELTA', 'ECHO', 'FOXTROT', 'GOLF', 'HOTEL', 'JULIET', 'KILO', 'LIMA', 'MIKE', 'NOVEMBER', 'PAPA', 'ROMEO', 'SIERRA', ]; var regionToLatLong = function(region) { // rot, d2xy, facenames, and codewords taken from regions.user.js var rot = function(n, x, y, rx, ry) { if(ry == 0) { if(rx == 1) { x = n-1 - x; y = n-1 - y; } return [y, x]; } return [x, y]; } var d2xy = function(n, d) 
{ var rx, ry, s, t = d, xy = [0, 0]; for(s=1; s<n; s*=2) { rx = 1 & (t/2); ry = 1 & (t ^ rx); xy = window.plugin.regions.rot(s, xy[0], xy[1], rx, ry); xy[0] += s * rx; xy[1] += s * ry; t /= 4; } return xy; } // inspired by regions.user.js getSearchResult region = region.split("-"); var faceId = facenames.indexOf(region[0].slice(0, 2)); var regionI = parseInt(region[0].slice(2)) - 1; var regionJ = codewords.indexOf(region[1]); var xy = d2xy(4, parseInt(region[2])); regionI = (regionI << 2) + xy[0]; regionJ = (regionJ << 2) + xy[1]; var cell = (faceId % 2 == 1) ? S2.S2Cell.FromFaceIJ(faceId, [regionJ,regionI], 6) : S2.S2Cell.FromFaceIJ(faceId, [regionI,regionJ], 6); return cell.getLatLng(); } // borrowed from the "regions" plugin function regionName(cell) { // ingress does some odd things with the naming. for some faces, the i and j coords are flipped when converting // (and not only the names - but the full quad coords too!). easiest fix is to create a temporary cell with the coords // swapped if (cell.face == 1 || cell.face == 3 || cell.face == 5) { cell = S2.S2Cell.FromFaceIJ ( cell.face, [cell.ij[1], cell.ij[0]], cell.level ); } // first component of the name is the face var name = facenames[cell.face]; if (cell.level >= 4) { // next two components are from the most signifitant four bits of the cell I/J var regionI = cell.ij[0] >> (cell.level-4); var regionJ = cell.ij[1] >> (cell.level-4); name += zeroPad(regionI+1,2)+'-'+codewords[regionJ]; } if (cell.level >= 6) { // the final component is based on the hibbert curve for the relevant cell var facequads = cell.getFaceAndQuads(); var number = facequads[1][4]*4+facequads[1][5]; name += '-'+zeroPad(number,2); } return name; } window.regionSearch = function(ev) { if (ev.target.name === "regionsearch") { var search = ev.target.value.toUpperCase(); var latlng = map.getCenter(); var currentregion = regionName(S2.S2Cell.FromLatLng(latlng, 6)).split("-"); // Borrwed from the regions plugin and modified to allow spaces OR 
dashes OR nothing between keywords. // It's a good enough regex for it's purpose. // This regexp is quite forgiving. Dashes are allowed between all components, each dash and leading zero is optional. // All whitespace is removed in onSearch(). If the first or both the first and second component are omitted, they are // replaced with the current cell's coordinates (=the cell which contains the center point of the map). If the last // component is ommited, the 4x4 cell group is used. var reparse = new RegExp('^(?:(?:(' + facenames.join('|') + ')(?:\\s?|-?))?((?:1[0-6])|(?:0?[1-9]))(?:\\s?|-?))?(' + codewords.join('|') + ')(?:(?:\\s?|-?)((?:1[0-5])|(?:0?\\d)))?$', 'i'); var matches = search.match(reparse); var result = ""; if (matches === null) { var searches = search.match(/^(\w{1,2})(\d{1,2})$/i); if (facenames.includes(search)) { for (var i = 1; i <= 16; i++) { result += '<span>' + search + (i>=10?i:('0' + i)) + '</span><br>'; } } else if (searches !== null) { var num = searches[2]?parseInt(searches[2]):''; var word = searches[1]?searches[1]:''; for (var i = 0; i < codewords.length; i++) { result += '<span>' + word + (num>=10?num:'0'+num) + '-' + codewords[i] + '</span><br>'; } } } else { var face = !matches[1]?currentregion[0]:matches[1]; var facenum = matches[2]?parseInt(matches[2]):false; var region = !matches[3]?currentregion[1]:matches[3]; var regionnum = matches[4]?parseInt(matches[4]):false; if (!regionnum) { for (var i = 0; i < 16; i++) { var res = face + (facenum?(facenum>=10?facenum:'0'+facenum):'') + '-' + region + '-' + (i>=10?i:('0'+i)); result += '<a onclick="window.regionScoresAtRegion(\'' + res + '\')">' + res + '</a><br>'; } } else { var res = face + (facenum?(facenum>=10?facenum:'0'+facenum):'') + '-' + region + '-' + (regionnum>=10?regionnum:('0' + regionnum)); result = '<a onclick="window.regionScoresAtRegion(\'' + res + '\')">' + res + '</a><br>' } } $('.regionresults').html(result); } } window.regionSelector = function() { var selectorhtml = 
'<input style="width:100%;" type="text" name="regionsearch" placeholder="search" onkeyup="window.regionSearch(event)"/>' +'<div style="overflow-y: scroll; height: 241px; padding-top: 5px;">' +'<div class="regionresults"></div>' +'<div> possible regions: ' +codewords.reduce(function(html, word) { return html += ', ' + word; }) +'</div></div>'; var dlg = dialog({title:'Region selector',html:selectorhtml,width:300,minHeight:345}); }; var handleRegionClick = function(e) { $('.leaflet-container')[0].style.cursor = ''; this.textContent = 'Select region from map'; requestRegionScores(e.latlng); map.off('click', handleRegionClick, this); }; window.regionClickSelector = function(ev) { var target = ev.target; if ($('.leaflet-container')[0].style.cursor === 'crosshair') { ev.target.textContent = 'Select region from map'; $('.leaflet-container')[0].style.cursor = ''; map.off('click', handleRegionClick, target); return; } ev.target.textContent = 'click to cancel select from map'; $('.leaflet-container')[0].style.cursor = 'crosshair'; map.on('click', handleRegionClick, target); }; // end of crazy region code function regionScoreboardSuccess(data,dlg,logscale) { if (data.result === undefined) { return regionScoreboardFailure(dlg); } var agentTable = '<table><tr><th>#</th><th>Agent</th></tr>'; for (var i=0; i<data.result.topAgents.length; i++) { var agent = data.result.topAgents[i]; agentTable += '<tr><td>'+(i+1)+'</td><td class="nickname '+(agent.team=='RESISTANCE'?'res':'enl')+'">'+agent.nick+'</td></tr>'; } if (data.result.topAgents.length === 0) { agentTable += '<tr><td colspan="2"><i>no top agents</i></td></tr>'; } agentTable += '</table>'; var maxAverage = Math.max(data.result.gameScore[0], data.result.gameScore[1], 1); var teamRow = []; for (var t=0; t<2; t++) { var team = t===0 ? 'Enlightened' : 'Resistance'; var teamClass = t===0 ? 'enl' : 'res'; var teamCol = t===0 ? 
COLORS[TEAM_ENL] : COLORS[TEAM_RES]; var barSize = Math.round(data.result.gameScore[t]/maxAverage*200); teamRow[t] = '<tr><th class="'+teamClass+'">' +team+'</th><td class="'+teamClass+'">' +digits(data.result.gameScore[t])+'</td><td><div style="background:' +teamCol+'; width: '+barSize+'px; height: 1.3ex; border: 2px outset ' +teamCol+'"> </td></tr>'; } var history = data.result.scoreHistory; // the lead is the sum of the difference of each checkpoint var lead = history.map(function(cp) { return cp[1] - cp[2] }).reduce(function(acc, diff) { return acc + diff }, 0); var leadinfo = '<div style="padding-left: 5px;">'; // res lead when we sum to a negative value if (lead < 0) { leadinfo += '<span class="res">res lead: ' + digits(Math.abs(lead)) + 'mu</span></div>'; } else { leadinfo += '<span class="enl">enl lead: ' + digits(lead) + 'mu</span></div>'; } var first = PLAYER.team == 'RESISTANCE' ? 1 : 0; // we need some divs to make the accordion work properly dlg.html('<div class="cellscore">' +'<b>Region scores for '+data.result.regionName+'</b>' +'<div><a title="Search region" onclick="window.regionSelector()">Search region</a> OR '// lets add the ability to select another region +'<a title="Click to select region" onclick="window.regionClickSelector(event)">Select region from map</a>' +'<table>'+teamRow[first]+teamRow[1-first]+'</table>' +leadinfo // stick our info under the score bars +regionScoreboardScoreHistoryChart(data.result, logscale) +'<div class="time-to-checkpoint">'+ formattedTimeToCheckpoint(nextCheckpoint()) +'</div></div>' +'<b>Checkpoint overview</b>' +'<div>'+regionScoreboardScoreHistoryTable(data.result)+'</div>' +'<b>Top agents</b>' +'<div>'+agentTable+'</div>' +'</div>'); $('g.checkpoint', dlg).each(function(i, elem) { elem = $(elem); var tooltip = 'CP:\t'+elem.attr('data-cp') + '\nEnl:\t' + digits(elem.attr('data-enl')) + '\nRes:\t' + digits(elem.attr('data-res')) + '\nDiff:\t' + digits(Math.abs(elem.attr('data-res')-elem.attr('data-enl'))); 
elem.tooltip({ content: convertTextToTableMagic(tooltip), position: {my: "center bottom", at: "center top-10"} }); }); $('.cellscore', dlg).accordion({ header: 'b', heightStyle: "fill", }); $('input.logscale', dlg).change(function(){ var input = $(this); regionScoreboardSuccess(data, dlg, input.prop('checked')); }); } } // PLUGIN END ////////////////////////////////////////////////////////// setup.info = plugin_info; //add the script info data to the function as a property if(!window.bootPlugins) window.bootPlugins = []; window.bootPlugins.push(setup); // if IITC has already booted, immediately run the 'setup' function if(window.iitcLoaded && typeof setup === 'function') setup(); } // wrapper end // inject code into site context var script = document.createElement('script'); var info = {}; if (typeof GM_info !== 'undefined' && GM_info && GM_info.script) info.script = { version: GM_info.script.version, name: GM_info.script.name, description: GM_info.script.description }; script.appendChild(document.createTextNode('('+ wrapper +')('+JSON.stringify(info)+');')); (document.body || document.head || document.documentElement).appendChild(script);
fixes because I didn't realize S2 wasn't part of the default IITC install
region-score-lead.user.js
fixes because I didn't realize S2 wasn't part of the default IITC install
<ide><path>egion-score-lead.user.js <ide> <ide> // PLUGIN START //////////////////////////////////////////////////////// <ide> var setup = function() { <del>// we stick all this stuff in the setup function to ensure it loads *after* IITC has booted <add> // include the S2 functions if they don't exist <add> if (!window.S2) { <add> (function() { <add> window.S2 = {}; <add> <add> var LatLngToXYZ = function(latLng) { <add> var d2r = Math.PI/180.0; <add> <add> var phi = latLng.lat*d2r; <add> var theta = latLng.lng*d2r; <add> <add> var cosphi = Math.cos(phi); <add> <add> return [Math.cos(theta)*cosphi, Math.sin(theta)*cosphi, Math.sin(phi)]; <add> }; <add> <add> var XYZToLatLng = function(xyz) { <add> var r2d = 180.0/Math.PI; <add> <add> var lat = Math.atan2(xyz[2], Math.sqrt(xyz[0]*xyz[0]+xyz[1]*xyz[1])); <add> var lng = Math.atan2(xyz[1], xyz[0]); <add> <add> return L.latLng(lat*r2d, lng*r2d); <add> }; <add> <add> var largestAbsComponent = function(xyz) { <add> var temp = [Math.abs(xyz[0]), Math.abs(xyz[1]), Math.abs(xyz[2])]; <add> <add> if (temp[0] > temp[1]) { <add> if (temp[0] > temp[2]) { <add> return 0; <add> } else { <add> return 2; <add> } <add> } else { <add> if (temp[1] > temp[2]) { <add> return 1; <add> } else { <add> return 2; <add> } <add> } <add> <add> }; <add> <add> var faceXYZToUV = function(face,xyz) { <add> var u,v; <add> <add> switch (face) { <add> case 0: u = xyz[1]/xyz[0]; v = xyz[2]/xyz[0]; break; <add> case 1: u = -xyz[0]/xyz[1]; v = xyz[2]/xyz[1]; break; <add> case 2: u = -xyz[0]/xyz[2]; v = -xyz[1]/xyz[2]; break; <add> case 3: u = xyz[2]/xyz[0]; v = xyz[1]/xyz[0]; break; <add> case 4: u = xyz[2]/xyz[1]; v = -xyz[0]/xyz[1]; break; <add> case 5: u = -xyz[1]/xyz[2]; v = -xyz[0]/xyz[2]; break; <add> default: throw {error: 'Invalid face'}; break; <add> } <add> <add> return [u,v]; <add> } <add> <add> var XYZToFaceUV = function(xyz) { <add> var face = largestAbsComponent(xyz); <add> <add> if (xyz[face] < 0) { <add> face += 3; <add> } <add> <add> uv = 
faceXYZToUV (face,xyz); <add> <add> return [face, uv]; <add> }; <add> <add> var FaceUVToXYZ = function(face,uv) { <add> var u = uv[0]; <add> var v = uv[1]; <add> <add> switch (face) { <add> case 0: return [ 1, u, v]; <add> case 1: return [-u, 1, v]; <add> case 2: return [-u,-v, 1]; <add> case 3: return [-1,-v,-u]; <add> case 4: return [ v,-1,-u]; <add> case 5: return [ v, u,-1]; <add> default: throw {error: 'Invalid face'}; <add> } <add> }; <add> <add> var STToUV = function(st) { <add> var singleSTtoUV = function(st) { <add> if (st >= 0.5) { <add> return (1/3.0) * (4*st*st - 1); <add> } else { <add> return (1/3.0) * (1 - (4*(1-st)*(1-st))); <add> } <add> } <add> <add> return [singleSTtoUV(st[0]), singleSTtoUV(st[1])]; <add> }; <add> <add> var UVToST = function(uv) { <add> var singleUVtoST = function(uv) { <add> if (uv >= 0) { <add> return 0.5 * Math.sqrt (1 + 3*uv); <add> } else { <add> return 1 - 0.5 * Math.sqrt (1 - 3*uv); <add> } <add> } <add> <add> return [singleUVtoST(uv[0]), singleUVtoST(uv[1])]; <add> }; <add> <add> var STToIJ = function(st,order) { <add> var maxSize = (1<<order); <add> <add> var singleSTtoIJ = function(st) { <add> var ij = Math.floor(st * maxSize); <add> return Math.max(0, Math.min(maxSize-1, ij)); <add> }; <add> <add> return [singleSTtoIJ(st[0]), singleSTtoIJ(st[1])]; <add> }; <add> <add> var IJToST = function(ij,order,offsets) { <add> var maxSize = (1<<order); <add> <add> return [ <add> (ij[0]+offsets[0])/maxSize, <add> (ij[1]+offsets[1])/maxSize <add> ]; <add> } <add> <add> // hilbert space-filling curve <add> // based on http://blog.notdot.net/2009/11/Damn-Cool-Algorithms-Spatial-indexing-with-Quadtrees-and-Hilbert-Curves <add> // note: rather then calculating the final integer hilbert position, we just return the list of quads <add> // this ensures no precision issues whth large orders (S3 cell IDs use up to 30), and is more <add> // convenient for pulling out the individual bits as needed later <add> var pointToHilbertQuadList = 
function(x,y,order) { <add> var hilbertMap = { <add> 'a': [ [0,'d'], [1,'a'], [3,'b'], [2,'a'] ], <add> 'b': [ [2,'b'], [1,'b'], [3,'a'], [0,'c'] ], <add> 'c': [ [2,'c'], [3,'d'], [1,'c'], [0,'b'] ], <add> 'd': [ [0,'a'], [3,'c'], [1,'d'], [2,'d'] ] <add> }; <add> <add> var currentSquare='a'; <add> var positions = []; <add> <add> for (var i=order-1; i>=0; i--) { <add> <add> var mask = 1<<i; <add> <add> var quad_x = x&mask ? 1 : 0; <add> var quad_y = y&mask ? 1 : 0; <add> <add> var t = hilbertMap[currentSquare][quad_x*2+quad_y]; <add> <add> positions.push(t[0]); <add> <add> currentSquare = t[1]; <add> } <add> <add> return positions; <add> }; <add> <add> // S2Cell class <add> <add> S2.S2Cell = function(){}; <add> <add> //static method to construct <add> S2.S2Cell.FromLatLng = function(latLng,level) { <add> <add> var xyz = LatLngToXYZ(latLng); <add> <add> var faceuv = XYZToFaceUV(xyz); <add> var st = UVToST(faceuv[1]); <add> <add> var ij = STToIJ(st,level); <add> <add> return S2.S2Cell.FromFaceIJ (faceuv[0], ij, level); <add> }; <add> <add> S2.S2Cell.FromFaceIJ = function(face,ij,level) { <add> var cell = new S2.S2Cell(); <add> cell.face = face; <add> cell.ij = ij; <add> cell.level = level; <add> <add> return cell; <add> }; <add> <add> S2.S2Cell.prototype.toString = function() { <add> return 'F'+this.face+'ij['+this.ij[0]+','+this.ij[1]+']@'+this.level; <add> }; <add> <add> S2.S2Cell.prototype.getLatLng = function() { <add> var st = IJToST(this.ij,this.level, [0.5,0.5]); <add> var uv = STToUV(st); <add> var xyz = FaceUVToXYZ(this.face, uv); <add> <add> return XYZToLatLng(xyz); <add> }; <add> <add> S2.S2Cell.prototype.getCornerLatLngs = function() { <add> var result = []; <add> var offsets = [ <add> [ 0.0, 0.0 ], <add> [ 0.0, 1.0 ], <add> [ 1.0, 1.0 ], <add> [ 1.0, 0.0 ] <add> ]; <add> <add> for (var i=0; i<4; i++) { <add> var st = IJToST(this.ij, this.level, offsets[i]); <add> var uv = STToUV(st); <add> var xyz = FaceUVToXYZ(this.face, uv); <add> <add> result.push ( 
XYZToLatLng(xyz) ); <add> } <add> return result; <add> }; <add> <add> S2.S2Cell.prototype.getFaceAndQuads = function() { <add> var quads = pointToHilbertQuadList(this.ij[0], this.ij[1], this.level); <add> <add> return [this.face,quads]; <add> }; <add> <add> S2.S2Cell.prototype.getNeighbors = function() { <add> <add> var fromFaceIJWrap = function(face,ij,level) { <add> var maxSize = (1<<level); <add> if (ij[0]>=0 && ij[1]>=0 && ij[0]<maxSize && ij[1]<maxSize) { <add> // no wrapping out of bounds <add> return S2.S2Cell.FromFaceIJ(face,ij,level); <add> } else { <add> // the new i,j are out of range. <add> // with the assumption that they're only a little past the borders we can just take the points as <add> // just beyond the cube face, project to XYZ, then re-create FaceUV from the XYZ vector <add> <add> var st = IJToST(ij,level,[0.5,0.5]); <add> var uv = STToUV(st); <add> var xyz = FaceUVToXYZ(face,uv); <add> var faceuv = XYZToFaceUV(xyz); <add> face = faceuv[0]; <add> uv = faceuv[1]; <add> st = UVToST(uv); <add> ij = STToIJ(st,level); <add> return S2.S2Cell.FromFaceIJ (face, ij, level); <add> } <add> }; <add> <add> var face = this.face; <add> var i = this.ij[0]; <add> var j = this.ij[1]; <add> var level = this.level; <add> <add> <add> return [ <add> fromFaceIJWrap(face, [i-1,j], level), <add> fromFaceIJWrap(face, [i,j-1], level), <add> fromFaceIJWrap(face, [i+1,j], level), <add> fromFaceIJWrap(face, [i,j+1], level) <add> ]; <add> <add> }; <add> })(); <add> } <add> // we stick all this stuff in the setup function to ensure it loads *after* IITC has booted <ide> window.regionScoreboard = function() { <ide> var latlng = map.getCenter(); <ide> window.requestRegionScores(latlng); <ide> for(s=1; s<n; s*=2) { <ide> rx = 1 & (t/2); <ide> ry = 1 & (t ^ rx); <del> xy = window.plugin.regions.rot(s, xy[0], xy[1], rx, ry); <add> xy = rot(s, xy[0], xy[1], rx, ry); <ide> xy[0] += s * rx; <ide> xy[1] += s * ry; <ide> t /= 4;
Java
bsd-3-clause
4d915e59120465f3cedaacef13841e483b41f64f
0
owlcollab/owltools,owlcollab/owltools,owlcollab/owltools,owlcollab/owltools,owlcollab/owltools,owlcollab/owltools
package owltools.cli; import java.awt.Color; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintStream; import java.io.PrintWriter; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.Stack; import java.util.TimeZone; import java.util.UUID; import java.util.Vector; import java.util.stream.Collectors; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.LineIterator; import org.apache.commons.lang3.StringUtils; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.eclipse.jetty.server.Server; import org.geneontology.reasoner.ExpressionMaterializingReasoner; import org.geneontology.reasoner.ExpressionMaterializingReasonerFactory; import org.geneontology.reasoner.OWLExtendedReasoner; import org.obolibrary.macro.MacroExpansionVisitor; import org.obolibrary.macro.ManchesterSyntaxTool; import org.obolibrary.obo2owl.Obo2OWLConstants; import org.obolibrary.obo2owl.Obo2OWLConstants.Obo2OWLVocabulary; import org.obolibrary.obo2owl.OboInOwlCardinalityTools; import org.obolibrary.obo2owl.Owl2Obo; import org.obolibrary.oboformat.model.Frame; import org.obolibrary.oboformat.model.OBODoc; import org.obolibrary.oboformat.parser.OBOFormatConstants.OboFormatTag; import 
org.obolibrary.oboformat.parser.OBOFormatParser; import org.obolibrary.oboformat.writer.OBOFormatWriter; import org.obolibrary.oboformat.writer.OBOFormatWriter.NameProvider; import org.semanticweb.elk.owlapi.ElkReasonerFactory; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.formats.FunctionalSyntaxDocumentFormat; import org.semanticweb.owlapi.formats.ManchesterSyntaxDocumentFormat; import org.semanticweb.owlapi.formats.OBODocumentFormat; import org.semanticweb.owlapi.formats.OWLXMLDocumentFormat; import org.semanticweb.owlapi.formats.RDFXMLDocumentFormat; import org.semanticweb.owlapi.formats.TrigDocumentFormat; import org.semanticweb.owlapi.formats.TurtleDocumentFormat; import org.semanticweb.owlapi.io.OWLParserException; import org.semanticweb.owlapi.model.AddImport; import org.semanticweb.owlapi.model.AddOntologyAnnotation; import org.semanticweb.owlapi.model.AxiomType; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAnnotation; import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; import org.semanticweb.owlapi.model.OWLAnnotationProperty; import org.semanticweb.owlapi.model.OWLAnnotationSubject; import org.semanticweb.owlapi.model.OWLAnnotationValue; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; import org.semanticweb.owlapi.model.OWLClassAxiom; import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLDeclarationAxiom; import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom; import org.semanticweb.owlapi.model.OWLDocumentFormat; import org.semanticweb.owlapi.model.OWLEntity; import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; import org.semanticweb.owlapi.model.OWLImportsDeclaration; import org.semanticweb.owlapi.model.OWLLiteral; import 
org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLNamedObject; import org.semanticweb.owlapi.model.OWLObject; import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyCharacteristicAxiom; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyChange; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyID; import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.owlapi.model.OWLProperty; import org.semanticweb.owlapi.model.OWLPropertyExpression; import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom; import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; import org.semanticweb.owlapi.model.RemoveAxiom; import org.semanticweb.owlapi.model.RemoveImport; import org.semanticweb.owlapi.model.SetOntologyID; import org.semanticweb.owlapi.model.parameters.AxiomAnnotations; import org.semanticweb.owlapi.model.parameters.Imports; import org.semanticweb.owlapi.reasoner.Node; import org.semanticweb.owlapi.reasoner.NodeSet; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; import org.semanticweb.owlapi.util.AutoIRIMapper; import org.semanticweb.owlapi.util.OWLClassExpressionVisitorAdapter; import org.semanticweb.owlapi.util.OWLEntityRenamer; import org.semanticweb.owlapi.util.OWLEntityVisitorAdapter; import org.semanticweb.owlapi.util.SimpleIRIMapper; import org.semanticweb.owlapi.vocab.OWL2Datatype; import org.semanticweb.owlapi.vocab.OWLRDFVocabulary; import 
owltools.InferenceBuilder.OWLClassFilter; import owltools.RedundantInferences; import owltools.RedundantInferences.RedundantAxiom; import owltools.cli.tools.CLIMethod; import owltools.gfx.GraphicsConfig; import owltools.gfx.GraphicsConfig.RelationConfig; import owltools.gfx.OWLGraphLayoutRenderer; import owltools.graph.AxiomAnnotationTools; import owltools.graph.OWLGraphEdge; import owltools.graph.OWLGraphWrapper; import owltools.graph.OWLGraphWrapper.ISynonym; import owltools.graph.OWLGraphWrapperBasic.LabelPolicy; import owltools.graph.OWLQuantifiedProperty; import owltools.graph.OWLQuantifiedProperty.Quantifier; import owltools.idmap.IDMapPairWriter; import owltools.idmap.IDMappingPIRParser; import owltools.idmap.UniProtIDMapParser; import owltools.io.CatalogXmlIRIMapper; import owltools.io.ChadoGraphClosureRenderer; import owltools.io.CompactGraphClosureReader; import owltools.io.CompactGraphClosureRenderer; import owltools.io.EdgeTableRenderer; import owltools.io.GraphClosureRenderer; import owltools.io.GraphReader; import owltools.io.GraphRenderer; import owltools.io.ImportClosureSlurper; import owltools.io.InferredParentRenderer; import owltools.io.OWLJSONFormat; import owltools.io.OWLJsonLDFormat; import owltools.io.OWLOboGraphsFormat; import owltools.io.OWLOboGraphsYamlFormat; import owltools.io.OWLPrettyPrinter; import owltools.io.ParserWrapper; import owltools.io.ParserWrapper.OWLGraphWrapperNameProvider; import owltools.io.StanzaToOWLConverter; import owltools.io.TableRenderer; import owltools.io.TableToAxiomConverter; import owltools.mooncat.BridgeExtractor; import owltools.mooncat.Diff; import owltools.mooncat.DiffUtil; import owltools.mooncat.EquivalenceSetMergeUtil; import owltools.mooncat.Mooncat; import owltools.mooncat.OWLInAboxTranslator; import owltools.mooncat.PropertyExtractor; import owltools.mooncat.PropertyViewOntologyBuilder; import owltools.mooncat.ProvenanceReasonerWrapper; import owltools.mooncat.QuerySubsetGenerator; import 
owltools.mooncat.SpeciesMergeUtil; import owltools.mooncat.SpeciesSubsetterUtil; import owltools.mooncat.TransformationUtils; import owltools.mooncat.ontologymetadata.ImportChainDotWriter; import owltools.mooncat.ontologymetadata.ImportChainExtractor; import owltools.mooncat.ontologymetadata.OntologyMetadataMarkdownWriter; import owltools.ontologyrelease.OboBasicDagCheck; import owltools.ontologyrelease.OntologyMetadata; import owltools.reasoner.GCIUtil; import owltools.reasoner.GraphReasonerFactory; import owltools.renderer.markdown.MarkdownRenderer; import owltools.sim2.preprocessor.ABoxUtils; import owltools.tr.LinkMaker; import owltools.tr.LinkMaker.LinkMakerResult; import owltools.tr.LinkMaker.LinkPattern; import owltools.util.OwlHelper; import owltools.web.OWLServer; import uk.ac.manchester.cs.owlapi.modularity.ModuleType; import uk.ac.manchester.cs.owlapi.modularity.SyntacticLocalityModuleExtractor; import com.clarkparsia.owlapi.explanation.DefaultExplanationGenerator; import com.clarkparsia.owlapi.explanation.ExplanationGenerator; import com.google.common.base.Optional; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import de.derivo.sparqldlapi.Query; import de.derivo.sparqldlapi.QueryArgument; import de.derivo.sparqldlapi.QueryBinding; import de.derivo.sparqldlapi.QueryEngine; import de.derivo.sparqldlapi.QueryResult; import de.derivo.sparqldlapi.exceptions.QueryEngineException; import de.derivo.sparqldlapi.exceptions.QueryParserException; import de.derivo.sparqldlapi.types.QueryArgumentType; /** * An instance of this class can execute owltools commands in sequence. * * Typically, this class is called from a wrapper within its main() method. * * Extend this class to implement additional functions. Use the {@link CLIMethod} * annotation, to designate the relevant methods. 
* * @author cjm * @see GafCommandRunner * @see JsCommandRunner * @see SimCommandRunner * @see SolrCommandRunner * @see TaxonCommandRunner */ public class CommandRunner extends CommandRunnerBase { private static Logger LOG = Logger.getLogger(CommandRunner.class); public void runSingleIteration(Opts opts) throws Exception { Set<OWLSubClassOfAxiom> removedSubClassOfAxioms = null; GraphicsConfig gfxCfg = new GraphicsConfig(); //Configuration config = new PropertiesConfiguration("owltools.properties"); while (opts.hasArgs()) { if (opts.nextArgIsHelp()) { help(); opts.setHelpMode(true); } //String opt = opts.nextOpt(); //System.out.println("processing arg: "+opt); if (opts.nextEq("--pellet")) { System.err.println("The Pellet reasoner is no longer supported, use Hermit '--hermit', or ELK '--elk' instead"); exit(-1); return; } else if (opts.nextEq("--hermit")) { reasonerName = "hermit"; } else if (opts.nextEq("--elk")) { reasonerName = "elk"; } else if (opts.nextEq("--jfact")) { System.err.println("The JFact reasoner is no longer supported, use Hermit '--hermit', or ELK '--elk' instead"); exit(-1); return; } else if (opts.nextEq("--more")) { System.err.println("The MORE reasoner is no longer supported, use Hermit '--hermit', or ELK '--elk' instead"); exit(-1); return; } else if (opts.nextEq("--use-reasoner|--set-reasoner-name")) { reasonerName = opts.nextOpt(); } else if (opts.nextEq("--no-dispose")) { this.isDisposeReasonerOnExit = false; } else if (opts.nextEq("--reasoner")) { reasonerName = opts.nextOpt(); g.setReasoner(createReasoner(g.getSourceOntology(),reasonerName,g.getManager())); reasoner = g.getReasoner(); } else if (opts.nextEq("--init-reasoner")) { while (opts.hasOpts()) { if (opts.nextEq("-r")) { reasonerName = opts.nextOpt(); } else { break; } } g.setReasoner(createReasoner(g.getSourceOntology(),reasonerName,g.getManager())); reasoner = g.getReasoner(); } else if (opts.nextEq("--reasoner-dispose")) { reasoner.dispose(); } else if 
(opts.nextEq("--reasoner-flush")) { reasoner.flush(); } else if (opts.nextEq("--no-reasoner")) { reasonerName = ""; } else if (opts.nextEq("--log-info")) { Logger.getRootLogger().setLevel(Level.INFO); } else if (opts.nextEq("--log-warning")) { Logger.getRootLogger().setLevel(Level.WARN); } else if (opts.nextEq("--log-debug")) { Logger.getRootLogger().setLevel(Level.DEBUG); } else if (opts.nextEq("--log-error")) { Logger.getRootLogger().setLevel(Level.ERROR); } else if (opts.nextEq("--no-debug")) { Logger.getRootLogger().setLevel(Level.OFF); } else if (opts.nextEq("--no-logging")) { Logger.getRootLogger().setLevel(Level.OFF); } else if (opts.nextEq("--silence-elk")) { Logger.getLogger("org.semanticweb.elk").setLevel(Level.OFF); } else if (opts.nextEq("--monitor-memory")) { g.getConfig().isMonitorMemory = true; } else if (opts.nextEq("--list-classes")) { Set<OWLClass> clss = g.getSourceOntology().getClassesInSignature(); for (OWLClass c : clss) { System.out.println(c); } } else if (opts.nextEq("--object-to-label-table")) { Set<OWLObject> objs = g.getAllOWLObjects(); boolean useIds = false; while (opts.hasOpts()) { if (opts.nextEq("-i")) { useIds = true; } else { break; } } for (OWLObject c : objs) { if (c instanceof OWLNamedObject) { String label = g.getLabel(c); String id; if (useIds) { id = g.getIdentifier(c); } else { id = ((OWLNamedObject)c).getIRI().toString(); } System.out.println(id+"\t"+label); } } } else if (opts.nextEq("--write-all-subclass-relationships")) { for (OWLSubClassOfAxiom ax : g.getSourceOntology().getAxioms(AxiomType.SUBCLASS_OF)) { OWLClassExpression parent = ax.getSuperClass(); OWLClassExpression child = ax.getSubClass(); if (parent.isAnonymous() || child.isAnonymous()) continue; System.out.println(g.getIdentifier(child) + "\t" + g.getIdentifier(parent)); } } else if (opts.nextEq("--query-ontology")) { opts.info("[-m]", "specify an ontology that has classes to be used as queries. 
See also: --reasoner-query"); boolean isMerge = false; while (opts.hasOpts()) { if (opts.nextEq("-m")) isMerge = true; else opts.nextOpt(); } queryOntology = pw.parse(opts.nextOpt()); queryExpressionMap = new HashMap<OWLClass,OWLClassExpression>(); for (OWLClass qc : queryOntology.getClassesInSignature()) { for (OWLClassExpression ec : OwlHelper.getEquivalentClasses(qc, queryOntology)) { queryExpressionMap.put(qc, ec); } } if (isMerge) { g.mergeOntology(queryOntology); } } else if (opts.nextEq("--merge")) { opts.info("ONT", "merges ONT into current source ontology"); g.mergeOntology(pw.parse(opts.nextOpt())); } else if (opts.nextEq("--use-catalog") || opts.nextEq("--use-catalog-xml")) { opts.info("", "uses default catalog-v001.xml"); pw.addIRIMapper(new CatalogXmlIRIMapper("catalog-v001.xml")); } else if (opts.nextEq("--catalog-xml")) { opts.info("CATALOG-FILE", "uses the specified file as a catalog"); pw.addIRIMapper(new CatalogXmlIRIMapper(opts.nextOpt())); } else if (opts.nextEq("--map-ontology-iri")) { opts.info("OntologyIRI FILEPATH", "maps an ontology IRI to a file in your filesystem"); OWLOntologyIRIMapper iriMapper = new SimpleIRIMapper(IRI.create(opts.nextOpt()), IRI.create(new File(opts.nextOpt()))); LOG.info("Adding "+iriMapper+" to "+pw.getManager()); pw.addIRIMapper(iriMapper); } else if (opts.nextEq("--auto-ontology-iri")) { opts.info("[-r] ROOTDIR", "uses an AutoIRI mapper [EXPERIMENTAL]"); boolean isRecursive = false; while (opts.hasOpts()) { if (opts.nextEq("-r")) { isRecursive = true; } else { break; } } File file = new File(opts.nextOpt()); OWLOntologyIRIMapper iriMapper = new AutoIRIMapper(file, isRecursive); LOG.info("Adding "+iriMapper+" to "+pw.getManager()+" dir:"+file+" isRecursive="+isRecursive); pw.addIRIMapper(iriMapper); } else if (opts.nextEq("--remove-imports-declarations")) { Set<OWLImportsDeclaration> oids = g.getSourceOntology().getImportsDeclarations(); for (OWLImportsDeclaration oid : oids) { RemoveImport ri = new 
RemoveImport(g.getSourceOntology(), oid); g.getManager().applyChange(ri); } } else if (opts.nextEq("--remove-import-declaration")) { opts.info("IRI", "Removes a specific import"); String rmImport = opts.nextOpt(); Set<OWLImportsDeclaration> oids = g.getSourceOntology().getImportsDeclarations(); for (OWLImportsDeclaration oid : oids) { LOG.info("Testing "+oid.getIRI().toString()+" == "+rmImport); if (oid.getIRI().toString().equals(rmImport)) { RemoveImport ri = new RemoveImport(g.getSourceOntology(), oid); LOG.info(ri); g.getManager().applyChange(ri); } } } else if (opts.nextEq("--add-imports-declarations")) { opts.info("IRI-LIST", "Adds declaration for each ontology IRI"); List<String> importsIRIs = opts.nextList(); for (String importIRI : importsIRIs) { AddImport ai = new AddImport(g.getSourceOntology(), g.getDataFactory().getOWLImportsDeclaration(IRI.create(importIRI))); g.getManager().applyChange(ai); } } else if (opts.nextEq("--subtract")) { opts.info("ONTS", "subtract axioms in support ontology from main ontology"); boolean preserveDeclarations = false; boolean preserveAnnotations = false; while (opts.hasOpts()) { if (opts.nextEq("-d|--preserve-declarations")) { preserveDeclarations = true; } if (opts.nextEq("-a|--preserve-annotations")) { preserveAnnotations = true; } else { break; } } List<String> ontFiles = opts.nextList(); for (String ontFile: ontFiles) { g.addSupportOntology(pw.parse(ontFile)); } Set<OWLAxiom> rmAxioms = new HashSet<>(); int n=0; for (OWLOntology o : g.getSupportOntologySet()) { for (OWLAxiom a : o.getAxioms()) { if (preserveDeclarations && a instanceof OWLDeclarationAxiom) { continue; } if (preserveAnnotations && a.isAnnotationAxiom()) { continue; } rmAxioms.add(a); n++; } } LOG.info("Removing "+n+" axioms"); g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms); } else if (opts.nextEq("--diff")) { opts.info("[--o1r FILE][--o2r FILE][--oi FILE][--od FILE][-u][-s][-f FMT]", "ontology difference or subtraction"); String o1r = null; 
String o2r = null; String oi = null; String od = null; OWLDocumentFormat ofmt = new RDFXMLDocumentFormat(); boolean isCompareClassesInCommon = false; boolean isCompareUnannotatedForm = false; boolean isAddSharedDeclarations = false; while (opts.hasOpts()) { if (opts.nextEq("--o1r")) { o1r = opts.nextOpt(); } else if (opts.nextEq("--o2r")) { o2r = opts.nextOpt(); } else if (opts.nextEq("--oi")) { oi = opts.nextOpt(); } else if (opts.nextEq("--od")) { od = opts.nextOpt(); } else if (opts.nextEq("-s")) { opts.info("", "shared: compare only classes in common to both"); isCompareClassesInCommon = true; } else if (opts.nextEq("-u")) { opts.info("", "compare unannotated forms: remove axiom annotations before comparison"); isCompareUnannotatedForm = true; } else if (opts.nextEq("-f")) { String fmt = opts.nextOpt(); if (fmt.equals("obo")) { ofmt = new OBODocumentFormat(); isAddSharedDeclarations = true; } else LOG.error("Cannot do: "+fmt); } else { break; } } if (g.getSupportOntologySet().size() != 1) { LOG.error("must be exactly 1 support ontology. E.g. owltools f1.owl f2.owl --dif"); } OWLOntology ont2 = g.getSupportOntologySet().iterator().next(); Diff diff = new Diff(); diff.ontology1 = g.getSourceOntology(); diff.ontology2 = ont2; diff.isAddSharedDeclarations = isAddSharedDeclarations; diff.isCompareClassesInCommon = isCompareClassesInCommon; diff.isCompareUnannotatedForm = isCompareUnannotatedForm; diff = DiffUtil.getDiff(diff); System.out.println(diff); final ParserWrapper pw = new ParserWrapper(); if (o1r != null) pw.saveOWL(diff.ontology1remaining , ofmt, o1r); if (o2r != null) pw.saveOWL(diff.ontology2remaining , ofmt, o2r); if (oi != null) pw.saveOWL(diff.ontology2remaining , ofmt, oi); if (od != null) pw.saveOWL(diff.ontologyDiff , ofmt, od); } else if (opts.nextEq("--set-ontology-id")) { opts.info("[-v VERSION-IRI][-a] IRI", "Sets the OWLOntologyID (i.e. 
IRI and versionIRI)"); IRI v = null; IRI iri = null; boolean isAnonymous = false; while (opts.hasOpts()) { if (opts.nextEq("-v|--version-iri")) { v = IRI.create(opts.nextOpt()); } else if (opts.nextEq("-a|--anonymous")) { opts.info("", "if specified, do not specify an IRI"); isAnonymous = true; } else { break; } } if (!isAnonymous) iri = IRI.create(opts.nextOpt()); OWLOntologyID oid = new OWLOntologyID(Optional.fromNullable(iri), Optional.fromNullable(v)); SetOntologyID soid; soid = new SetOntologyID(g.getSourceOntology(), oid); g.getManager().applyChange(soid); } else if (opts.nextEq("--add-ontology-annotation")) { opts.info("[-u] PROP VAL", "Sets an ontology annotation"); OWL2Datatype dt = OWL2Datatype.XSD_STRING; while (opts.hasOpts()) { if (opts.nextEq("-u")) { opts.info("", "Ase xsd:anyURI as datatype"); dt = OWL2Datatype.XSD_ANY_URI; } else break; } IRI p = IRI.create(opts.nextOpt()); OWLLiteral v = g.getDataFactory().getOWLLiteral(opts.nextOpt(), dt); OWLAnnotation ann = g.getDataFactory().getOWLAnnotation(g.getDataFactory().getOWLAnnotationProperty(p), v); AddOntologyAnnotation addAnn = new AddOntologyAnnotation(g.getSourceOntology(), ann); g.getManager().applyChange(addAnn); } else if (opts.nextEq("--create-ontology")) { IRI v = null; while (opts.hasOpts()) { if (opts.nextEq("-v|--version-iri")) { v = IRI.create(opts.nextOpt()); } else { break; } } String iri = opts.nextOpt(); if (!iri.startsWith("http:")) { iri = Obo2OWLConstants.DEFAULT_IRI_PREFIX+iri; } g = new OWLGraphWrapper(iri); if (v != null) { OWLOntologyID oid = new OWLOntologyID(Optional.of(IRI.create(iri)), Optional.of(v)); SetOntologyID soid; soid = new SetOntologyID(g.getSourceOntology(), oid); g.getManager().applyChange(soid); } } else if (opts.nextEq("--merge-import")) { opts.info("ONTOLOGY-IRI", "Ensure all axioms from the ontology are merged into the main ontology"); String iriString = opts.nextOpt(); g.mergeSpecificImport(IRI.create(iriString)); } else if 
(opts.nextEq("--merge-import-closure") || opts.nextEq("--merge-imports-closure")) { opts.info("[--ni]", "All axioms from ontologies in import closure are copied into main ontology"); boolean isRmImports = false; if (opts.nextEq("--ni")) { opts.info("", "removes imports declarations after merging"); isRmImports = true; } g.mergeImportClosure(isRmImports); } else if (opts.nextEq("--merge-support-ontologies")) { opts.info("[-l]", "This will merge the support ontologies from the OWLGraphWrapper into the main ontology. This is usually required while working with a reasoner."); LabelPolicy lp = LabelPolicy.ALLOW_DUPLICATES; while (opts.hasArgs()) { if (opts.nextEq("-l|--labels")) { opts.info("", "preserve source labels"); lp = LabelPolicy.PRESERVE_SOURCE; } else break; } for (OWLOntology ont : g.getSupportOntologySet()) g.mergeOntology(ont, lp); g.setSupportOntologySet(new HashSet<OWLOntology>()); } else if (opts.nextEq("--add-support-from-imports")) { opts.info("", "All ontologies in direct import are removed and added as support ontologies"); g.addSupportOntologiesFromImportsClosure(); } else if (opts.nextEq("--add-imports-from-support|--add-imports-from-supports")) { g.addImportsFromSupportOntologies(); } else if (opts.nextEq("-m") || opts.nextEq("--mcat")) { catOntologies(opts); } else if (opts.nextEq("--remove-entities-marked-imported")) { opts.info("","Removes all classes, individuals and object properties that are marked with IAO_0000412"); Mooncat m = new Mooncat(g); m.removeExternalEntities(); } else if (opts.nextEq("--remove-external-classes")) { opts.info("IDSPACE","Removes all classes not in the specified ID space"); boolean removeDangling = true; while (opts.hasOpts()) { if (opts.nextEq("-k|--keepDangling")) { removeDangling = false; } else { break; } } String idspace = opts.nextOpt(); Mooncat m = new Mooncat(g); m.removeClassesNotInIDSpace(idspace, removeDangling); } else if (opts.nextEq("--remove-dangling")) { Mooncat m = new Mooncat(g); 
m.removeDanglingAxioms(); } else if (opts.nextEq("--remove-uninstantiated-classes")) { opts.info("", "removes all classes for which the reasoner can infer no instances"); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); int n = 0; reasoner.flush(); for (OWLClass obj : g.getAllOWLClasses()) { if (reasoner.getInstances(obj, false).getFlattened().size() == 0) { LOG.info("Unused: "+obj); n++; rmAxioms.addAll(g.getSourceOntology().getReferencingAxioms(obj, Imports.INCLUDED)); } } LOG.info("Removing "+rmAxioms.size()+" referencing "+n+" unused classes"); g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms); } else if (opts.nextEq("--make-subset-by-properties")) { opts.info("PROPERTY-LIST", "make an ontology subset that excludes axioms that use properties not in the specified set.\n"+ " Note the ontology should be relaxed e.g. X=A and R some B ==> X SubClassOf A" + " A property list is a space-separated list of object property OBO-IDs, shorthands, URIs, or labels.\n"+ " Example: my.owl --make-subset-by-properties BFO:0000050 'develops from' // -o my-slim.owl \n"+ " The special symbol 'ALL-PROPERTIES' selects all properties in the signature.\n"+ " The property list should be terminated by '//' (this is optional and a new command starting with '-' is sufficient to end the list)"); boolean isForceRemoveDangling = false; boolean isSuppressRemoveDangling = false; while (opts.hasOpts()) { if (opts.nextEq("-f|--force")) { isForceRemoveDangling = true; } else if (opts.nextEq("-n|--no-remove-dangling")) { isSuppressRemoveDangling = true; } else { break; } } Set<OWLObjectProperty> props = this.resolveObjectPropertyList(opts); Mooncat m = new Mooncat(g); int numDanglingAxioms = m.getDanglingAxioms(g.getSourceOntology()).size(); LOG.info("# Dangling axioms prior to filtering: "+numDanglingAxioms); if (numDanglingAxioms > 0) { if (!isForceRemoveDangling && !isSuppressRemoveDangling) { LOG.error("Will not proceed until dangling axioms removed, or -n or -f options are set"); 
throw new Exception("Dangling axioms will be lost"); } } m.retainAxiomsInPropertySubset(g.getSourceOntology(),props,reasoner); if (!isSuppressRemoveDangling) { LOG.info("# Dangling axioms post-filtering: " + m.getDanglingAxioms(g.getSourceOntology()).size()); m.removeDanglingAxioms(); } } else if (opts.nextEq("--list-class-axioms")) { OWLClass c = resolveClass(opts.nextOpt()); System.out.println("Class = "+c); OWLPrettyPrinter owlpp = getPrettyPrinter(); for (OWLClassAxiom ax : g.getSourceOntology().getAxioms(c, Imports.EXCLUDED)) { //System.out.println(ax); owlpp.print(ax); } } else if (opts.nextEq("--list-all-axioms")) { OWLPrettyPrinter owlpp = getPrettyPrinter(); for (OWLAxiom a : g.getSourceOntology().getAxioms()) { owlpp.print(a); } } else if (opts.nextEq("--make-species-subset")) { opts.info("-t TAXCLASS","Creates a taxon-specific ontology"); OWLObjectProperty viewProperty = null; OWLClass taxClass = null; String suffix = null; SpeciesSubsetterUtil smu = new SpeciesSubsetterUtil(g); while (opts.hasOpts()) { if (opts.nextEq("-t|--taxon")) { taxClass = this.resolveClass(opts.nextOpt()); } else if (opts.nextEq("-p|--property")) { viewProperty = this.resolveObjectProperty(opts.nextOpt()); } else if (opts.nextEq("-r|--root")) { smu.rootClass = this.resolveClass(opts.nextOpt()); } else break; } smu.viewProperty = viewProperty; smu.taxClass = taxClass; smu.reasoner = reasoner; smu.removeOtherSpecies(); } else if (opts.nextEq("--merge-species-ontology")) { opts.info("-t TAXCLASS [-s SUFFIX] [-p PROP] [-q PROPS]","Creates a composite/merged species ontology"); OWLObjectProperty viewProperty = g.getOWLObjectPropertyByIdentifier("BFO:0000050"); OWLClass taxClass = null; String suffix = null; Set<OWLObjectProperty> includeProps = null; while (opts.hasOpts()) { if (opts.nextEq("-t|--taxon")) { taxClass = this.resolveClass(opts.nextOpt()); } else if (opts.nextEq("-p|--property")) { viewProperty = this.resolveObjectProperty(opts.nextOpt()); } else if 
(opts.nextEq("-q|--include-property")) { opts.info("PROPS", "object props to include - ALL if unspecified"); includeProps = this.resolveObjectPropertyList(opts); LOG.info("|IP|"+includeProps.size()); LOG.info("IP"+includeProps); } else if (opts.nextEq("-s|--suffix")) { suffix = opts.nextOpt(); } else break; } SpeciesMergeUtil smu = new SpeciesMergeUtil(g); smu.viewProperty = viewProperty; smu.taxClass = taxClass; smu.reasoner = reasoner; smu.includedProperties = includeProps; if (suffix != null) smu.suffix = suffix; smu.merge(); } else if (opts.nextEq("--info")) { opts.info("","show ontology statistics"); for (OWLOntology ont : g.getAllOntologies()) { summarizeOntology(ont); } } else if (opts.nextEq("--reason-intra-ontology")) { opts.info("[-r reasoner] ONT", ""); String ont = null; while (opts.hasOpts()) { if (opts.nextEq("-r|--reasoner")) { opts.info("REASONER", "select reasoner."); reasonerName = opts.nextOpt(); } else { break; } } ont = opts.nextOpt(); OWLPrettyPrinter owlpp = getPrettyPrinter(); for (OWLClass c : g.getAllOWLClasses()) { if (g.getIdSpace(c).equals(ont)) { Set<OWLClassExpression> parents = g.getSourceOntology().getSubClassAxiomsForSubClass(c).stream().map( a -> a.getSuperClass()).collect(Collectors.toSet()); Set<OWLClass> infParents = reasoner.getSuperClasses(c, true).getFlattened(); for (OWLClass p : infParents) { if (g.getIdSpace(p).equals(ont)) { if (!parents.contains(p)) { System.out.println("INFERRED: "+owlpp(c)+" SubClassOf " + owlpp(p)); } } } } } } else if (opts.nextEq("--spike-and-reason")) { opts.info("[-r reasoner]", "spikes in support ontologies and determines new inferences"); while (opts.hasOpts()) { if (opts.nextEq("-r|--reasoner")) { opts.info("REASONER", "select reasoner."); reasonerName = opts.nextOpt(); } else { break; } } OWLPrettyPrinter owlpp = getPrettyPrinter(); Set<OWLClass> sourceOntologyClasses = g.getSourceOntology().getClassesInSignature(Imports.EXCLUDED); LOG.info("SOURCE ONTOLOGY CLASSES:" 
+sourceOntologyClasses.size()); // create ancestor lookup, pre-spiking reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); Map<OWLClass, Set<OWLClass>> assertedParentMap = new HashMap<>(); for (OWLClass c : sourceOntologyClasses) { assertedParentMap.put(c, reasoner.getSuperClasses(c, false).getFlattened()); } reasoner.dispose(); // spike in support ontologies for (OWLOntology ont : g.getSupportOntologySet()) { LOG.info("MERGING:" +ont); g.mergeOntology(ont); } g.setSupportOntologySet(new HashSet<OWLOntology>()); // perform inference on spiked ontology, determine difference reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); int n = 0; LOG.info("TESTING:" +sourceOntologyClasses.size()); for (OWLClass c : sourceOntologyClasses) { // all ancestors in spiked ontology Set<OWLClass> infParents = new HashSet<>(reasoner.getSuperClasses(c, false).getFlattened()); Set<OWLClass> infParentsDirect = new HashSet<>(reasoner.getSuperClasses(c, true).getFlattened()); // get those unique to spiked ontology infParents.removeAll(assertedParentMap.get(c)); for (OWLClass p : infParents) { // only report new inferences within source if (sourceOntologyClasses.contains(p)) { Set<OWLClass> pSubs = reasoner.getSubClasses(p, true).getFlattened(); boolean isRedundant = false; for (OWLClass p2 : pSubs) { if (sourceOntologyClasses.contains(p2) && infParents.contains(p2)) { isRedundant = true; break; } } if (!isRedundant) { String isDirect = infParentsDirect.contains(p) ? 
"PARENT" : "ANCESTOR"; System.out.println(owlpp.render(c)+ "\t" + isDirect + "\t" + owlpp.render(p)); n++; } } } } LOG.info("NEW SUPERCLASSES:"+n); } else if (opts.nextEq("--save-superclass-closure")) { opts.info("[-r reasoner] FILENAME", "write out superclass closure of graph."); GraphRenderer gcw; while (opts.hasOpts()) { if (opts.nextEq("-r|--reasoner")) { opts.info("REASONER", "select reasoner."); reasonerName = opts.nextOpt(); } else { break; } } String filename = opts.nextOpt(); List<String> lines = new ArrayList<String>(); for (OWLClass c : g.getAllOWLClasses()) { Set<OWLClass> ecs = reasoner.getEquivalentClasses(c).getEntities(); Set<OWLClass> scs = reasoner.getSuperClasses(c, false).getFlattened(); Set<OWLClass> all = new HashSet<OWLClass>(ecs); all.addAll(scs); List<String> ids = new ArrayList<String>(); for (OWLClass sc : all) { ids.add(g.getIdentifier(sc)); } Collections.sort(ids); lines.add(g.getIdentifier(c)+"\t"+StringUtils.join(ids.iterator(), ",")); } Collections.sort(lines); FileUtils.writeLines(new File(filename), lines); } else if (opts.nextEq("--save-closure")) { opts.info("[-c] FILENAME", "write out closure of graph."); GraphRenderer gcw; if (opts.nextEq("-c")) { opts.info("", "compact storage option."); gcw = new CompactGraphClosureRenderer(opts.nextOpt()); } else { gcw = new GraphClosureRenderer(opts.nextOpt()); } gcw.render(g); } else if (opts.nextEq("--read-closure")) { opts.info("FILENAME", "reads closure previously saved using --save-closure (compact format only)"); GraphReader gr = new CompactGraphClosureReader(g); gr.read(opts.nextOpt()); LOG.info("RESTORED CLOSURE CACHE"); LOG.info("size="+g.inferredEdgeBySource.size()); } else if (opts.nextEq("--save-closure-for-chado")) { opts.info("OUTPUTFILENAME", "saves the graph closure in a format that is oriented towards loading into a Chado database"); boolean isChain = opts.nextEq("--chain"); ChadoGraphClosureRenderer gcw = new ChadoGraphClosureRenderer(opts.nextOpt()); gcw.isChain = 
isChain; gcw.render(g); } else if (opts.nextEq("--export-table")) { opts.info("[-c] OUTPUTFILENAME", "saves the ontology in tabular format (PARTIALLY IMPLEMENTED)"); boolean isWriteHeader = false; while (opts.hasOpts()) { if (opts.nextEq("-c")) opts.info("", "write column headers"); isWriteHeader = true; } String out = opts.nextOpt(); TableRenderer tr = new TableRenderer(out); tr.isWriteHeader = isWriteHeader; tr.render(g); } else if (opts.nextEq("--export-edge-table")) { opts.info("OUTPUTFILENAME", "saves the ontology edges in tabular format"); String out = opts.nextOpt(); EdgeTableRenderer tr = new EdgeTableRenderer(out); tr.render(g); } else if (opts.nextEq("--materialize-gcis")) { opts.info("[-m]", "infers axioms using GCIUtil"); boolean isMerge = false; while (opts.hasOpts()) { if (opts.nextEq("-m|--merge")) { isMerge = true; } else { break; } } if (reasoner == null) { System.err.println("REASONER NOT INITIALIZED!"); } OWLDataFactory df = g.getDataFactory(); Set<OWLSubClassOfAxiom> axioms = GCIUtil.getSubClassOfSomeValuesFromAxioms(g.getSourceOntology(), reasoner); if (!isMerge) { g.setSourceOntology(g.getManager().createOntology()); } g.getManager().addAxioms(g.getSourceOntology(), axioms); } else if (opts.nextEq("--assert-inferred-svfs")) { opts.info("[-p LIST] [-m] [-gp PROPERTY] [-gf FILLER]", "asserts inferred parents by property using ExtendedReasoner"); String out = null; boolean isMerge = false; List<OWLObjectProperty> props = null; OWLObjectProperty gciProperty = null; List<OWLClass> gciFillers = null; while (opts.hasOpts()) { if (opts.nextEq("-p|--plist")) { props = this.resolveObjectPropertyListAsList(opts); } //else if (opts.nextEq("-o|--output")) { // out = opts.nextOpt(); //} else if (opts.nextEq("-gp|--gci-property")) { gciProperty = this.resolveObjectProperty(opts.nextOpt()); } else if (opts.nextEq("-gf|--gci-fillers")) { gciFillers = resolveClassList(opts); } else if (opts.nextEq("-m|--merge")) { opts.info("", "adds inferred axioms to current 
ontology (default is new"); isMerge = true; } else { break; } } if (reasoner == null) { System.err.println("REASONER NOT INITIALIZED!"); } if (!(reasoner instanceof OWLExtendedReasoner)) { System.err.println("REASONER NOT AN EXTENDED REASONER. Recommended: add '--reasoner mexr' prior to this command"); } OWLExtendedReasoner emr = (OWLExtendedReasoner) reasoner; OWLDataFactory df = g.getDataFactory(); Set<OWLAxiom> axioms = new HashSet<OWLAxiom>(); for (OWLClass c : g.getAllOWLClasses()) { for (OWLObjectProperty p : props) { for (OWLClass parent : emr.getSuperClassesOver(c, p, true)) { axioms.add(df.getOWLSubClassOfAxiom(c, df.getOWLObjectSomeValuesFrom(p, parent))); } } } if (!isMerge) { g.setSourceOntology(g.getManager().createOntology()); } g.getManager().addAxioms(g.getSourceOntology(), axioms); } else if (opts.nextEq("--export-parents")) { opts.info("[-p LIST] [-o OUTPUTFILENAME]", "saves a table of all direct inferred parents by property using ExtendedReasoner"); String out = null; List<OWLObjectProperty> props = null; OWLObjectProperty gciProperty = null; List<OWLClass> gciFillers = null; while (opts.hasOpts()) { if (opts.nextEq("-p|--plist")) { props = this.resolveObjectPropertyListAsList(opts); } else if (opts.nextEq("-o|--output")) { out = opts.nextOpt(); } else if (opts.nextEq("-gp|--gci-property")) { gciProperty = this.resolveObjectProperty(opts.nextOpt()); } else if (opts.nextEq("-gf|--gci-fillers")) { gciFillers = resolveClassList(opts); } else { break; } } InferredParentRenderer tr = new InferredParentRenderer(out); tr.setProperties(props); if (gciFillers != null) tr.setGciFillers(gciFillers); if (gciProperty != null) tr.setGciProperty(gciProperty); tr.setReasoner((OWLExtendedReasoner) reasoner); tr.render(g); } else if (opts.nextEq("--remove-annotation-assertions")) { opts.info("[-l][-d][-s][-r][-p IRI]*", "removes annotation assertions to make a pure logic subset. 
Select axioms can be preserved"); boolean isPreserveLabels = false; boolean isPreserveDefinitions = false; boolean isPreserveSynonyms = false; boolean isPreserveRelations = false; boolean isPreserveDeprecations = true; Set<IRI> preserveAnnotationPropertyIRIs = new HashSet<IRI>(); while (opts.hasOpts()) { if (opts.nextEq("-l|--preserve-labels")) { opts.info("", "if specified, all rdfs labels are preserved"); isPreserveLabels = true; } else if (opts.nextEq("-d|--preserve-definitions")) { opts.info("", "if specified, all obo text defs are preserved"); isPreserveDefinitions = true; } else if (opts.nextEq("-s|--preserve-synonyms")) { opts.info("", "if specified, all obo-style synonyms are preserved"); isPreserveSynonyms = true; } else if (opts.nextEq("--remove-deprecation-axioms")) { opts.info("", "if specified, all owl:deprecated in NOT preserved"); isPreserveDeprecations = true; } else if (opts.nextEq("-r|--preserve-relations")) { opts.info("", "unless specified, all axioms about properties are removed"); isPreserveRelations = true; } else if (opts.nextEq("-p|--preserve-property")) { opts.info("IRI", "if specified, all properties with IRI are preserved. 
Can be specified multiple times"); preserveAnnotationPropertyIRIs.add(IRI.create(opts.nextOpt())); } else break; } for (OWLOntology o : g.getAllOntologies()) { Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); Set<OWLAxiom> keepAxioms = new HashSet<OWLAxiom>(); Set<OWLAnnotationProperty> propsToKeep = new HashSet<OWLAnnotationProperty>(); rmAxioms.addAll( o.getAxioms(AxiomType.ANNOTATION_ASSERTION) ); for (OWLAnnotationAssertionAxiom aaa : o.getAxioms(AxiomType.ANNOTATION_ASSERTION)) { IRI piri = aaa.getProperty().getIRI(); if (isPreserveLabels) { if (aaa.getProperty().isLabel()) { keepAxioms.add(aaa); } } if (isPreserveDeprecations) { if (aaa.getProperty().isDeprecated()) { keepAxioms.add(aaa); } } if (isPreserveDefinitions) { if (piri.equals(Obo2OWLVocabulary.IRI_IAO_0000115.getIRI())) { keepAxioms.add(aaa); } } if (isPreserveSynonyms) { if (piri.equals(Obo2OWLVocabulary.IRI_OIO_hasBroadSynonym.getIRI()) || piri.equals(Obo2OWLVocabulary.IRI_OIO_hasExactSynonym.getIRI()) || piri.equals(Obo2OWLVocabulary.IRI_OIO_hasRelatedSynonym.getIRI()) || piri.equals(Obo2OWLVocabulary.IRI_OIO_hasNarrowSynonym.getIRI())) { keepAxioms.add(aaa); } } if (preserveAnnotationPropertyIRIs.contains(piri)) keepAxioms.add(aaa); // remove non-classes if (!isPreserveRelations) { if (aaa.getSubject() instanceof IRI) { OWLClass c = g.getDataFactory().getOWLClass((IRI) aaa.getSubject()); if (o.getDeclarationAxioms(c).size() == 0) { keepAxioms.remove(aaa); } } } if (keepAxioms.contains(aaa)) { propsToKeep.add(aaa.getProperty()); } } LOG.info("Number of annotation assertion axioms:"+rmAxioms.size()); LOG.info("Axioms to preserve:"+keepAxioms.size()); rmAxioms.removeAll(keepAxioms); LOG.info("Number of annotation assertion axioms being removed:"+rmAxioms.size()); if (!isPreserveRelations) { for (OWLAnnotationProperty p : o.getAnnotationPropertiesInSignature()) { if (propsToKeep.contains(p)) continue; rmAxioms.addAll(o.getAnnotationAssertionAxioms(p.getIRI())); 
rmAxioms.add(g.getDataFactory().getOWLDeclarationAxiom(p)); } LOG.info("Total number of axioms being removed, including annotation properties:"+rmAxioms.size()); } g.getManager().removeAxioms(o, rmAxioms); // TODO - remove axiom annotations } } else if (opts.nextEq("--apply-patch")) { opts.info("minusAxiomsOntology plusAxiomsOntology", "applies 'patch' to current ontology"); OWLOntology ontMinus = pw.parse(opts.nextOpt()); OWLOntology ontPlus = pw.parse(opts.nextOpt()); OWLOntology src = g.getSourceOntology(); Set<OWLAxiom> rmAxioms = ontMinus.getAxioms(); Set<OWLAxiom> addAxioms = ontPlus.getAxioms(); int numPre = src.getAxiomCount(); LOG.info("Removing "+rmAxioms.size()+" axioms from src, current axiom count="+numPre); g.getManager().removeAxioms(src, rmAxioms); int numPost = src.getAxiomCount(); LOG.info("Removed axioms from src, new axiom count="+numPost); if (numPre-numPost != rmAxioms.size()) { LOG.error("Some axioms not found!"); } LOG.info("Adding "+addAxioms.size()+" axioms to src, current axiom count="+numPost); g.getManager().addAxioms(src, addAxioms); LOG.info("Added "+addAxioms.size()+" axioms to src, new count="+src.getAxiomCount()); if (src.getAxiomCount() - numPost != addAxioms.size()) { LOG.error("Some axioms already there!"); } } else if (opts.nextEq("--translate-xrefs-to-equivs")) { opts.info("[-m PREFIX URI]* [-p PREFIX] [-a] [-n]", "Translates the OBO xref property (or alt_id property, if -a set) into equivalence axioms"); Map<String,String> prefixMap = new HashMap<String,String>(); Set<String> prefixes = new HashSet<String>(); boolean isNew = false; boolean isUseAltIds = false; while (opts.hasOpts()) { if (opts.nextEq("-m")) { opts.info("PREFIX URI", "maps prefixs/DBs to URI prefixes"); prefixMap.put(opts.nextOpt(), opts.nextOpt()); } else if (opts.nextEq("-p")) { opts.info("PREFIX", "prefix to filter on"); prefixes.add(opts.nextOpt()); } else if (opts.nextEq("-a")) { opts.info("", "if true, use obo alt_ids"); isUseAltIds = true; } else if 
(opts.nextEq("-n")) { opts.info("", "if set, will generate a new ontology containing only equiv axioms"); isNew = true; } else { break; } } Set<OWLAxiom> axioms = new HashSet<OWLAxiom>(); for (OWLClass c : g.getAllOWLClasses()) { List<String> xrefs = g.getXref(c); if (isUseAltIds) { xrefs = g.getAltIds(c); LOG.info("Class "+c+" altIds: "+xrefs.size()); } for (String x : xrefs) { IRI iri = null; if (x.contains(" ")) { LOG.warn("Ignore xref with space: "+x); continue; } if (x.contains(":")) { String[] parts = x.split(":",2); if (prefixes.size() > 0 && !prefixes.contains(parts[0])) { continue; } if (prefixMap.containsKey(parts[0])) { iri = IRI.create(prefixMap.get(parts[0]) + parts[1]); } } if (iri == null) { iri = g.getIRIByIdentifier(x); } axioms.add(g.getDataFactory().getOWLEquivalentClassesAxiom(c, g.getDataFactory().getOWLClass(iri))); } } if (isNew) { g.setSourceOntology(g.getManager().createOntology()); } g.getManager().addAxioms(g.getSourceOntology(), axioms); } else if (opts.nextEq("--repair-relations")) { opts.info("", "replaces un-xrefed relations with correct IRIs"); OWLEntityRenamer oer = new OWLEntityRenamer(g.getManager(), g.getAllOntologies()); List<OWLOntologyChange> changes = new ArrayList<OWLOntologyChange> (); for (OWLObjectProperty p : g.getSourceOntology().getObjectPropertiesInSignature()) { IRI piri = p.getIRI(); if (piri.getFragment().equals("part_of")) { List<OWLOntologyChange> ch = oer.changeIRI(piri, g.getIRIByIdentifier("BFO:0000050")); changes.addAll(ch); } if (piri.getFragment().equals("has_part")) { List<OWLOntologyChange> ch = oer.changeIRI(piri, g.getIRIByIdentifier("BFO:0000051")); changes.addAll(ch); } } LOG.info("Repairs: "+changes.size()); g.getManager().applyChanges(changes); OboInOwlCardinalityTools.checkAnnotationCardinality(g.getSourceOntology()); } else if (opts.nextEq("--rename-entity")) { opts.info("OLD-IRI NEW-IRI", "used OWLEntityRenamer to switch IDs/IRIs"); OWLEntityRenamer oer = new OWLEntityRenamer(g.getManager(), 
g.getAllOntologies()); List<OWLOntologyChange> changes = oer.changeIRI(IRI.create(opts.nextOpt()),IRI.create(opts.nextOpt())); g.getManager().applyChanges(changes); } else if (opts.nextEq("--merge-equivalence-sets")) { opts.info("[-s PREFIX SCORE]* [-l PREFIX SCORE]* [-c PREFIX SCORE]* [-d PREFIX SCORE]* [-x]", "merges sets of equivalent classes. Prefix-based priorities used to determine representative member"); EquivalenceSetMergeUtil esmu = new EquivalenceSetMergeUtil(g, reasoner); while (opts.hasOpts()) { if (opts.nextEq("-s")) { opts.info("PREFIX SCORE", "Assigns a priority score for a prefix used to determine REPRESENTATIVE IRI for merging. E.g. -s HP 5 -s MP 4"); esmu.setPrefixScore( opts.nextOpt(), Double.parseDouble(opts.nextOpt()) ); } else if (opts.nextEq("-l")) { opts.info("PREFIX SCORE", "Assigns a priority score to determine which LABEL should be used post-merge. E.g. -s HP 5 -s MP 4 means HP prefered"); OWLAnnotationProperty p = g.getDataFactory().getOWLAnnotationProperty( OWLRDFVocabulary.RDFS_LABEL.getIRI() ); esmu.setPropertyPrefixScore( p, opts.nextOpt(), Double.parseDouble(opts.nextOpt()) ); } else if (opts.nextEq("-c")) { opts.info("PREFIX SCORE", "Assigns a priority score to determine which COMMENT should be used post-merge. E.g. -s HP 5 -s MP 4 means HP prefered"); OWLAnnotationProperty p = g.getDataFactory().getOWLAnnotationProperty( OWLRDFVocabulary.RDFS_COMMENT.getIRI() ); esmu.setPropertyPrefixScore( p, opts.nextOpt(), Double.parseDouble(opts.nextOpt()) ); } else if (opts.nextEq("-d")) { opts.info("PREFIX SCORE", "Assigns a priority score to determine which DEFINITION should be used post-merge. E.g. 
-s HP 5 -s MP 4"); OWLAnnotationProperty p = g.getDataFactory().getOWLAnnotationProperty( Obo2OWLVocabulary.IRI_IAO_0000115.getIRI() ); esmu.setPropertyPrefixScore( p, opts.nextOpt(), Double.parseDouble(opts.nextOpt()) ); } else if (opts.nextEq("-x")) { opts.info("", "Removes xrefs between named classes"); esmu.setRemoveAxiomatizedXRefs(true); } else { break; } } esmu.merge(); } else if (opts.nextEq("--merge-equivalent-classes")) { opts.info("[-f FROM-URI-PREFIX]* [-t TO-URI-PREFIX] [-a] [-sa]", "merges equivalent classes, from source(s) to target ontology"); List<String> prefixFroms = new Vector<String>(); String prefixTo = null; boolean isKeepAllAnnotations = false; boolean isPrioritizeAnnotationsFromSource = false; while (opts.hasOpts()) { if (opts.nextEq("-f")) { opts.info("", "a URI or OBO prefix for the source entities. This may be listed multiple times"); String pfx = opts.nextOpt(); if (!pfx.startsWith("http")) pfx = "http://purl.obolibrary.org/obo/"+pfx+"_"; prefixFroms.add(pfx); } else if (opts.nextEq("-t")) { opts.info("", "a URI or OBO prefix for the target entities. This must be listed once"); prefixTo = opts.nextOpt(); if (!prefixTo.startsWith("http")) prefixTo = "http://purl.obolibrary.org/obo/"+prefixTo+"_"; } else if (opts.nextEq("-a|--keep-all-annotations")) { opts.info("", "if set, all annotations are preserved. 
Resulting ontology may have duplicate labels and definitions"); isKeepAllAnnotations = true; } else if (opts.nextEq("-sa|--prioritize-annotations-from-source")) { opts.info("", "if set, then when collapsing label and def annotations, use the source annotation over the target"); isPrioritizeAnnotationsFromSource = true; } else break; } Map<OWLEntity,IRI> e2iri = new HashMap<OWLEntity,IRI>(); LOG.info("building entity2IRI map...: " + prefixFroms + " --> "+prefixTo); OWLEntityRenamer oer = new OWLEntityRenamer(g.getManager(), g.getAllOntologies()); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); // we only map classes in the source ontology - however, // we use equivalence axioms from the full complement of ontologies // TODO - allow arbitrary entities Map<Integer,Integer> binSizeMap = new HashMap<Integer,Integer>(); for (OWLClass e : g.getSourceOntology().getClassesInSignature()) { //LOG.info(" testing "+c+" ECAs: "+g.getSourceOntology().getEquivalentClassesAxioms(c)); // TODO - may be more efficient to invert order of testing String iriStr = e.getIRI().toString(); boolean isMatch = false; for (String prefixFrom : prefixFroms) { if (iriStr.startsWith(prefixFrom)) { isMatch = true; break; } } if (prefixFroms.size()==0) isMatch = true; if (isMatch) { Set<OWLClass> ecs = new HashSet<OWLClass>(); if (reasoner != null) { ecs = reasoner.getEquivalentClasses(e).getEntities(); } else { // we also scan support ontologies for equivalencies for (OWLOntology ont : g.getAllOntologies()) { // c is the same of e.. why do this? 
OWLClass c = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(e.getIRI()); for (OWLEquivalentClassesAxiom eca : ont.getEquivalentClassesAxioms(c)) { ecs.addAll(eca.getNamedClasses()); } } } int size = ecs.size(); if (binSizeMap.containsKey(size)) { binSizeMap.put(size, binSizeMap.get(size) +1); } else { binSizeMap.put(size, 1); } for (OWLClass d : ecs) { if (d.equals(e)) continue; if (prefixTo == null || d.getIRI().toString().startsWith(prefixTo)) { // add to mapping. Renaming will happen later e2iri.put(e, d.getIRI()); // TODO one-to-many // annotation collapsing. In OBO, max cardinality of label, comment and definition is 1 // note that this not guaranteed to work if multiple terms are being merged in if (!isKeepAllAnnotations) { OWLClass mainObj = d; OWLClass secondaryObj = e; if (isPrioritizeAnnotationsFromSource) { mainObj = e; secondaryObj = d; } // ensure OBO cardinality of properties is preserved for (OWLAnnotationAssertionAxiom aaa : g.getSourceOntology().getAnnotationAssertionAxioms(secondaryObj.getIRI())) { if (aaa.getProperty().isLabel()) { if (g.getLabel(mainObj) != null) { rmAxioms.add(aaa); // todo - option to translate to synonym } } if (aaa.getProperty().getIRI().equals(Obo2OWLVocabulary.IRI_IAO_0000115.getIRI())) { if (g.getDef(mainObj) != null) { rmAxioms.add(aaa); } } if (aaa.getProperty().isComment()) { if (g.getComment(mainObj) != null) { rmAxioms.add(aaa); } } } } } } } } for (Integer k : binSizeMap.keySet()) { LOG.info(" | Bin( "+k+" classes ) | = "+binSizeMap.get(k)); } g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms); LOG.info("Mapping "+e2iri.size()+" entities"); // TODO - this is slow List<OWLOntologyChange> changes = oer.changeIRI(e2iri); g.getManager().applyChanges(changes); LOG.info("Mapped "+e2iri.size()+" entities!"); } else if (opts.nextEq("--rename-entities-via-equivalent-classes")) { opts.info("[-p PREFIX]", "renames entities in source ontology, using equiv axioms from all"); String prefix = null; String 
prefixTo = null; while (opts.hasOpts()) { if (opts.nextEq("-p|--prefix")) { opts.info("", "prefix to map from (FULL URI)"); prefix = opts.nextOpt(); } else if (opts.nextEq("-q|--prefix-to")) { opts.info("", "prefix to map to (FULL URI)"); prefixTo = opts.nextOpt(); } else break; } Map<OWLEntity,IRI> e2iri = new HashMap<OWLEntity,IRI>(); OWLEntityRenamer oer = new OWLEntityRenamer(g.getManager(), g.getAllOntologies()); Set<IRI> entities = new HashSet<IRI>(); for (OWLClass c : g.getSourceOntology().getClassesInSignature()) { entities.add(c.getIRI()); } for (OWLAnnotationAssertionAxiom aaa : g.getSourceOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION)) { if (aaa.getSubject() instanceof IRI) { entities.add((IRI) aaa.getSubject()); } } // we only map classes in the source ontology - however, // we use equivalence axioms from the full complement of ontologies // TODO - allow arbitrary entities for (IRI e : entities) { for (OWLOntology ont : g.getAllOntologies()) { OWLClass c = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(e); if (prefix != null && !c.getIRI().toString().startsWith(prefix)) { continue; } for (OWLClassExpression d : OwlHelper.getEquivalentClasses(c, ont)) { if (d instanceof OWLClass) { if (prefixTo != null && !d.asOWLClass().getIRI().toString().startsWith(prefixTo)) { continue; } e2iri.put(c, d.asOWLClass().getIRI()); LOG.info(" "+c+" ==> "+d ); } } } } LOG.info("Mapping "+e2iri.size()+" entities"); // TODO - this is slow List<OWLOntologyChange> changes = oer.changeIRI(e2iri); g.getManager().applyChanges(changes); LOG.info("Mapped "+e2iri.size()+" entities!"); } else if (opts.nextEq("--query-cw")) { opts.info("", "closed-world query"); OWLPrettyPrinter owlpp = getPrettyPrinter(); for (OWLClass qc : queryExpressionMap.keySet()) { System.out.println(" CWQueryClass: "+qc); System.out.println(" CWQueryClass: "+owlpp.render(qc)+" "+qc.getIRI().toString()); OWLClassExpression ec = queryExpressionMap.get(qc); System.out.println(" 
CWQueryExpression: "+owlpp.render(ec)); Set<OWLObject> results = g.queryDescendants(ec); for (OWLObject result : results) { if (result instanceof OWLClass) { System.out.println(" "+owlpp.render((OWLClass)result)); } } } } else if (opts.nextEq("--extract-ontology-metadata")) { opts.info("[-c ONT-IRI]", "extracts annotations from ontology"); String mdoIRI = "http://x.org"; while (opts.hasOpts()) { if (opts.nextEq("-c")) { mdoIRI = opts.nextOpt(); } else break; } OWLOntology mdo = ImportChainExtractor.extractOntologyMetadata(g, mdoIRI); g.setSourceOntology(mdo); } else if (opts.nextEq("--write-imports-dot")) { opts.info("OUT", "writes imports chain as dot file"); String output = opts.nextOpt(); ImportChainDotWriter writer = new ImportChainDotWriter(g); writer.renderDot(g.getSourceOntology(), g.getOntologyId(), output, true); } else if (opts.nextEq("--ontology-metadata-to-markdown")) { opts.info("OUT", "writes md from ontology metadata"); String output = opts.nextOpt(); BufferedWriter fileWriter = new BufferedWriter(new FileWriter(new File(output))); String s = OntologyMetadataMarkdownWriter.renderMarkdown(g, ".", true); fileWriter.write(s); fileWriter.close(); } else if (opts.nextEq("--ontology-to-markdown")) { opts.info("[-l LEVEL] DIR", "writes md from ontology"); int level = 2; while (opts.hasOpts()) { if (opts.nextEq("-l|--level")) { level = Integer.parseInt(opts.nextOpt()); } else break; } String dir = opts.nextOpt(); MarkdownRenderer mr = new MarkdownRenderer(); mr.setChunkLevel(level); mr.render(g.getSourceOntology(), dir); } else if (opts.nextEq("--add-obo-shorthand-to-properties")) { Set<OWLObjectProperty> props = g.getSourceOntology().getObjectPropertiesInSignature(Imports.INCLUDED); OWLDataFactory df = g.getDataFactory(); Set<OWLAxiom> addAxioms = new HashSet<OWLAxiom>(); Set<OWLAxiom> removeAxioms = new HashSet<OWLAxiom>(); final String MODE_MISSING = "missing"; // add missing axioms final String MODE_REPLACE = "replace"; // replace all axioms final String 
MODE_ADD = "add"; // old mode, which is very broken String mode = MODE_MISSING; // safe default, only add missing axioms while (opts.hasOpts()) { if (opts.nextEq("-m|--add-missing")) { mode = MODE_MISSING; } else if (opts.nextEq("-r|--replace-all")) { mode = MODE_REPLACE; } else if (opts.nextEq("--always-add")) { // this models the old broken behavior, generally not recommended mode = MODE_ADD; } else { break; } } if (MODE_ADD.equals(mode)) { LOG.warn("Using the always add mode is not recommended. Make an explicit choice by either add missing '-m' or replace all '-r' shorthand information."); } final OWLAnnotationProperty shorthandProperty = df.getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#shorthand")); final OWLAnnotationProperty xrefProperty = df.getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#hasDbXref")); for (OWLObjectProperty prop : props) { if (prop.isBuiltIn()) { continue; } IRI entity = prop.getIRI(); // retrieve existing Set<OWLAnnotationAssertionAxiom> annotationAxioms = g.getSourceOntology().getAnnotationAssertionAxioms(entity); Set<OWLAnnotationAssertionAxiom> shorthandAxioms = new HashSet<OWLAnnotationAssertionAxiom>(); Set<OWLAnnotationAssertionAxiom> xrefAxioms = new HashSet<OWLAnnotationAssertionAxiom>(); for (OWLAnnotationAssertionAxiom axiom : annotationAxioms) { OWLAnnotationProperty property = axiom.getProperty(); if (shorthandProperty.equals(property)) { shorthandAxioms.add(axiom); } else if (xrefProperty.equals(property)) { xrefAxioms.add(axiom); } } // check what needs to be added boolean addShortHand = false; boolean addXref = false; if (MODE_REPLACE.equals(mode)) { // replace existing axioms removeAxioms.addAll(shorthandAxioms); removeAxioms.addAll(xrefAxioms); addShortHand = true; addXref = true; } else if (MODE_MISSING.equals(mode)) { // add missing axioms addShortHand = shorthandAxioms.isEmpty(); addXref = xrefAxioms.isEmpty(); } else if (MODE_ADD.equals(mode)) { // 
old broken mode: regardless of current axioms always add axioms addShortHand = true; addXref = true; } // create required axioms if (addShortHand) { // shorthand String id = g.getLabel(prop); if (id != null) { id = id.replaceAll(" ", "_"); OWLAxiom ax = df.getOWLAnnotationAssertionAxiom( shorthandProperty, prop.getIRI(), df.getOWLLiteral(id)); addAxioms.add(ax); LOG.info(ax); } else { LOG.error("No label: "+prop); } } if (addXref) { // xref to OBO style ID String pid = Owl2Obo.getIdentifier(prop.getIRI()); OWLAxiom ax = df.getOWLAnnotationAssertionAxiom( xrefProperty, prop.getIRI(), df.getOWLLiteral(pid)); addAxioms.add(ax); LOG.info(ax); } } // update axioms if (removeAxioms.isEmpty() == false) { LOG.info("Total axioms removed: "+removeAxioms.size()); g.getManager().addAxioms(g.getSourceOntology(), removeAxioms); } if (addAxioms.isEmpty() == false) { LOG.info("Total axioms added: "+addAxioms.size()); g.getManager().addAxioms(g.getSourceOntology(), addAxioms); } } else if (opts.nextEq("--extract-properties")) { LOG.warn("Deprecated - use --extract-module"); opts.info("[-p PROP]* [--list PLIST] [--no-shorthand]", "extracts properties from source ontology. If properties not specified, then support ontologies will be used"); Set<OWLProperty> props = new HashSet<OWLProperty>(); boolean useProps = false; boolean isCreateShorthand = true; boolean isExpansive = false; boolean isUseSubProperties = false; UUID uuid = UUID.randomUUID(); IRI newIRI = IRI.create("http://purl.obolibrary.org/obo/temporary/"+uuid.toString()); while (opts.hasOpts()) { if (opts.nextEq("-p")) { opts.info("PROP", "Add this property to the set of interest"); props.add(this.resolveObjectProperty(opts.nextOpt())); useProps = true; } else if (opts.nextEq("-s|--subproperties")) { opts.info("", "If set, subproperties are used"); isUseSubProperties = true; } else if (opts.nextEq("--list")) { opts.info("PROPLIST", "Terminated by '//'. Add these properties to the set of interest. 
ALL-POPERTIES for all"); Set<OWLObjectProperty> nprops = this.resolveObjectPropertyList(opts); props.addAll(nprops); useProps = true; } else if (opts.nextEq("--no-shorthand")) { opts.info("", "Do not create OBO shorthands. Resulting OBO format will use numeric IDs as primary"); isCreateShorthand = false; } else { break; } } PropertyExtractor pe; pe = new PropertyExtractor(g.getSourceOntology()); pe.isCreateShorthand = isCreateShorthand; pe.isUseSubProperties = isUseSubProperties; OWLOntology pont; if (useProps) { // use user-specified proeprty list pont = pe.extractPropertyOntology(newIRI, props); } else { // use the support ontology as the source of property usages pont = pe.extractPropertyOntology(newIRI, g.getSupportOntologySet().iterator().next()); } g.setSourceOntology(pont); } else if (opts.nextEq("--extract-mingraph")) { opts.info("", "Extracts a minimal graph ontology containing only label, subclass and equivalence axioms"); String idspace = null; boolean isPreserveOntologyAnnotations = false; while (opts.hasOpts()) { if (opts.nextEq("--idspace")) { opts.info("IDSPACE", "E.g. GO. 
If set, only the reflexive closure of this ontology will be included"); idspace = opts.nextOpt(); } else if (opts.nextEq("-a|--preserve-ontology-annotations")) { opts.info("", "Set if ontology header is to be preserved"); isPreserveOntologyAnnotations = true; } else { break; } } Set <OWLClass> seedClasses = new HashSet<OWLClass>(); OWLOntology src = g.getSourceOntology(); Set<OWLAxiom> axioms = new HashSet<OWLAxiom>(); Set<OWLAnnotation> anns = new HashSet<OWLAnnotation>(); if (isPreserveOntologyAnnotations) { anns = src.getAnnotations(); } axioms.addAll(src.getAxioms(AxiomType.SUBCLASS_OF)); axioms.addAll(src.getAxioms(AxiomType.EQUIVALENT_CLASSES)); for (OWLAnnotationAssertionAxiom aaa : src.getAxioms(AxiomType.ANNOTATION_ASSERTION)) { if (aaa.getProperty().isLabel()) { axioms.add(aaa); //LOG.info("LABEL:"+aaa); } } removeAxiomsReferencingDeprecatedClasses(axioms); LOG.info("#axioms: "+axioms.size()); for (OWLClass c : src.getClassesInSignature()) { String id = g.getIdentifier(c); if (idspace == null || id.startsWith(idspace+":")) { boolean isDep = false; for (OWLAnnotation ann : OwlHelper.getAnnotations(c, src)) { if (ann.isDeprecatedIRIAnnotation()) { isDep = true; break; } } if (!isDep) { seedClasses.add(c); } } } LOG.info("#classes: "+seedClasses.size()); g.addSupportOntology(src); OWLOntology newOnt = src.getOWLOntologyManager().createOntology(axioms); Set<OWLClass> retainedClasses = removeUnreachableAxioms(newOnt, seedClasses); for (OWLClass c : retainedClasses) { newOnt.getOWLOntologyManager().addAxiom(newOnt, g.getDataFactory().getOWLDeclarationAxiom(c)); } PropertyExtractor pe; pe = new PropertyExtractor(src); pe.isCreateShorthand = true; OWLOntology pont; HashSet<OWLProperty> props = new HashSet<OWLProperty>(); for (OWLObjectProperty p : newOnt.getObjectPropertiesInSignature()) { props.add(p); } pont = pe.extractPropertyOntology(null, props); axioms = new HashSet<OWLAxiom>(); for (OWLAxiom axiom : pont.getAxioms()) { if (axiom instanceof 
OWLObjectPropertyCharacteristicAxiom) { axioms.add(axiom); } else if (axiom instanceof OWLSubObjectPropertyOfAxiom) { axioms.add(axiom); } else if (axiom instanceof OWLSubPropertyChainOfAxiom) { axioms.add(axiom); } else if (axiom instanceof OWLAnnotationAssertionAxiom) { OWLAnnotationAssertionAxiom aaa = (OWLAnnotationAssertionAxiom) axiom; if (aaa.getProperty().isLabel()) { axioms.add(axiom); } else if (aaa.getProperty().getIRI().toString().toLowerCase().contains("shorthand")) { // TODO: fix hack axioms.add(axiom); } else if (aaa.getProperty().getIRI().toString().toLowerCase().contains("xref")) { // TODO: fix hack axioms.add(axiom); } } else if (axiom instanceof OWLDeclarationAxiom) { axioms.add(axiom); } } newOnt.getOWLOntologyManager().addAxioms(newOnt, axioms); g.setSourceOntology(newOnt); for (OWLAnnotation ann : anns ) { AddOntologyAnnotation addAnn = new AddOntologyAnnotation(g.getSourceOntology(), ann); g.getManager().applyChange(addAnn); } //g.mergeOntology(pont); AxiomAnnotationTools.reduceAxiomAnnotationsToOboBasic(newOnt); OboInOwlCardinalityTools.checkAnnotationCardinality(newOnt); } else if (opts.nextEq("--extract-axioms")) { opts.info("[-t TYPE]", "Extracts axioms of specified type into the source ontology (existing source is moved to support)"); AxiomType axiomType = AxiomType.EQUIVALENT_CLASSES; while (opts.hasOpts()) { if (opts.nextEq("-t|--type")) { opts.info("AxiomType", "OWL2 syntax for axiom type. Default is EquivalentClasses"); String axiomTypeStr = opts.nextOpt(); axiomType = AxiomType.getAxiomType(axiomTypeStr); if (axiomType == null) { throw new OptionException("invalid axiom type "+axiomTypeStr+" -- must be OWL2 syntax, e.g. 
'SubClassOf'"); } } else { break; } } OWLOntology src = g.getSourceOntology(); LOG.info("axiomType = "+axiomType); Set<OWLAxiom> axioms = src.getAxioms(axiomType); LOG.info("#axioms: "+axioms.size()); g.addSupportOntology(src); OWLOntology newOnt = src.getOWLOntologyManager().createOntology(axioms); g.setSourceOntology(newOnt); } else if (opts.nextEq("--extract-bridge-ontologies")) { opts.info("[-d OUTDIR] [-x] [-s ONTID]", ""); String dir = "bridge/"; String ontId = null; boolean isRemoveBridgeAxiomsFromSource = false; RDFXMLDocumentFormat fmt = new RDFXMLDocumentFormat(); while (opts.hasOpts()) { if (opts.nextEq("-d")) { opts.info("DIR", "bridge files are generated in this directory. Default: ./bridge/"); dir = opts.nextOpt(); } else if (opts.nextEq("-c")) { opts.info("TGT SRCLIST", "Combines all src onts to tgt. TODO"); String tgt = opts.nextOpt(); List<String> srcs = opts.nextList(); } else if (opts.nextEq("-x")) { opts.info("", "If specified, bridge axioms are removed from the source"); isRemoveBridgeAxiomsFromSource = true; } else if (opts.nextEq("-s")) { opts.info("ONTID", "If specified, ..."); ontId = opts.nextOpt(); } else { break; } } BridgeExtractor be = new BridgeExtractor(g.getSourceOntology()); be.subDir = dir; be.extractBridgeOntologies(ontId, isRemoveBridgeAxiomsFromSource); be.saveBridgeOntologies(dir, fmt); } else if (opts.nextEq("--expand-macros")) { opts.info("", "performs expansion on assertions and expressions. 
"+ "See http://oboformat.googlecode.com/svn/trunk/doc/obo-syntax.html#7"); MacroExpansionVisitor mev = new MacroExpansionVisitor(g.getSourceOntology()); mev.expandAll(); } else if (opts.nextEq("--expand-expression")) { opts.info("PROP EXPRESSION", "uses OBO Macros to expand expressions with PROP to the target expression using ?Y"); OWLObjectProperty p = resolveObjectProperty(opts.nextOpt()); String expr = opts.nextOpt(); OWLAnnotationAssertionAxiom aaa = g.getDataFactory().getOWLAnnotationAssertionAxiom( g.getDataFactory().getOWLAnnotationProperty(Obo2OWLVocabulary.IRI_IAO_0000424.getIRI()), p.getIRI(), g.getDataFactory().getOWLLiteral(expr)); g.getManager().addAxiom(g.getSourceOntology(), aaa); MacroExpansionVisitor mev = new MacroExpansionVisitor(g.getSourceOntology()); mev.expandAll(); } else if (opts.nextEq("--expand-assertion")) { opts.info("PROP ASSERTION", "uses OBO Macros to expand expressions with PROP to the target expression using ?X and ?Y"); OWLNamedObject p = (OWLNamedObject) this.resolveEntity(opts.nextOpt()); String expr = opts.nextOpt(); OWLAnnotationAssertionAxiom aaa = g.getDataFactory().getOWLAnnotationAssertionAxiom( g.getDataFactory().getOWLAnnotationProperty(Obo2OWLVocabulary.IRI_IAO_0000425.getIRI()), p.getIRI(), g.getDataFactory().getOWLLiteral(expr)); g.getManager().addAxiom(g.getSourceOntology(), aaa); MacroExpansionVisitor mev = new MacroExpansionVisitor(g.getSourceOntology()); mev.expandAll(); } else if (opts.nextEq("--sparql-dl")) { opts.info("\"QUERY-TEXT\"", "executes a SPARQL-DL query using the reasoner"); /* Examples: * SELECT * WHERE { SubClassOf(?x,?y)} */ if (reasoner == null) { reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); } String q = opts.nextOpt(); System.out.println("Q="+q); OWLPrettyPrinter owlpp = new OWLPrettyPrinter(g); try { QueryEngine engine; Query query = Query.create(q); engine = QueryEngine.create(g.getManager(), reasoner, true); QueryResult result = engine.execute(query); 
if(query.isAsk()) { System.out.print("Result: "); if(result.ask()) { System.out.println("yes"); } else { System.out.println("no"); } } else { if(!result.ask()) { System.out.println("Query has no solution.\n"); } else { System.out.println("Results:"); for (int i=0; i < result.size(); i++) { System.out.print("["+i+"] "); QueryBinding qb = result.get(i); for (QueryArgument qa : qb.getBoundArgs()) { String k = qa.toString(); System.out.print(" "+k+"="); QueryArgument v = qb.get(qa); String out = v.toString(); if (v.getType().equals(QueryArgumentType.URI)) { out = owlpp.renderIRI(v.toString()); } System.out.print(out+"; "); } System.out.println(""); } //System.out.print(result); System.out.println("-------------------------------------------------"); System.out.println("Size of result set: " + result.size()); } } } catch (QueryParserException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (QueryEngineException e) { // TODO Auto-generated catch block e.printStackTrace(); } } else if (opts.nextEq("--remove-abox")) { opts.info("", "removes all named individual declarations and all individual axioms (e.g. 
class/property assertion"); for (OWLOntology ont : g.getAllOntologies()) { Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); rmAxioms.addAll(ont.getAxioms(AxiomType.DIFFERENT_INDIVIDUALS)); rmAxioms.addAll(ont.getAxioms(AxiomType.CLASS_ASSERTION)); rmAxioms.addAll(ont.getAxioms(AxiomType.OBJECT_PROPERTY_ASSERTION)); for (OWLNamedIndividual ind : ont.getIndividualsInSignature()) { rmAxioms.add(g.getDataFactory().getOWLDeclarationAxiom(ind)); } g.getManager().removeAxioms(ont, rmAxioms); } } else if (opts.nextEq("--remove-tbox")) { opts.info("", "removes all class axioms"); for (OWLOntology ont : g.getAllOntologies()) { Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); for (OWLAxiom ax : ont.getAxioms()) { if (ax instanceof OWLClassAxiom) { rmAxioms.add(ax); } else if (ax instanceof OWLDeclarationAxiom) { if ( ((OWLDeclarationAxiom)ax).getEntity() instanceof OWLClass) { rmAxioms.add(ax); } } else if (ax instanceof OWLAnnotationAssertionAxiom) { OWLAnnotationSubject subj = ((OWLAnnotationAssertionAxiom)ax).getSubject(); if (subj instanceof IRI) { // warning - excessive pruning if there is punning if (ont.getClassesInSignature(Imports.INCLUDED).contains(g.getDataFactory().getOWLClass((IRI) subj))) { rmAxioms.add(ax); } } } } g.getManager().removeAxioms(ont, rmAxioms); } } else if (opts.nextEq("--i2c")) { opts.info("[-s]", "Converts individuals to classes"); boolean isReplaceOntology = false; while (opts.hasOpts()) { if (opts.nextEq("-s")) { isReplaceOntology = true; } else { break; } } Set<OWLAxiom> axs = new HashSet<OWLAxiom>(); OWLOntology ont = g.getSourceOntology(); for (OWLNamedIndividual i : ont.getIndividualsInSignature()) { OWLClass c = g.getDataFactory().getOWLClass(i.getIRI()); for (OWLClassExpression ce : OwlHelper.getTypes(i, ont)) { axs.add(g.getDataFactory().getOWLSubClassOfAxiom(c, ce)); } //g.getDataFactory().getOWLDe for (OWLClassAssertionAxiom ax : ont.getClassAssertionAxioms(i)) { g.getManager().removeAxiom(ont, ax); } for (OWLDeclarationAxiom ax : 
ont.getDeclarationAxioms(i)) { g.getManager().removeAxiom(ont, ax); } //g.getDataFactory().getOWLDeclarationAxiom(owlEntity) } if (isReplaceOntology) { for (OWLAxiom ax : g.getSourceOntology().getAxioms()) { g.getManager().removeAxiom(ont, ax); } } for (OWLAxiom axiom : axs) { g.getManager().addAxiom(ont, axiom); } } else if (opts.nextEq("--init-reasoner")) { opts.info("[-r reasonername]", "Creates a reasoner object"); while (opts.hasOpts()) { if (opts.nextEq("-r")) { reasonerName = opts.nextOpt(); } else { break; } } reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); } else if (opts.nextEq("--reasoner-query")) { opts.info("[-r reasonername] [-m] [-d] [-a] [-x] [-c IRI] (--stdin | CLASS-EXPRESSION | -l CLASS-LABEL)", "Queries current ontology for descendants, ancestors and equivalents of CE using reasoner.\n"+ "Enclose all labels in quotes (--stdin only). E.g. echo \"'part of' some 'tentacle'\" | owltools ceph.owl --reasoner-query --stdin"); boolean isManifest = false; boolean isDescendants = true; boolean isIndividuals = false; boolean isAncestors = true; boolean isEquivalents = true; boolean isExtended = false; boolean isCache = false; boolean isRemoveUnsatisfiable = false; boolean isSubOntExcludeClosure = false; String subOntologyIRI = null; OWLClassExpression ce = null; String expression = null; while (opts.hasOpts()) { if (opts.nextEq("-r")) { reasoner = null; reasonerName = opts.nextOpt(); if (reasonerName.toLowerCase().equals("elk")) isManifest = true; } else if (opts.nextEq("-m")) { opts.info("", "manifests the class exression as a class equivalent to query CE and uses this as a query; required for older versions of Elk"); isManifest = true; } else if (opts.nextEq("-d")) { opts.info("", "show descendants, but not ancestors (default is both + equivs)"); isDescendants = true; isAncestors = false; } else if (opts.nextEq("-a")) { opts.info("", "show ancestors, but not descendants (default is both + equivs)"); isDescendants = false; 
isAncestors = true; } else if (opts.nextEq("-e")) { opts.info("", "show equivalents only (default is ancestors + descendants + equivs)"); isDescendants = false; isAncestors = false; } else if (opts.nextEq("-i")) { opts.info("", "show inferred individuals, as well as ancestors/descendants/equivalents"); isIndividuals = true; } else if (opts.nextEq("--stdin")) { try { BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); System.out.print("> QUERY: "); expression = in.readLine(); } catch (IOException e) { } } else if (opts.nextEq("-x")) { isExtended = true; } else if (opts.nextEq("-c")) { if (opts.nextEq("--exclude-closure")) isSubOntExcludeClosure = true; subOntologyIRI = opts.nextOpt(); } else if (opts.nextEq("--cache")) { isCache = true; } else if (opts.nextEq("-l")) { ce = (OWLClassExpression) resolveEntity(opts); } else { break; } } if (ce == null && expression == null) expression = opts.nextOpt(); OWLPrettyPrinter owlpp = getPrettyPrinter(); Set<OWLClass> results = new HashSet<OWLClass>(); ManchesterSyntaxTool parser = new ManchesterSyntaxTool(g.getSourceOntology(), g.getSupportOntologySet()); try { if (ce == null) { System.out.println("# PARSING: "+expression); ce = parser.parseManchesterExpression(expression); } System.out.println("# QUERY: "+owlpp.render(ce)); if (ce instanceof OWLClass) results.add((OWLClass) ce); // some reasoners such as elk cannot query using class expressions - we manifest // the class expression as a named class in order to bypass this limitation if (isManifest && !(ce instanceof OWLClass)) { System.err.println("-m deprecated: consider using --reasoner welk"); OWLClass qc = g.getDataFactory().getOWLClass(IRI.create("http://owltools.org/Q")); g.getManager().removeAxioms(g.getSourceOntology(), g.getSourceOntology().getAxioms(qc, Imports.EXCLUDED)); OWLEquivalentClassesAxiom ax = g.getDataFactory().getOWLEquivalentClassesAxiom(ce, qc); g.getManager().addAxiom(g.getSourceOntology(), ax); if (reasoner != null) 
reasoner.flush(); ce = qc; } ExpressionMaterializingReasoner xr = null; if (isExtended) { if (reasoner != null) { LOG.error("Reasoner should NOT be set prior to creating EMR - unsetting"); } xr = new ExpressionMaterializingReasoner(g.getSourceOntology(), createReasonerFactory(reasonerName)); LOG.info("materializing... [doing this before initializing reasoner]"); xr.materializeExpressions(); LOG.info("set extended reasoner: "+xr); reasoner = xr; } else { if (reasoner == null) { reasoner = createReasoner(g.getSourceOntology(), reasonerName, g.getManager()); } } if (isIndividuals) { for (OWLNamedIndividual r : reasoner.getInstances(ce, false).getFlattened()) { //results.add(r); if (!isCache) System.out.println("D: "+owlpp.render(r)); } } if (isEquivalents) { for (OWLClass r : reasoner.getEquivalentClasses(ce).getEntities()) { results.add(r); if (!isCache) System.out.println("E: "+owlpp.render(r)); } } if (isDescendants) { for (OWLClass r : reasoner.getSubClasses(ce, false).getFlattened()) { results.add(r); if (!isCache) System.out.println("D: "+owlpp.render(r)); } } if (isAncestors) { if (isExtended) { for (OWLClassExpression r : ((OWLExtendedReasoner) reasoner).getSuperClassExpressions(ce, false)) { if (r instanceof OWLClass) results.add((OWLClass) r); if (!isCache) System.out.println("A:"+owlpp.render(r)); } } else { for (OWLClass r : reasoner.getSuperClasses(ce, false).getFlattened()) { results.add(r); if (!isCache) System.out.println("A:"+owlpp.render(r)); } } } } catch (OWLParserException e) { // TODO Auto-generated catch block e.printStackTrace(); } finally { // always dispose parser to avoid a memory leak parser.dispose(); } if (owlObjectCachedSet == null) owlObjectCachedSet = new HashSet<OWLObject>(); owlObjectCachedSet.addAll(results); // --- // Create a sub-ontology // --- if (subOntologyIRI != null) { //g.mergeImportClosure(); QuerySubsetGenerator subsetGenerator = new QuerySubsetGenerator(); OWLOntology srcOnt = g.getSourceOntology(); 
g.setSourceOntology(g.getManager().createOntology(IRI.create(subOntologyIRI))); g.addSupportOntology(srcOnt); subsetGenerator.createSubSet(g, results, g.getSupportOntologySet(), isSubOntExcludeClosure, isSubOntExcludeClosure); } } else if (opts.nextEq("--make-ontology-from-results")) { // TODO - use Mooncat opts.info("[-m] [-f] IRI", "takes the most recent reasoner query and generates a subset ontology using ONLY classes from results"); boolean followClosure = false; boolean useMooncat = false; while (opts.hasOpts()) { if (opts.nextEq("-f|--follow-closure|--fill-gaps")) { opts.info("", "using mooncat will have the effect of including the graph closure of all results in the output ontology"); followClosure = true; } else if (opts.nextEq("-m|--use-mooncat")) { opts.info("", "using mooncat will have the effect of including the graph closure of all results in the output ontology"); useMooncat = true; } else break; } if (followClosure) useMooncat = true; String subOntologyIRI = opts.nextOpt(); if (useMooncat) { Mooncat m = new Mooncat(g); Set<OWLClass> cs = new HashSet<OWLClass>(); for (OWLObject obj : owlObjectCachedSet) { if (obj instanceof OWLClass) cs.add((OWLClass) obj); } // TODO OWLOntology subOnt = m.makeMinimalSubsetOntology(cs, IRI.create(subOntologyIRI), followClosure); g.setSourceOntology(subOnt); } else { Set<OWLAxiom> subsetAxioms = new HashSet<OWLAxiom>(); Set <OWLObjectProperty> objPropsUsed = new HashSet<OWLObjectProperty>(); for (OWLOntology mergeOntology : g.getAllOntologies()) { for (OWLObject cls : owlObjectCachedSet) { if (cls instanceof OWLClass) { // TODO - translate equivalence axioms; assume inferred for now for (OWLAxiom ax : mergeOntology.getAxioms((OWLClass)cls, Imports.EXCLUDED)) { boolean ok = true; for (OWLClass refCls : ax.getClassesInSignature()) { if (!owlObjectCachedSet.contains(refCls)) { LOG.info("Skipping: "+ax); ok = false; break; } } if (ok) subsetAxioms.add(ax); } for (OWLAxiom ax : 
mergeOntology.getAnnotationAssertionAxioms(((OWLClass)cls).getIRI())) { subsetAxioms.add(ax); } } subsetAxioms.add(g.getDataFactory().getOWLDeclarationAxiom(((OWLClass)cls))); } } for (OWLAxiom ax : subsetAxioms) { objPropsUsed.addAll(ax.getObjectPropertiesInSignature()); } for (OWLObjectProperty p : objPropsUsed) { for (OWLOntology mergeOntology : g.getAllOntologies()) { subsetAxioms.addAll(mergeOntology.getAxioms(p, Imports.EXCLUDED)); subsetAxioms.addAll(mergeOntology.getAnnotationAssertionAxioms(p.getIRI())); } } OWLOntology subOnt = g.getManager().createOntology(IRI.create(subOntologyIRI)); g.getManager().addAxioms(subOnt, subsetAxioms); g.setSourceOntology(subOnt); } } else if (opts.nextEq("--remove-equivalent-to-nothing-axioms")) { Set<OWLAxiom> axs = new HashSet<OWLAxiom>(); OWLClass nothing = g.getDataFactory().getOWLNothing(); for (OWLAxiom ax : g.getSourceOntology().getAxioms(AxiomType.EQUIVALENT_CLASSES)) { if (ax.getClassesInSignature().contains(nothing)) { axs.add(ax); } } g.getManager().removeAxioms(g.getSourceOntology(), axs); } else if (opts.nextEq("--check-disjointness-axioms")) { opts.info("", "DEPRECATED: this command precedes the ability of Elk to check disjointness"); boolean isTranslateEquivalentToNothing = true; OWLPrettyPrinter owlpp = getPrettyPrinter(); OWLOntology ont = g.getSourceOntology(); Set<OWLObjectIntersectionOf> dPairs = new HashSet<OWLObjectIntersectionOf>(); Map<OWLClassExpression, Set<OWLClassExpression>> dMap = new HashMap<OWLClassExpression, Set<OWLClassExpression>>(); OWLClass nothing = g.getDataFactory().getOWLNothing(); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); if (isTranslateEquivalentToNothing) { // TODO for (OWLEquivalentClassesAxiom eca : ont.getAxioms(AxiomType.EQUIVALENT_CLASSES)) { if (eca.contains(nothing)) { for (OWLClassExpression x : eca.getClassExpressionsMinus(nothing)) { if (x instanceof OWLObjectIntersectionOf) { dPairs.add((OWLObjectIntersectionOf) x); System.out.println("TRANSLATED:"+x); } } 
rmAxioms.add(eca); } } } for (OWLDisjointClassesAxiom dca : ont.getAxioms(AxiomType.DISJOINT_CLASSES)) { for (OWLClassExpression x : dca.getClassExpressions()) { for (OWLClassExpression y : dca.getClassExpressions()) { if (!x.equals(y)) { dPairs.add(g.getDataFactory().getOWLObjectIntersectionOf(x,y)); } } } } g.getManager().removeAxioms(ont, ont.getAxioms(AxiomType.DISJOINT_CLASSES)); g.getManager().removeAxioms(ont, rmAxioms); reasoner.flush(); for (OWLObjectIntersectionOf x : dPairs) { //System.out.println("TESTING: "+owlpp.render(x)+" using "+reasoner); for (Node<OWLClass> v : reasoner.getSubClasses(x, false)) { if (v.contains(nothing)) continue; System.out.println("VIOLATION: "+owlpp.render(v.getRepresentativeElement())+" SubClassOf "+owlpp.render(x)); } } } else if (opts.nextEq("--remove-disjoints")) { List<AxiomType<? extends OWLAxiom>> disjointTypes = new ArrayList<AxiomType<? extends OWLAxiom>>(); disjointTypes.add(AxiomType.DISJOINT_CLASSES); disjointTypes.add(AxiomType.DISJOINT_UNION); disjointTypes.add(AxiomType.DISJOINT_OBJECT_PROPERTIES); disjointTypes.add(AxiomType.DISJOINT_DATA_PROPERTIES); for(AxiomType<? extends OWLAxiom> axtype : disjointTypes) { Set<? 
extends OWLAxiom> axioms = g.getSourceOntology().getAxioms(axtype); if (axioms.isEmpty() == false) { g.getManager().removeAxioms(g.getSourceOntology(), axioms); } } } else if (opts.nextEq("--abox-to-tbox")) { ABoxUtils.translateABoxToTBox(g.getSourceOntology()); } else if (opts.nextEq("--make-default-abox")) { ABoxUtils.makeDefaultIndividuals(g.getSourceOntology()); } else if (opts.nextEq("--tbox-to-abox")) { OWLInAboxTranslator t = new OWLInAboxTranslator(g.getSourceOntology()); while (opts.hasOpts()) { if (opts.nextEq("-p|--preserve-iris|--preserve-object-properties")) { opts.info("", "Use the same OP IRIs for ABox shows (danger will robinson!)"); t.setPreserveObjectPropertyIRIs(true); } else { break; } } OWLOntology abox = t.translate(); g.setSourceOntology(abox); } else if (opts.nextEq("--map-abox-to-results")) { Set<OWLClass> cs = new HashSet<OWLClass>(); for (OWLObject obj : owlObjectCachedSet) { if (obj instanceof OWLClass) cs.add((OWLClass) obj); } ABoxUtils.mapClassAssertionsUp(g.getSourceOntology(), reasoner, cs, null); } else if (opts.nextEq("--map-abox-to-namespace")) { String ns = opts.nextOpt(); Set<OWLClass> cs = new HashSet<OWLClass>(); for (OWLClass c : g.getSourceOntology().getClassesInSignature(Imports.INCLUDED)) { if (c.getIRI().toString().startsWith(ns)) cs.add(c); } ABoxUtils.mapClassAssertionsUp(g.getSourceOntology(), reasoner, cs, null); } else if (opts.nextEq("--reasoner-ask-all")) { opts.info("[-r REASONERNAME] [-s] [-a] AXIOMTYPE", "list all inferred equivalent named class pairs"); boolean isReplaceOntology = false; boolean isAddToCurrentOntology = false; boolean isDirect = true; boolean isRemoveIndirect = false; while (opts.hasOpts()) { if (opts.nextEq("-r")) { opts.info("REASONERNAME", "E.g. 
elk"); reasonerName = opts.nextOpt(); } else if (opts.nextEq("-s")) { opts.info("", "Replaces ALL axioms in ontology with inferred axioms"); isReplaceOntology = true; } else if (opts.nextEq("-a")) { opts.info("", "Add inferred axioms to current ontology"); isAddToCurrentOntology = true; } else if (opts.nextEq("--remove-indirect")) { opts.info("", "Remove indirect assertions from current ontology"); isRemoveIndirect = true; } else if (opts.nextEq("--indirect")) { opts.info("", "Include indirect inferences"); isDirect = false; } else { break; } } if (reasoner == null) { reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); } if (isRemoveIndirect && !isAddToCurrentOntology) { System.err.println("You asked to remove indirect but not to assert direct - I am proceeding, but check this is what you want"); } if (isRemoveIndirect && !isDirect) { System.err.println("You asked to remove indirect and yet you want indirect inferences - invalid combination"); System.exit(1); } Set<OWLAxiom> iAxioms = new HashSet<OWLAxiom>(); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); String q = opts.nextOpt().toLowerCase(); OWLPrettyPrinter owlpp = getPrettyPrinter(); OWLOntology ont = g.getSourceOntology(); for (OWLClass c : g.getSourceOntology().getClassesInSignature()) { if (q.startsWith("e")) { for (OWLClass ec : reasoner.getEquivalentClasses(c)) { OWLEquivalentClassesAxiom ax = g.getDataFactory().getOWLEquivalentClassesAxiom(c, ec); if (!ont.containsAxiom(ax, Imports.INCLUDED, AxiomAnnotations.IGNORE_AXIOM_ANNOTATIONS)) { LOG.info("INFERRED: "+owlpp.render(ax)); iAxioms.add(ax); } } } else if (q.startsWith("s")) { Set<OWLClass> supers = reasoner.getSuperClasses(c, isDirect).getFlattened(); for (OWLClass sc : supers) { OWLSubClassOfAxiom ax = g.getDataFactory().getOWLSubClassOfAxiom(c, sc); ax.getObjectPropertiesInSignature(); if (!ont.containsAxiom(ax, Imports.INCLUDED, AxiomAnnotations.IGNORE_AXIOM_ANNOTATIONS)) { LOG.info("INFERRED: "+owlpp.render(ax)); 
iAxioms.add(ax); } } if (isRemoveIndirect) { for (OWLClass sc : reasoner.getSuperClasses(c, false).getFlattened()) { if (!supers.contains(sc)) { OWLSubClassOfAxiom ax = g.getDataFactory().getOWLSubClassOfAxiom(c, sc); if (ont.containsAxiom(ax, Imports.INCLUDED, AxiomAnnotations.IGNORE_AXIOM_ANNOTATIONS)) { rmAxioms.add(ax); LOG.info("INDIRECT: "+owlpp.render(ax)); } } } } } } if (q.startsWith("i")) { for (OWLNamedIndividual i : g.getSourceOntology().getIndividualsInSignature()) { Set<OWLClass> types = reasoner.getTypes(i, isDirect).getFlattened(); for (OWLClass ce : types) { OWLClassAssertionAxiom ax = g.getDataFactory().getOWLClassAssertionAxiom(ce, i); if (!ont.containsAxiom(ax, Imports.INCLUDED, AxiomAnnotations.IGNORE_AXIOM_ANNOTATIONS)) { LOG.info("INFERRED: "+owlpp.render(ax)); iAxioms.add(ax); } } if (isRemoveIndirect) { for (OWLClass ce : reasoner.getTypes(i, false).getFlattened()) { if (!types.contains(ce)) { OWLClassAssertionAxiom ax = g.getDataFactory().getOWLClassAssertionAxiom(ce, i); if (ont.containsAxiom(ax, Imports.EXCLUDED, AxiomAnnotations.IGNORE_AXIOM_ANNOTATIONS)) { rmAxioms.add(ax); LOG.info("INDIRECT: "+owlpp.render(ax)); } } } } } } if (isReplaceOntology) { Set<OWLAxiom> allAxioms = ont.getAxioms(); g.getManager().removeAxioms(ont, allAxioms); g.getManager().addAxioms(ont, iAxioms); } if (isAddToCurrentOntology) { System.out.println("Adding "+iAxioms.size()+" axioms"); g.getManager().addAxioms(ont, iAxioms); } rmAxioms.retainAll(ont.getAxioms()); if (rmAxioms.size() > 0) { System.out.println("Removing "+rmAxioms.size()+" axioms"); g.getManager().removeAxioms(ont, rmAxioms); } } else if (opts.nextEq("--annotate-with-reasoner")) { opts.info("[-c OntologyIRI]", "annotated existing and inferred subClassOf axioms with source"); OWLOntology outputOntology = null; while (opts.hasOpts()) { if (opts.nextEq("-c||--create")) { outputOntology = g.getManager().createOntology(IRI.create(opts.nextOpt())); } else { break; } } ProvenanceReasonerWrapper pr = 
new ProvenanceReasonerWrapper(g.getSourceOntology(), new ElkReasonerFactory()); if (outputOntology != null) { pr.outputOntology = outputOntology; } pr.reason(); if (outputOntology != null) { g.setSourceOntology(outputOntology); } } else if (opts.nextEq("--run-reasoner")) { opts.info("[-r reasonername] [--assert-implied] [--indirect] [-u]", "infer new relationships"); boolean isAssertImplied = false; boolean isDirect = true; boolean isShowUnsatisfiable = false; boolean isRemoveUnsatisfiable = false; boolean showExplanation = false; String unsatisfiableModule = null; boolean traceModuleAxioms = false; // related to unsatisfiableModule while (opts.hasOpts()) { if (opts.nextEq("-r")) { opts.info("REASONERNAME", "selects the reasoner to use"); reasonerName = opts.nextOpt(); } else if (opts.nextEq("--assert-implied")) { isAssertImplied = true; } else if (opts.nextEq("--indirect")) { opts.info("", "include indirect inferences"); isDirect = false; } else if (opts.nextEq("-u|--list-unsatisfiable")) { opts.info("", "list all unsatisfiable classes"); isShowUnsatisfiable = true; } else if (opts.nextEq("-e|--show-explanation")) { opts.info("", "add a single explanation for each unsatisfiable class"); showExplanation = true; } else if (opts.nextEq("-x|--remove-unsatisfiable")) { opts.info("", "remove all unsatisfiable classes"); isRemoveUnsatisfiable = true; isShowUnsatisfiable = true; } else if (opts.nextEq("-m|--unsatisfiable-module")) { opts.info("", "create a module for the unsatisfiable classes."); unsatisfiableModule = opts.nextOpt(); } else if (opts.nextEq("--trace-module-axioms")) { traceModuleAxioms = true; } else { break; } } OWLPrettyPrinter owlpp = getPrettyPrinter(); boolean isQueryProcessed = false; if (reasoner == null) { reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); } if (isShowUnsatisfiable || isRemoveUnsatisfiable) { OWLClass nothing = g.getDataFactory().getOWLNothing(); Set<OWLObjectProperty> unsats = new HashSet<>(); for 
(OWLObjectProperty p : g.getSourceOntology().getObjectPropertiesInSignature(true)) { try { if (reasoner.getObjectPropertyDomains(p, false).getFlattened().contains(nothing)) { LOG.error("Domain is unsat: "+p); unsats.add(p); } if (reasoner.getObjectPropertyRanges(p, false).getFlattened().contains(nothing)) { LOG.error("Range is unsat: "+p); unsats.add(p); } } catch (UnsupportedOperationException e) { LOG.warn("Could not perform operation (expected with Elk)"); } } if (unsats.size() > 0) { LOG.error("Ontology has unsat properties - will not proceed"); exit(1); } else { LOG.info("All properties have satisfiable domain and range"); } } if (isShowUnsatisfiable || isRemoveUnsatisfiable) { int n = 0; Set<OWLClass> unsats = new HashSet<OWLClass>(); LOG.info("Finding unsatisfiable classes"); Set<OWLClass> unsatisfiableClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); ExplanationGenerator explanationGenerator = null; if (showExplanation) { OWLReasonerFactory factory = createReasonerFactory(reasonerName); explanationGenerator = new DefaultExplanationGenerator(g.getManager(), factory, g.getSourceOntology(), reasoner, null); } for (OWLClass c : unsatisfiableClasses) { if (c.isBuiltIn()) { continue; } unsats.add(c); StringBuilder msgBuilder = new StringBuilder(); msgBuilder.append("UNSAT: ").append(owlpp.render(c)); if (explanationGenerator != null) { Set<OWLAxiom> explanation = explanationGenerator.getExplanation(c); if (explanation.isEmpty() == false) { msgBuilder.append('\t'); msgBuilder.append("explanation:"); for (OWLAxiom axiom : explanation) { msgBuilder.append('\t'); msgBuilder.append(owlpp.render(axiom)); } } } System.out.println(msgBuilder); n++; } System.out.println("NUMBER_OF_UNSATISFIABLE_CLASSES: "+n); if (unsatisfiableModule != null) { LOG.info("Creating module for unsatisfiable classes in file: "+unsatisfiableModule); ModuleType mtype = ModuleType.BOT; OWLOntologyManager m = g.getManager(); SyntacticLocalityModuleExtractor sme = new 
SyntacticLocalityModuleExtractor(m, g.getSourceOntology(), mtype ); Set<OWLEntity> seeds = new HashSet<OWLEntity>(unsatisfiableClasses); Set<OWLAxiom> axioms = sme.extract(seeds); OWLOntology module = m.createOntology(); if (traceModuleAxioms) { axioms = traceAxioms(axioms, g, module.getOWLOntologyManager().getOWLDataFactory()); } m.addAxioms(module, axioms); File moduleFile = new File(unsatisfiableModule).getCanonicalFile(); m.saveOntology(module, IRI.create(moduleFile)); } if (n > 0) { if (isRemoveUnsatisfiable) { Mooncat m = new Mooncat(g); m.removeSubsetClasses(unsats, true); isQueryProcessed = true; } else { LOG.error("Ontology has unsat classes - will not proceed"); exit(1); } } } if (opts.hasOpts()) { if (opts.nextEq("-i")) { OWLClass qc = (OWLClass)resolveEntity(opts); System.out.println("Getting individuals of class: "+qc); for (Node<OWLNamedIndividual> ni : reasoner.getInstances(qc, false)) { for (OWLNamedIndividual i : ni.getEntities()) { System.out.println(i); } } isQueryProcessed = true; } } // this should probably be deprecated - deliberate // non-local effects from separate command if (queryExpressionMap != null) { // Assume --query-ontontology -m ONT has been processed for (OWLClass qc : queryExpressionMap.keySet()) { System.out.println(" CWQueryClass: "+owlpp.render(qc)+" "+qc.getIRI().toString()); OWLClassExpression ec = queryExpressionMap.get(qc); System.out.println(" CWQueryExpression: "+owlpp.render(ec)); // note jcel etc will not take class expressions NodeSet<OWLClass> results = reasoner.getSubClasses(qc, false); for (OWLClass result : results.getFlattened()) { if (reasoner.isSatisfiable(result)) { System.out.println(" "+owlpp.render(result)); } else { // will not report unsatisfiable classes, as they trivially //LOG.error("unsatisfiable: "+owlpp.render(result)); } } } isQueryProcessed = true; } if (!isQueryProcessed) { if (removedSubClassOfAxioms != null) { System.out.println("attempting to recapitulate "+removedSubClassOfAxioms.size()+" 
axioms"); for (OWLSubClassOfAxiom a : removedSubClassOfAxioms) { OWLClassExpression sup = a.getSuperClass(); if (sup instanceof OWLClass) { boolean has = false; for (Node<OWLClass> isup : reasoner.getSuperClasses(a.getSubClass(),false)) { if (isup.getEntities().contains(sup)) { has = true; break; } } System.out.print(has ? "POSITIVE: " : "NEGATIVE: "); System.out.println(owlpp.render(a)); } } } System.out.println("all inferences"); LOG.info("Checking for consistency..."); System.out.println("Consistent? "+reasoner.isConsistent()); if (!reasoner.isConsistent()) { for (OWLClass c : reasoner.getUnsatisfiableClasses()) { System.out.println("UNSAT: "+owlpp.render(c)); } } LOG.info("Iterating through all classes..."); for (OWLObject obj : g.getAllOWLObjects()) { if (obj instanceof OWLClass) { OWLClass c = ((OWLClass) obj); // find all asserted parents in ontology and its import closure; // we do not want to re-assert Set<OWLClassExpression> assertedSuperclasses = OwlHelper.getSuperClasses(c, g.getSourceOntology().getImportsClosure()); //assertedSuperclasses.addAll(c.getEquivalentClasses(g.getSourceOntology().getImportsClosure())); //Set<OWLClass> eqCs = reasoner.getEquivalentClasses(c).getEntities(); for (OWLClass sup : reasoner.getSuperClasses(c, isDirect).getFlattened()) { if (assertedSuperclasses.contains(sup)) { continue; } if (sup.isOWLThing()) continue; System.out.println("INFERENCE: "+owlpp.render(obj)+" SubClassOf "+owlpp.render(sup)); if (isAssertImplied) { OWLSubClassOfAxiom sca = g.getDataFactory().getOWLSubClassOfAxiom(c, sup); g.getManager().addAxiom(g.getSourceOntology(), sca); } } for (OWLClass ec : reasoner.getEquivalentClasses(((OWLClassExpression) obj)).getEntities()) { if (!ec.equals(obj)) System.out.println("INFERENCE: "+owlpp.render(obj)+" EquivalentTo "+owlpp.render(ec)); if (isAssertImplied) { OWLEquivalentClassesAxiom eca = g.getDataFactory().getOWLEquivalentClassesAxiom(c, ec); g.getManager().addAxiom(g.getSourceOntology(), eca); } } } } } } else 
if (opts.nextEq("--stash-subclasses")) { opts.info("[-a][--prefix PREFIX][--ontology RECAP-ONTOLOGY-IRI", "removes all subclasses in current source ontology; after reasoning, try to re-infer these"); boolean isDefinedOnly = true; Set<String> prefixes = new HashSet<String>(); OWLOntology recapOnt = g.getSourceOntology(); while (opts.hasOpts()) { if (opts.nextEq("--prefix")) { prefixes.add(opts.nextOpt()); } else if (opts.nextEq("-a")) { isDefinedOnly = false; } else if (opts.nextEq("--ontology")) { IRI ontIRI = IRI.create(opts.nextOpt()); recapOnt = g.getManager().getOntology(ontIRI); if (recapOnt == null) { LOG.error("Cannot find ontology: "+ontIRI+" from "+g.getManager().getOntologies().size()); for (OWLOntology ont : g.getManager().getOntologies()) { LOG.error(" I have: "+ont.getOntologyID()); } for (OWLOntology ont : g.getSourceOntology().getImportsClosure()) { LOG.error(" IC: "+ont.getOntologyID()); } } } else { break; } } Set<OWLSubClassOfAxiom> allAxioms = recapOnt.getAxioms(AxiomType.SUBCLASS_OF); removedSubClassOfAxioms = new HashSet<OWLSubClassOfAxiom>(); System.out.println("Testing "+allAxioms.size()+" SubClassOf axioms for stashing. 
Prefixes: "+prefixes.size()); HashSet<RemoveAxiom> rmaxs = new HashSet<RemoveAxiom>(); for (OWLSubClassOfAxiom a : allAxioms) { OWLClassExpression subc = a.getSubClass(); if (!(subc instanceof OWLClass)) { continue; } OWLClassExpression supc = a.getSuperClass(); if (!(supc instanceof OWLClass)) { continue; } if (prefixes.size() > 0) { boolean skip = true; for (String p : prefixes) { if (((OWLClass) subc).getIRI().toString().startsWith(p)) { skip = false; break; } } if (skip) break; } if (isDefinedOnly) { // TODO - imports closure if (OwlHelper.getEquivalentClasses((OWLClass)subc, g.getSourceOntology()).isEmpty()) { continue; } if (OwlHelper.getEquivalentClasses((OWLClass)supc, g.getSourceOntology()).isEmpty()) { continue; } } // TODO: remove it from the ontology in which it's asserted RemoveAxiom rmax = new RemoveAxiom(recapOnt,a); LOG.debug("WILL_REMOVE: "+a); rmaxs.add(rmax); removedSubClassOfAxioms.add(g.getDataFactory().getOWLSubClassOfAxiom(a.getSubClass(), a.getSuperClass())); } System.out.println("Will remove "+rmaxs.size()+" axioms"); for (RemoveAxiom rmax : rmaxs) { g.getManager().applyChange(rmax); } } else if (opts.nextEq("--list-cycles")) { boolean failOnCycle = false; if (opts.nextEq("-f|--fail-on-cycle")) { failOnCycle = true; } OWLPrettyPrinter owlpp = getPrettyPrinter(); int n = 0; for (OWLObject x : g.getAllOWLObjects()) { for (OWLObject y : g.getAncestors(x)) { if (g.getAncestors(y).contains(x)) { if (y instanceof OWLClass) { for (OWLGraphEdge e : g.getEdgesBetween(x, y)) { System.out.println(owlpp.render(x) + " in-cycle-with "+owlpp.render(y)+" // via " + e.getQuantifiedPropertyList()); } } n++; } } } System.out.println("Number of cycles: "+n); if (n > 0 && failOnCycle) System.exit(1); } else if (opts.nextEq("-a|--ancestors")) { opts.info("LABEL", "list edges in graph closure to root nodes"); Set<OWLPropertyExpression> props = new HashSet<OWLPropertyExpression>(); boolean useProps = false; while (opts.hasOpts()) { if (opts.nextEq("-p")) { 
opts.info("PROP", "Add this property to the set of interest"); props.add(this.resolveObjectProperty(opts.nextOpt())); useProps = true; } else if (opts.nextEq("--plist")) { opts.info("PROPLIST", "Terminated by '//'. Add these properties to the set of interest"); Set<OWLObjectProperty> nprops = this.resolveObjectPropertyList(opts); props.addAll(nprops); useProps = true; } else { break; } } OWLObject obj = resolveEntity(opts); System.out.println(obj+ " "+obj.getClass()+" P:"+props); if (!useProps) props = null; Set<OWLGraphEdge> edges = g.getOutgoingEdgesClosureReflexive(obj,props); showEdges(edges); } else if (opts.nextEq("--ancestor-nodes")) { opts.info("LABEL", "list nodes in graph closure to root nodes"); Set<OWLPropertyExpression> props = new HashSet<OWLPropertyExpression>(); boolean useProps = false; while (opts.hasOpts()) { if (opts.nextEq("-p")) { opts.info("PROP", "Add this property to the set of interest"); props.add(this.resolveObjectProperty(opts.nextOpt())); useProps = true; } else if (opts.nextEq("--plist")) { opts.info("PROPLIST", "Terminated by '//'. 
Add these properties to the set of interest"); Set<OWLObjectProperty> nprops = this.resolveObjectPropertyList(opts); props.addAll(nprops); useProps = true; } else { break; } } OWLObject obj = resolveEntity(opts); System.out.println(obj+ " "+obj.getClass()+" P:"+props); if (!useProps) props = null; for (OWLObject a : g.getAncestors(obj, props)) System.out.println(a); } else if (opts.nextEq("--parents-named")) { opts.info("LABEL", "list direct outgoing edges to named classes"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " "+obj.getClass()); Set<OWLGraphEdge> edges = g.getOutgoingEdges(obj); showEdges( edges); } else if (opts.nextEq("--parents")) { opts.info("LABEL", "list direct outgoing edges"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " "+obj.getClass()); Set<OWLGraphEdge> edges = g.getPrimitiveOutgoingEdges(obj); showEdges( edges); } else if (opts.nextEq("--grandparents")) { opts.info("LABEL", "list direct outgoing edges and their direct outgoing edges"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " "+obj.getClass()); Set<OWLGraphEdge> edges = g.getPrimitiveOutgoingEdges(obj); for (OWLGraphEdge e1 : edges) { System.out.println(e1); for (OWLGraphEdge e2 : g.getPrimitiveOutgoingEdges(e1.getTarget())) { System.out.println(" "+e2); } } } else if (opts.nextEq("--subsumers")) { opts.info("LABEL", "list named subsumers and subsuming expressions"); OWLObject obj = resolveEntity( opts); Set<OWLObject> ancs = g.getSubsumersFromClosure(obj); for (OWLObject a : ancs) { System.out.println(a); } } else if (opts.nextEq("--incoming-edges")) { opts.info("LABEL", "list edges in graph to leaf nodes"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " "+obj.getClass()); Set<OWLGraphEdge> edges = g.getIncomingEdges(obj); showEdges( edges); } else if (opts.nextEq("--descendant-edges")) { opts.info("LABEL", "list edges in graph closure to leaf nodes"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " 
"+obj.getClass()); Set<OWLGraphEdge> edges = g.getIncomingEdgesClosure(obj); showEdges( edges); } else if (opts.nextEq("--descendants")) { opts.info("LABEL", "show all descendant nodes"); OWLObject obj = resolveEntity( opts); OWLPrettyPrinter owlpp = getPrettyPrinter(); System.out.println("#" + obj+ " "+obj.getClass()+" "+owlpp.render(obj)); Set<OWLObject> ds = g.getDescendants(obj); for (OWLObject d : ds) System.out.println(d); } else if (opts.nextEq("--subsumed-by")) { opts.info("LABEL", "show all descendant nodes"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " "+obj.getClass()); Set<OWLObject> ds = g.queryDescendants((OWLClass)obj); for (OWLObject d : ds) System.out.println(d); } else if (opts.nextEq("-l") || opts.nextEq("--list-axioms")) { opts.info("LABEL", "lists all axioms for entity matching LABEL"); OWLObject obj = resolveEntity( opts); OWLPrettyPrinter owlpp = getPrettyPrinter(); owlpp.print("## Showing axiom for: "+obj); Set<OWLAxiom> axioms = g.getSourceOntology().getReferencingAxioms((OWLEntity) obj); owlpp.print(axioms); Set<OWLAnnotationAssertionAxiom> aaxioms = g.getSourceOntology().getAnnotationAssertionAxioms(((OWLNamedObject) obj).getIRI()); for (OWLAxiom a : aaxioms) { System.out.println(owlpp.render(a)); } } else if (opts.nextEq("--obsolete-class")) { opts.info("LABEL", "Add a deprecation axiom"); OWLObject obj = resolveEntity( opts); OWLPrettyPrinter owlpp = getPrettyPrinter(); owlpp.print("## Obsoleting: "+obj); Set<OWLAxiom> refAxioms = g.getSourceOntology().getReferencingAxioms((OWLEntity) obj); Set<OWLClassAxiom> axioms = g.getSourceOntology().getAxioms((OWLClass) obj, Imports.EXCLUDED); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); for (OWLAxiom ax : axioms) { if (ax.isLogicalAxiom()) { rmAxioms.add(ax); System.out.println("REMOVING:"+owlpp.render(ax)); } } for (OWLAxiom ax : refAxioms) { if (ax.isLogicalAxiom() && !rmAxioms.contains(ax)) { System.err.println("UH-OH: "+ax); } } 
g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms); System.err.println("TODO"); } else if (opts.nextEq("-d") || opts.nextEq("--draw")) { opts.info("[-o FILENAME] [-f FMT] LABEL/ID", "generates a file tmp.png made using QuickGO code"); String imgf = "tmp.png"; String fmt = "png"; while (opts.hasOpts()) { if (opts.nextEq("-o")) { opts.info("FILENAME", "name of png file to save (defaults to tmp.png)"); imgf = opts.nextOpt(); } else if (opts.nextEq("-f")) { opts.info("FMT", "image format. See ImageIO docs for a list. Default: png"); fmt = opts.nextOpt(); if (imgf.equals("tmp.png")) { imgf = "tmp."+fmt; } } else if (opts.nextEq("-p")) { OWLObjectProperty p = resolveObjectProperty(opts.nextOpt()); RelationConfig rc = gfxCfg.new RelationConfig(); rc.color = Color.MAGENTA; gfxCfg.relationConfigMap.put(p, rc); } else { break; } } OWLObject obj = resolveEntity( opts); System.out.println(obj); OWLGraphLayoutRenderer r = new OWLGraphLayoutRenderer(g); r.graphicsConfig = gfxCfg; r.addObject(obj); r.renderImage(fmt, new FileOutputStream(imgf)); //Set<OWLGraphEdge> edges = g.getOutgoingEdgesClosureReflexive(obj); //showEdges( edges); } else if (opts.nextEq("--draw-all")) { opts.info("", "draws ALL objects in the ontology (caution: small ontologies only)"); OWLGraphLayoutRenderer r = new OWLGraphLayoutRenderer(g); r.addAllObjects(); r.renderImage("png", new FileOutputStream("tmp.png")); } else if (opts.nextEq("--dump-node-attributes")) { opts.info("", "dumps all nodes attributes in CytoScape compliant format"); FileOutputStream fos; PrintStream stream = null; try { fos = new FileOutputStream(opts.nextOpt()); stream = new PrintStream(new BufferedOutputStream(fos)); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } stream.println("Label"); for (OWLObject obj : g.getAllOWLObjects()) { String label = g.getLabel(obj); if (label != null) stream.println(g.getIdentifier(obj)+"\t=\t"+label); } stream.close(); } else if 
(opts.nextEq("--dump-sif")) { opts.info("", "dumps CytoScape compliant sif format"); FileOutputStream fos; PrintStream stream = null; try { fos = new FileOutputStream(opts.nextOpt()); stream = new PrintStream(new BufferedOutputStream(fos)); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } for (OWLObject x : g.getAllOWLObjects()) { for (OWLGraphEdge e : g.getOutgoingEdges(x)) { OWLQuantifiedProperty qp = e.getSingleQuantifiedProperty(); String label; if (qp.getProperty() != null) label = qp.getProperty().toString(); else label = qp.getQuantifier().toString(); if (label != null) stream.println(g.getIdentifier(x)+"\t"+label+"\t"+g.getIdentifier(e.getTarget())); } } stream.close(); } else if (opts.nextEq("--sic|--slurp-import-closure")) { opts.info("[-d DIR] [-c CATALOG-OUT]","Saves local copy of import closure. Assumes sourceontology has imports"); String dir = "."; String catfile = "catalog-v001.xml"; while (opts.hasOpts()) { if (opts.nextEq("-d")) { dir = opts.nextOpt(); } else if (opts.nextEq("-c")) { catfile = opts.nextOpt(); } else { break; } } ImportClosureSlurper ics = new ImportClosureSlurper(g.getSourceOntology()); ics.save(dir, catfile); } else if (opts.nextEq("-o|--output")) { opts.info("[-f FORMAT] [--prefix PREFIX URI]* FILE", "writes source ontology."); OWLDocumentFormat ofmt = new RDFXMLDocumentFormat(); String ontURIStr = ""; if ( g.getSourceOntology().getOntologyID() != null && g.getSourceOntology().getOntologyID().getOntologyIRI().isPresent()) { ontURIStr = g.getSourceOntology().getOntologyID().getOntologyIRI().get().toString(); } while (opts.hasOpts()) { if (opts.nextEq("-f")) { opts.info("FORMAT", "omn OR ofn OR ttl OR owx OR ojs (experimental) OR obo (uses obooformat jar)"); String ofmtname = opts.nextOpt(); if (ofmtname.equals("manchester") || ofmtname.equals("omn")) { ofmt = new ManchesterSyntaxDocumentFormat(); } else if (ofmtname.equals("functional") || ofmtname.equals("ofn")) { ofmt = new 
FunctionalSyntaxDocumentFormat(); } else if (ofmtname.equals("turtle") || ofmtname.equals("ttl")) { ofmt = new TurtleDocumentFormat(); } else if (ofmtname.equals("trig")) { ofmt = new TrigDocumentFormat(); } else if (ofmtname.equals("xml") || ofmtname.equals("owx")) { ofmt = new OWLXMLDocumentFormat(); } else if (ofmtname.equals("ojs")) { ofmt = new OWLJSONFormat(); } else if (ofmtname.equals("jsonld")) { ofmt = new OWLJsonLDFormat(); } else if (ofmtname.equals("og") || ofmtname.equals("json")) { ofmt = new OWLOboGraphsFormat(); } else if (ofmtname.equals("oy") || ofmtname.equals("yaml")) { ofmt = new OWLOboGraphsYamlFormat(); } else if (ofmtname.equals("obo")) { if (opts.nextEq("-n|--no-check")) { pw.setCheckOboDoc(false); } ofmt = new OBODocumentFormat(); } } else if (opts.nextEq("--prefix")) { opts.info("PREFIX URIBASE","use PREFIX as prefix. Note: specify this sub-arg AFTER -f"); ofmt.asPrefixOWLOntologyFormat().setPrefix(opts.nextOpt(), opts.nextOpt()); } else { break; } } LOG.info("saving:"+ontURIStr+" using "+ofmt); if (opts.hasArgs()) { String outputFile = opts.nextOpt(); pw.saveOWL(g.getSourceOntology(), ofmt, outputFile); //pw.saveOWL(g.getSourceOntology(), opts.nextOpt()); } else { final String msg = "Missing output file for '-o' OR '--output' option. Output was not written to a file."; throw new OptionException(msg); } } else if (opts.nextEq("--filter-axioms")) { Set<AxiomType> types = new HashSet<AxiomType>(); while (opts.hasOpts()) { if (opts.nextEq("-t|--axiom-type")) { types.add( AxiomType.getAxiomType(opts.nextOpt()) ); } else { break; } } for (OWLOntology o : g.getSourceOntology().getImportsClosure()) { Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); for (OWLAxiom ax : o.getAxioms()) { if (!types.contains(ax.getAxiomType())) { rmAxioms.add(ax); } } LOG.info("Removing axioms: "+rmAxioms.size()); g.getManager().removeAxioms(o, rmAxioms); } } else if (opts.nextEq("--remove-axioms")) { opts.info("-t Type", "Removes axioms of specified type. 
May be specified multiple times"); AxiomType t = null; while (opts.hasOpts()) { if (opts.nextEq("-t|--axiom-type")) { opts.info("Type", "OWLAPI type. E.g. DisjointClasses"); t = AxiomType.getAxiomType(opts.nextOpt()); } else { break; } } for (OWLOntology o : g.getSourceOntology().getImportsClosure()) { Set<OWLAxiom> axioms = o.getAxioms(t); LOG.info("Removing axioms: "+axioms.size()); g.getManager().removeAxioms(o, axioms); } } else if (opts.nextEq("--remove-axiom-annotations")) { opts.info("", "If an axiom has 1 or more annotations, replace with annotation-free version"); for (OWLAxiom a : g.getSourceOntology().getAxioms()) { Set<OWLAnnotation> anns = a.getAnnotations(); if (anns.size() > 0) { AxiomAnnotationTools.changeAxiomAnnotations(a, new HashSet<OWLAnnotation>(), g.getSourceOntology()); } } } else if (opts.nextEq("--make-super-slim")) { opts.info("IDSPACES", "removes all classes not in the superclass closure of any ontology in one of the idspaces." + " also assers superclasses"); boolean isTempReasoner = false; if (reasoner == null) { reasoner = this.createReasoner(g.getSourceOntology(), "elk", g.getManager()); isTempReasoner = true; } String idspacesStr = opts.nextOpt(); LOG.info("idsps = "+idspacesStr); String[] idarr = idspacesStr.split(","); Set<String> idspaces = new HashSet<String>(Arrays.asList(idarr)); LOG.info("idsps = "+idspaces); Set<OWLClass> cs = new HashSet<OWLClass>(); for (OWLClass c : g.getAllOWLClasses()) { String id = g.getIdentifier(c); String[] idparts = id.split(":"); String idspace = idparts[0]; if (idspaces.contains(idspace)) { cs.addAll(reasoner.getEquivalentClasses(c).getEntities()); cs.addAll(reasoner.getSuperClasses(c, false).getFlattened()); } } AssertInferenceTool.assertInferences(g, false, false, false, true, false, false, false, null, null); Mooncat m = new Mooncat(g); m.removeSubsetComplementClasses(cs, true); if (isTempReasoner) { reasoner.dispose(); } } else if (opts.nextEq("--split-ontology")) { opts.info("[-p IRI-PREFIX] 
[-s IRI-SUFFIX] [-d OUTDIR] [-l IDSPACE1 ... IDPSPACEn]", "Takes current only extracts all axioms in ID spaces and writes to separate ontology PRE+lc(IDSPACE)+SUFFIX saving to outdir. Also adds imports"); String prefix = g.getSourceOntology().getOntologyID().getOntologyIRI().get().toString().replace(".owl", "/"); String suffix = "_import.owl"; String outdir = "."; Set<String> idspaces = new HashSet<String>(); while (opts.hasOpts()) { if (opts.nextEq("-p|--prefix")) prefix = opts.nextOpt(); else if (opts.nextEq("-s|--suffix")) suffix = opts.nextOpt(); else if (opts.nextEq("-d|--dir")) outdir = opts.nextOpt(); else if (opts.nextEq("-l|--idspaces")) { idspaces.addAll(opts.nextList()); } else break; } Mooncat m = new Mooncat(g); for (String idspace : idspaces) { LOG.info("Removing "+idspace); String name = prefix + idspace + suffix; IRI iri = IRI.create(name); OWLOntology subOnt = g.getManager().createOntology(iri); m.transferAxiomsUsingIdSpace(idspace, subOnt); AddImport ai = new AddImport(g.getSourceOntology(), g.getDataFactory().getOWLImportsDeclaration(iri)); g.getManager().applyChange(ai); String path = outdir + "/" + name.replaceAll(".*/", ""); FileOutputStream stream = new FileOutputStream(new File(path)); g.getManager().saveOntology(subOnt, stream); } } else if (opts.nextEq("--remove-subset")) { opts.info("[-d] SUBSET", "Removes a subset (aka slim) from an ontology"); boolean isRemoveDangling = true; while (opts.hasOpts()) { if (opts.nextEq("-d|--keep-dangling")) { opts.info("", "if specified, dangling axioms (ie pointing to removed classes) are preserved"); isRemoveDangling = false; } else break; } String subset = opts.nextOpt(); Set<OWLClass> cset = g.getOWLClassesInSubset(subset); LOG.info("Removing "+cset.size()+" classes"); Mooncat m = new Mooncat(g); m.removeSubsetClasses(cset, isRemoveDangling); } else if (opts.nextEq("--remove-axioms-about")) { opts.info("[-d] IDSPACES", "Removes axioms that are about the specified ID space"); boolean isRemoveDangling = 
true; while (opts.hasOpts()) { if (opts.nextEq("-d|--keep-dangling")) { opts.info("", "if specified, dangling axioms (ie pointing to removed classes) are preserved"); isRemoveDangling = false; } else break; } String idspace = opts.nextOpt(); Mooncat m = new Mooncat(g); m.removeAxiomsAboutIdSpace(idspace, isRemoveDangling); } else if (opts.nextEq("--remove-classes-in-idspace")) { opts.info("[-d] [-s IDSPACE]", "Removes classes in an ID space from ontology"); String idspace = null; boolean isRemoveDangling = true; while (opts.hasOpts()) { if (opts.nextEq("-s|--idspace")) { opts.info("", "ID space"); idspace = opts.nextOpt(); } else if (opts.nextEq("-d|--keep-dangling")) { opts.info("", "if specified, dangling axioms (ie pointing to removed classes) are preserved"); isRemoveDangling = false; } else break; } if (idspace == null) idspace = opts.nextOpt(); String idspaceFinal = idspace + ":"; LOG.info("IDSPACE: "+idspaceFinal); Set<OWLClass> cset = g.getAllOWLClasses().stream().filter( c -> g.getIdentifier(c).startsWith(idspaceFinal) ).collect(Collectors.toSet()); LOG.info("Removing "+cset.size()+ " classes from "+g.getAllOWLClasses().size()); Mooncat m = new Mooncat(g); m.removeSubsetClasses(cset, isRemoveDangling); } else if (opts.nextEq("--extract-subset")) { opts.info("SUBSET", "Extract a subset (aka slim) from an ontology, storing subset in place of existing ontology"); String subset = opts.nextOpt(); Set<OWLClass> cset = g.getOWLClassesInSubset(subset); LOG.info("Removing "+cset.size()+" classes"); Mooncat m = new Mooncat(g); m.removeSubsetComplementClasses(cset, false); } else if (opts.nextEq("--translate-undeclared-to-classes")) { for (OWLAnnotationAssertionAxiom a : g.getSourceOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION)) { OWLAnnotationSubject sub = a.getSubject(); if (sub instanceof IRI) { OWLObject e = g.getOWLObject(((IRI)sub)); if (e == null) { OWLClass c = g.getDataFactory().getOWLClass((IRI)sub); OWLDeclarationAxiom ax = 
g.getDataFactory().getOWLDeclarationAxiom(c); g.getManager().addAxiom(g.getSourceOntology(), ax); } } } } else if (opts.nextEq("--show-metadata")) { OntologyMetadata omd = new OntologyMetadata(); omd.generate(g); } else if (opts.nextEq("--follow-subclass")) { opts.info("", "follow subclass axioms (and also equivalence axioms) in graph traversal.\n"+ " default is to follow ALL. if this is specified then only explicitly specified edges followed"); if (g.getConfig().graphEdgeIncludeSet == null) g.getConfig().graphEdgeIncludeSet = new HashSet<OWLQuantifiedProperty>(); g.getConfig().graphEdgeIncludeSet.add(new OWLQuantifiedProperty(Quantifier.SUBCLASS_OF)); } else if (opts.nextEq("--follow-property")) { opts.info("PROP-LABEL", "follow object properties of this type in graph traversal.\n"+ " default is to follow ALL. if this is specified then only explicitly specified edges followed"); OWLObjectProperty p = (OWLObjectProperty) resolveEntity( opts); if (g.getConfig().graphEdgeIncludeSet == null) g.getConfig().graphEdgeIncludeSet = new HashSet<OWLQuantifiedProperty>(); g.getConfig().graphEdgeIncludeSet.add(new OWLQuantifiedProperty(p, null)); } else if (opts.nextEq("--exclude-property")) { opts.info("PROP-LABEL", "exclude object properties of this type in graph traversal.\n"+ " default is to exclude NONE."); OWLObjectProperty p = g.getOWLObjectProperty(opts.nextOpt()); System.out.println("Excluding "+p+" "+p.getClass()); if (g.getConfig().graphEdgeExcludeSet == null) g.getConfig().graphEdgeExcludeSet = new HashSet<OWLQuantifiedProperty>(); g.getConfig().graphEdgeExcludeSet.add(new OWLQuantifiedProperty(p, null)); } else if (opts.nextEq("--exclusion-annotation-property")) { opts.info("[-o ONT] PROP-LABEL", "exclude object properties of this type in graph traversal.\n"+ " default is to exclude NONE."); OWLOntology xo = g.getSourceOntology(); if (opts.hasOpts()) { if (opts.nextEq("-o")) { xo = pw.parse(opts.nextOpt()); } else break; } OWLAnnotationProperty ap = 
(OWLAnnotationProperty) g.getOWLObjectByLabel(opts.nextOpt()); g.getConfig().excludeAllWith(ap, xo); } else if (opts.nextEq("--inclusion-annotation-property")) { opts.info("[-o ONT] PROP-LABEL", "include object properties of this type in graph traversal.\n"+ " default is to include NONE."); OWLOntology xo = g.getSourceOntology(); if (opts.hasOpts()) { if (opts.nextEq("-o")) { xo = pw.parse(opts.nextOpt()); } else break; } OWLAnnotationProperty ap = (OWLAnnotationProperty) g.getOWLObjectByLabel(opts.nextOpt()); g.getConfig().includeAllWith(ap, xo); } else if (opts.nextEq("--exclude-metaclass")) { opts.info("METACLASS-LABEL", "exclude classes of this type in graph traversal.\n"+ " default is to follow ALL classes"); OWLClass c = (OWLClass) resolveEntity( opts); g.getConfig().excludeMetaClass = c; } else if (opts.nextEq("--label-abox")) { opts.info("[-e]", "Auto-add labels for individuals, using class label"); boolean isUseEntailed = false; boolean isOverwrite = false; while (opts.hasOpts()) { if (opts.nextEq("-e|--entailed")) { isUseEntailed = true; } if (opts.nextEq("-w|--overwrite")) { isOverwrite = true; } else { break; } } for (OWLNamedIndividual i : g.getSourceOntology().getIndividualsInSignature(true)) { if (g.getLabel(i) != null && !isOverwrite) { continue; } TransformationUtils.addLabel(i, g, reasoner); } } else if (opts.nextEq("--create-abox-subset")) { opts.info("CLASS", "Remove all ClassAssertions where the CE is not a subclass of the specified class"); OWLClass c = this.resolveClass(opts.nextOpt()); LOG.info("SUBSET: "+c); //Set<OWLNamedIndividual> inds = g.getSourceOntology().getIndividualsInSignature(true); Set<OWLClassAssertionAxiom> caas = g.getSourceOntology().getAxioms(AxiomType.CLASS_ASSERTION); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); for (OWLClassAssertionAxiom a : caas) { Set<OWLClass> sups = reasoner.getSuperClasses(a.getClassExpression(), false).getFlattened(); if (!sups.contains(c)) { rmAxioms.add(a); } } LOG.info("Removing: 
"+rmAxioms.size() + " / "+caas.size()); g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms); reasoner.flush(); } else if (opts.nextEq("--load-instances")) { TableToAxiomConverter ttac = new TableToAxiomConverter(g); ttac.config.axiomType = AxiomType.CLASS_ASSERTION; ttac.config.isSwitchSubjectObject = true; while (opts.hasOpts()) { if (opts.nextEq("-p|--property")) { ttac.config.property = ((OWLNamedObject) resolveObjectProperty( opts.nextOpt())).getIRI(); } else { break; } } String f = opts.nextOpt(); System.out.println("tabfile: "+f); ttac.parse(f); } else if (opts.nextEq("--load-labels")) { TableToAxiomConverter ttac = new TableToAxiomConverter(g); ttac.config.setPropertyToLabel(); ttac.config.axiomType = AxiomType.ANNOTATION_ASSERTION; String f = opts.nextOpt(); ttac.parse(f); } else if (opts.nextEq("--add-labels")) { Set<Integer> colsToLabel = new HashSet<Integer>(); while (opts.hasOpts()) { if (opts.nextEq("-c|--column")) { opts.info("COLNUMS", "number of col to label (starting from 1). 
Can be comma-separated list"); String v = opts.nextOpt(); for (String cn : v.split(",")) { colsToLabel.add(Integer.valueOf(cn)-1); } } else { break; } } LOG.info("Labeling: "+colsToLabel); File f = opts.nextFile(); List<String> lines = FileUtils.readLines(f); for (String line : lines) { String[] vals = line.split("\\t"); for (int i=0; i<vals.length; i++) { if (i>0) System.out.print("\t"); System.out.print(vals[i]); if (colsToLabel.contains(i)) { String label = "NULL"; String v = vals[i]; if (v != null && !v.equals("") && !v.contains(" ")) { OWLObject obj = g.getOWLObjectByIdentifier(v); if (obj != null) { label = g.getLabel(obj); } } System.out.print("\t"+label); } } System.out.println(); } } else if (opts.nextEq("--parse-tsv")) { opts.info("[-s] [-l] [--comment] [-m] [-p PROPERTY] [-a AXIOMTYPE] [-t INDIVIDUALSTYPE] FILE", "parses a tabular file to OWL axioms"); TableToAxiomConverter ttac = new TableToAxiomConverter(g); ttac.config.axiomType = AxiomType.CLASS_ASSERTION; while (opts.hasOpts()) { if (opts.nextEq("-s|--switch")) { opts.info("", "switch subject and object"); ttac.config.isSwitchSubjectObject = true; } else if (opts.nextEq("-l|--label")) { ttac.config.setPropertyToLabel(); ttac.config.axiomType = AxiomType.ANNOTATION_ASSERTION; } else if (opts.nextEq("--comment")) { ttac.config.setPropertyToComment(); ttac.config.axiomType = AxiomType.ANNOTATION_ASSERTION; } else if (opts.nextEq("-m|--map-xrefs")) { ttac.buildClassMap(g); } else if (opts.nextEq("-p|--prop")) { ttac.config.property = ((OWLNamedObject) resolveObjectProperty( opts.nextOpt())).getIRI(); //ttac.config.property = g.getOWLObjectProperty().getIRI(); } else if (opts.nextEq("--default1")) { ttac.config.defaultCol1 = opts.nextOpt(); } else if (opts.nextEq("--default2")) { ttac.config.defaultCol2 = opts.nextOpt(); } else if (opts.nextEq("--object-non-literal")) { ttac.config.isObjectLiteral = false; } else if (opts.nextEq("--iri-prefix")) { int col = 0; String x = opts.nextOpt(); if (x.equals("1") 
|| x.startsWith("s")) { col = 1; } else if (x.equals("2") || x.startsWith("o")) { col = 2; } else { // } String pfx = opts.nextOpt(); // note that we do not put the full URI prefix here for now //if (!pfx.startsWith("http:")) // pfx = "http://purl.obolibrary.org/obo/" + pfx + "_"; if (pfx.startsWith("http:")) ttac.config.iriPrefixMap.put(col, pfx); else ttac.config.iriPrefixMap.put(col, pfx+":"); } else if (opts.nextEq("-a|--axiom-type")) { ttac.config.setAxiomType(opts.nextOpt()); } else if (opts.nextEq("-t|--individuals-type")) { System.out.println("setting types"); ttac.config.individualsType = resolveClass( opts.nextOpt()); } else { throw new OptionException(opts.nextOpt()); } } String f = opts.nextOpt(); System.out.println("tabfile: "+f); ttac.parse(f); } else if (opts.nextEq("--parse-stanzas")) { opts.info("[-m KEY PROPERTY]* [-s] FILE", "parses a tabular file to OWL axioms"); StanzaToOWLConverter sc = new StanzaToOWLConverter(g); while (opts.hasOpts()) { if (opts.nextEq("-m|--map")) { String k = opts.nextOpt(); StanzaToOWLConverter.Mapping m = sc.new Mapping(); String p = opts.nextOpt(); m.property = this.resolveObjectProperty(p); // TODO - allow other types sc.config.keyMap.put(k, m); } else if (opts.nextEq("-s|--strict")) { opts.info("", "set if to be run in strict mode"); sc.config.isStrict = true; } else if (opts.nextEq("--prefix")) { sc.config.defaultPrefix = opts.nextOpt(); } else { continue; } } String f = opts.nextOpt(); System.out.println("tabfile: "+f); sc.parse(f); } else if (opts.nextEq("--idmap-extract-pairs")) { opts.info("IDType1 IDType2 PIRMapFile", "extracts pairs from mapping file"); IDMappingPIRParser p = new IDMappingPIRParser(); IDMapPairWriter h = new IDMapPairWriter(); h.setPair(opts.nextOpt(), opts.nextOpt()); p.handler = h; p.parse(new File(opts.nextOpt())); } else if (opts.nextEq("--parser-idmap")) { opts.info("UniProtIDMapFile", "..."); UniProtIDMapParser p = new UniProtIDMapParser(); p.parse(new File(opts.nextOpt())); 
System.out.println("Types:"+p.idMap.size()); // TODO... } else if (opts.nextEq("--extract-ontology-subset")) { opts.info("[-i FILE][-u IRI][-s SUBSET][--fill-gaps]", "performs slimdown using IDs from FILE or from named subset"); IRI subOntIRI = IRI.create("http://purl.obolibrary.org/obo/"+g.getOntologyId()+"-subset"); String fileName = null; String subset = null; boolean isFillGaps = false; boolean isSpanGaps = true; while (opts.hasOpts()) { if (opts.nextEq("-u|--uri|--iri")) { subOntIRI = IRI.create(opts.nextOpt()); } else if (opts.nextEq("-i|--input-file")) { fileName = opts.nextOpt(); } else if (opts.nextEq("-s|--subset")) { subset = opts.nextOpt(); } else if (opts.nextEq("--fill-gaps")) { isFillGaps = true; isSpanGaps = false; } else if (opts.nextEq("--minimal")) { isFillGaps = false; isSpanGaps = false; } else { break; } } Mooncat m = new Mooncat(g); Set<OWLClass> cs = new HashSet<OWLClass>(); if (fileName != null) { LOG.info("Reading IDs from: "+fileName); Set<String> unmatchedIds = new HashSet<String>(); for (String line : FileUtils.readLines(new File(fileName))) { OWLClass c = g.getOWLClassByIdentifierNoAltIds(line); if (c == null) { unmatchedIds.add(line); continue; } cs.add(c); } LOG.info("# IDs = "+cs.size()); if (unmatchedIds.size() > 0) { LOG.error(fileName+" contains "+unmatchedIds.size()+" unmatched IDs"); for (String id : unmatchedIds) { LOG.error("UNMATCHED: "+id); } } } if (subset != null) { LOG.info("Adding IDs from "+subset); cs.addAll(g.getOWLClassesInSubset(subset)); } if (cs.size() == 0) { LOG.warn("EMPTY SUBSET"); } // todo LOG.info("Making subset ontology seeded from "+cs.size()+" classes"); g.setSourceOntology(m.makeMinimalSubsetOntology(cs, subOntIRI, isFillGaps, isSpanGaps)); LOG.info("Made subset ontology; # classes = "+cs.size()); } else if (opts.nextEq("--extract-module")) { opts.info("[-n IRI] [-d] [-s SOURCE-ONTOLOGY] [-c] [-m MODULE-TYPE] SEED-OBJECTS", "Uses the OWLAPI module extractor"); String modIRI = null; ModuleType mtype = 
ModuleType.BOT; boolean isTraverseDown = false; boolean isMerge = false; OWLOntology baseOnt = g.getSourceOntology(); IRI dcSource = null; while (opts.hasOpts()) { if (opts.nextEq("-n")) { modIRI = opts.nextOpt(); } else if (opts.nextEq("-d")) { opts.info("", "Is set, will traverse down class hierarchy to form seed set"); isTraverseDown = true; } else if (opts.nextEq("-c|--merge")) { opts.info("", "Is set, do not use a command-line specified seed object list - use the source ontology as list of seeds"); isMerge = true; } else if (opts.nextEq("-s|--source")) { String srcName = opts.nextOpt(); baseOnt = g.getManager().getOntology(IRI.create(srcName)); if (baseOnt == null) { LOG.error("Could not find specified ontology "+srcName+" for --source"); } } else if (opts.nextEq("-m") || opts.nextEq("--module-type")) { opts.info("MODULE-TYPE", "One of: STAR, TOP, BOT (default)"); mtype = ModuleType.valueOf(opts.nextOpt()); } else { break; } } Set<OWLObject> objs = new HashSet<OWLObject>(); if (isMerge) { // add all relations and classes to seed set // merge support set closure objs.addAll( g.getSourceOntology().getObjectPropertiesInSignature() ); objs.addAll( g.getSourceOntology().getClassesInSignature() ); for (OWLOntology ont : g.getSupportOntologySet()) g.mergeOntology(ont); g.setSupportOntologySet(new HashSet<OWLOntology>()); } else { objs = this.resolveEntityList(opts); } LOG.info("OBJS: "+objs.size()); Set<OWLEntity> seedSig = new HashSet<OWLEntity>(); if (isTraverseDown) { OWLReasoner mr = this.createReasoner(baseOnt, reasonerName, g.getManager()); try { for (OWLObject obj : objs) { if (obj instanceof OWLClassExpression) { seedSig.addAll(mr.getSubClasses((OWLClassExpression) obj, false).getFlattened()); } else if (obj instanceof OWLObjectPropertyExpression) { for (OWLObjectPropertyExpression pe : mr.getSubObjectProperties((OWLObjectPropertyExpression) obj, false).getFlattened()) { if (pe instanceof OWLObjectProperty) { seedSig.add((OWLObjectProperty) pe); } } } } } 
finally { mr.dispose(); } } SyntacticLocalityModuleExtractor sme = new SyntacticLocalityModuleExtractor(g.getManager(), baseOnt, mtype); for (OWLObject obj : objs) { if (obj instanceof OWLEntity) { seedSig.add((OWLEntity) obj); } } Set<OWLAxiom> modAxioms = sme.extract(seedSig); OWLOntology modOnt; if (modIRI == null) { modOnt = g.getManager().createOntology(); } else { modOnt = g.getManager().createOntology(IRI.create(modIRI)); } if (dcSource == null) { OWLOntologyID oid = baseOnt.getOntologyID(); Optional<IRI> versionIRI = oid.getVersionIRI(); if (versionIRI.isPresent()) { dcSource = versionIRI.get(); } else { Optional<IRI> ontologyIRI = oid.getOntologyIRI(); if (ontologyIRI.isPresent()) { dcSource = ontologyIRI.get(); } } } g.getManager().addAxioms(modOnt, modAxioms); g.setSourceOntology(modOnt); if (dcSource != null) { LOG.info("Setting source: "+dcSource); OWLAnnotation ann = g.getDataFactory().getOWLAnnotation(g.getDataFactory().getOWLAnnotationProperty( IRI.create("http://purl.org/dc/elements/1.1/source")), dcSource); AddOntologyAnnotation addAnn = new AddOntologyAnnotation(g.getSourceOntology(), ann); g.getManager().applyChange(addAnn); } } else if (opts.nextEq("--translate-disjoint-to-equivalent|--translate-disjoints-to-equivalents")) { opts.info("", "adds (Xi and Xj = Nothing) for every DisjointClasses(X1...Xn) where i<j<n"); Mooncat m = new Mooncat(g); m.translateDisjointsToEquivalents(); } else if (opts.nextEq("--build-property-view-ontology|--bpvo")) { opts.info("[-p PROPERTY] [-o OUTFILE] [-r REASONER] [--filter-unused] [--prefix STR] [--suffix STR] [--avfile FILE] [--i2c]", "generates a new ontology O' from O using property P such that for each C in O, O' contains C' = P some C"); OWLOntology sourceOntol = g.getSourceOntology(); // TODO - for now assume exactly 0 or 1 support ontology; if 1, the support is the element ontology OWLOntology annotOntol; if (g.getSupportOntologySet().size() == 1) annotOntol = g.getSupportOntologySet().iterator().next(); 
else if (g.getSupportOntologySet().size() == 0) annotOntol = g.getManager().createOntology(); else throw new OptionException("must have zero or one support ontologies"); OWLObjectProperty viewProperty = null; String outFile = null; String suffix = null; String prefix = null; boolean isFilterUnused = false; boolean isReplace = false; boolean noReasoner = false; boolean isCreateReflexiveClasses = false; String avFile = null; String viewIRI = "http://example.org/"; while (opts.hasOpts()) { if (opts.nextEq("-p")) { opts.info("PROPERTY-ID-OR-LABEL", "The ObjectProperty P that is used to build the view"); viewProperty = resolveObjectProperty(opts.nextOpt()); } else if (opts.nextEq("-r")) { opts.info("REASONERNAME", "e.g. elk"); reasonerName = opts.nextOpt(); } else if (opts.nextEq("--no-reasoner|nr")) { opts.info("", "do not build an inferred view ontology"); noReasoner = true; } else if (opts.nextEq("--prefix")) { opts.info("STR", "each class in O(P) will have this prefix in its label"); prefix = opts.nextOpt(); } else if (opts.nextEq("--suffix")) { opts.info("STR", "each class in O(P) will have this suffix in its label"); suffix = opts.nextOpt(); } else if (opts.nextEq("-o")) { opts.info("FILE", "file to save O(P)' [i.e. reasoned view ontology] into"); outFile = opts.nextOpt(); } else if (opts.nextEq("--view-iri")) { opts.info("IRI", "IRI for the view ontology"); viewIRI = opts.nextOpt(); } else if (opts.nextEq("--avfile")) { opts.info("FILE", "file to save O(P) [i.e. 
non-reasoner view ontology] into"); avFile = opts.nextOpt(); } else if (opts.nextEq("--filter-unused")) { opts.info("", "if set, any class or individual that is not subsumed by P some Thing is removed from O(P)"); isFilterUnused = true; } else if (opts.nextEq("--reflexive")) { opts.info("", "Treat property as reflexive"); isCreateReflexiveClasses = true; } else if (opts.nextEq("--replace")) { opts.info("", "if set, the source ontology is replaced with O(P)'"); isReplace = true; } else if (opts.nextEq("" + "")) { annotOntol = g.getSourceOntology(); } else break; } PropertyViewOntologyBuilder pvob = new PropertyViewOntologyBuilder(sourceOntol, annotOntol, viewProperty); pvob.setViewLabelPrefix(prefix); pvob.setViewLabelSuffix(suffix); pvob.buildViewOntology(IRI.create("http://x.org/assertedViewOntology"), IRI.create(viewIRI)); pvob.setFilterUnused(isFilterUnused); pvob.setCreateReflexiveClasses(isCreateReflexiveClasses); OWLOntology avo = pvob.getAssertedViewOntology(); if (avFile != null) pw.saveOWL(avo, avFile); if (noReasoner) { pvob.setInferredViewOntology(pvob.getAssertedViewOntology()); } else { OWLReasoner vr = createReasoner(avo, reasonerName, g.getManager()); pvob.buildInferredViewOntology(vr); vr.dispose(); } // save if (outFile != null) pw.saveOWL(pvob.getInferredViewOntology(), outFile); else if (isReplace) { g.setSourceOntology(pvob.getInferredViewOntology()); } else { g.addSupportOntology(pvob.getInferredViewOntology()); } } else if (opts.nextEq("--materialize-property-inferences|--mpi")) { opts.info("[-p [-r] PROPERTY]... [-m|--merge]", "reasoned property view. 
Alternative to --bpvo"); // TODO - incorporate this into sparql query Set<OWLObjectProperty> vps = new HashSet<OWLObjectProperty>(); Set<OWLObjectProperty> reflexiveVps = new HashSet<OWLObjectProperty>(); boolean isMerge = false; boolean isPrereason = true; while (opts.hasOpts()) { if (opts.nextEq("-p")) { opts.info("[-r] PROPERTY-ID-OR-LABEL", "The ObjectProperty P that is used to build the view. If -r is specified the view is reflexive"); boolean isReflexive = false; if (opts.nextEq("-r|--reflexive")) isReflexive = true; String s = opts.nextOpt(); OWLObjectProperty viewProperty = resolveObjectProperty(s); if (viewProperty == null) { // the method resolveObjectProperty, will log already a error // escalate to an exception throw new IOException("Could not find an OWLObjectProperty for string: "+s); } vps.add(viewProperty); if (isReflexive) reflexiveVps.add(viewProperty); } else if (opts.nextEq("--merge|-m")) { isMerge = true; } else if (opts.nextEq("--no-assert-inferences|-n")) { isPrereason = false; } else { break; } } if (!isPrereason && !isMerge) { LOG.warn("ontology will be empty!"); } OWLOntology baseOntology = g.getSourceOntology(); OWLOntology vOnt = g.getManager().createOntology(); if (!isMerge) { // make the source ontology the new view g.setSourceOntology(vOnt); } Set<OWLClass> allvcs = new HashSet<OWLClass>(); for (OWLObjectProperty vp : vps) { PropertyViewOntologyBuilder pvob = new PropertyViewOntologyBuilder(baseOntology, vp); if (reflexiveVps.contains(vp)) pvob.setCreateReflexiveClasses(true); pvob.buildViewOntology(); OWLOntology avo = pvob.getAssertedViewOntology(); Set<OWLClass> vcs = avo.getClassesInSignature(); LOG.info("view for "+vp+" num view classes: "+vcs.size()); allvcs.addAll(vcs); g.mergeOntology(avo); // todo - more sophisticated } if (isPrereason) { if (reasoner == null) { reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); LOG.info("created reasoner: "+reasoner); } for (OWLClass c : 
g.getSourceOntology().getClassesInSignature(Imports.INCLUDED)) { Set<OWLClass> scs = reasoner.getSuperClasses(c, false).getFlattened(); for (OWLClass sc : scs) { OWLSubClassOfAxiom sca = g.getDataFactory().getOWLSubClassOfAxiom(c, sc); g.getManager().addAxiom(vOnt, sca); } // inferred (named classes) plus asserted (include class expressions) Set<OWLClassExpression> ecs = OwlHelper.getEquivalentClasses(c, g.getSourceOntology()); ecs.addAll(reasoner.getEquivalentClasses(c).getEntities()); for (OWLClassExpression ec : ecs) { if (ec.equals(c)) continue; OWLEquivalentClassesAxiom eca = g.getDataFactory().getOWLEquivalentClassesAxiom(c, ec); g.getManager().addAxiom(vOnt, eca); // bidirectional subclass axioms for each equivalent pair OWLSubClassOfAxiom sca1 = g.getDataFactory().getOWLSubClassOfAxiom(c, ec); g.getManager().addAxiom(vOnt, sca1); OWLSubClassOfAxiom sca2 = g.getDataFactory().getOWLSubClassOfAxiom(ec, c); g.getManager().addAxiom(vOnt, sca2); } } } else { } // TODO - turn allvcs into bnodes if (isMerge) { g.mergeOntology(vOnt); } else { g.setSourceOntology(vOnt); } } else if (opts.nextEq("--materialize-existentials")) { opts.info("[-p PROP][-l PROPLIST]", "builds view ontology with existentials named"); Set<OWLObjectSomeValuesFrom> svfs = new HashSet<OWLObjectSomeValuesFrom>(); Set<OWLObjectProperty> props = new HashSet<OWLObjectProperty>(); while (opts.hasOpts()) { if (opts.nextEq("-p")) { props.add(this.resolveObjectProperty(opts.nextOpt())); } else if (opts.nextEq("-l|--list")) { props.addAll(this.resolveObjectPropertyList(opts)); } else { break; } } LOG.info("Materializing: "+props); OWLPrettyPrinter owlpp = new OWLPrettyPrinter(g); for (OWLOntology ont : g.getAllOntologies()) { for (OWLAxiom ax : ont.getAxioms()) { if (ax instanceof OWLSubClassOfAxiom) { OWLClassExpression supc = ((OWLSubClassOfAxiom)ax).getSuperClass(); if (supc instanceof OWLObjectSomeValuesFrom) { svfs.add((OWLObjectSomeValuesFrom) supc); } } else if (ax instanceof 
OWLEquivalentClassesAxiom) { for (OWLClassExpression x : ((OWLEquivalentClassesAxiom)ax).getClassExpressions()) { if (x instanceof OWLObjectIntersectionOf) { for (OWLClassExpression y : ((OWLObjectIntersectionOf)x).getOperands()) { if (y instanceof OWLObjectSomeValuesFrom) { svfs.add((OWLObjectSomeValuesFrom) y); } } } } } } } Set<OWLAxiom> newAxioms = new HashSet<OWLAxiom>(); OWLDataFactory df = g.getDataFactory(); for (OWLObjectSomeValuesFrom svf : svfs) { if (svf.getFiller().isAnonymous()) continue; if (svf.getProperty().isAnonymous()) continue; OWLObjectProperty p = (OWLObjectProperty) svf.getProperty(); if (!props.contains(p)) continue; OWLClass c = (OWLClass) svf.getFiller(); PropertyViewOntologyBuilder pvob = new PropertyViewOntologyBuilder(g.getSourceOntology(), p); IRI xIRI = pvob.makeViewClassIRI(c.getIRI(), p.getIRI(), "-"); String label = "Reflexive "+ g.getLabel(p) + " " + g.getLabel(c); OWLClass xc = df.getOWLClass(xIRI); newAxioms.add(df.getOWLEquivalentClassesAxiom(xc, svf)); newAxioms.add(df.getOWLSubClassOfAxiom(c, xc)); newAxioms.add(df.getOWLAnnotationAssertionAxiom(df.getRDFSLabel(), xIRI, df.getOWLLiteral(label))); } LOG.info("Adding "+newAxioms.size()+ " axioms"); g.getManager().addAxioms(g.getSourceOntology(), newAxioms); } else if (opts.nextEq("--report-profile")) { g.getProfiler().report(); } else if (opts.nextEq("--no-cache")) { g.getConfig().isCacheClosure = false; } else if (opts.nextEq("--repeat")) { List<String> ops = new ArrayList<String>(); while (opts.hasArgs()) { if (opts.nextEq("--end")) { break; } else { String op = opts.nextOpt(); ops.add(op); } } // TODO } else if (opts.nextEq("--start-server")) { int port = 9000; while (opts.hasOpts()) { if (opts.nextEq("-p")) { port = Integer.parseInt(opts.nextOpt()); } else { break; } } Server server = new Server(port); server.setHandler(new OWLServer(g)); try { server.start(); server.join(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } else if 
(opts.nextEq("--create-ontology")) { opts.info("ONT-IRI", "creates a new OWLOntology and makes it the source ontology"); g = new OWLGraphWrapper(opts.nextOpt()); } else if (opts.nextEq("--parse-obo")) { String f = opts.nextOpt(); OWLOntology ont = pw.parseOBO(f); if (g == null) g = new OWLGraphWrapper(ont); else { System.out.println("adding support ont "+ont); g.addSupportOntology(ont); System.out.println("Added support ont"); } } else if (opts.nextEq("--load-ontologies-as-imports")) { opts.info("[ONT]+", "loads and adds the specified ontologies as imports"); List<String> ontologyList = opts.nextList(); if (ontologyList == null || ontologyList.isEmpty()) { LOG.error("No ontologies specified for the command. At least one ontology is required."); exit(-1); } // create a new empty ontology if there is no previous graph final OWLOntologyManager m; final OWLOntology containerOntology; if (g == null) { m = pw.getManager(); containerOntology = m.createOntology(IRI.generateDocumentIRI()); g = new OWLGraphWrapper(containerOntology); } else { m = g.getManager(); containerOntology = g.getSourceOntology(); } final OWLDataFactory factory = m.getOWLDataFactory(); for(String ont : ontologyList) { // load ontology OWLOntology owlOntology = pw.parse(ont); // check for usable ontology ID and ontology IRI OWLOntologyID ontologyID = owlOntology.getOntologyID(); if (ontologyID == null) { LOG.error("The ontology: "+ont+" does not have a valid ontology ID"); exit(-1); } else { Optional<IRI> documentIRI = ontologyID.getDefaultDocumentIRI(); if (documentIRI.isPresent() == false) { LOG.error("The ontology: "+ont+" does not have a valid document IRI"); exit(-1); }else { // add as import, instead of merge OWLImportsDeclaration importDeclaration = factory.getOWLImportsDeclaration(documentIRI.get()); OWLOntologyChange change = new AddImport(containerOntology, importDeclaration); m.applyChange(change); } } } } else { // check first if there is a matching annotated method // always check, to 
// support introspection via '-h'
boolean called = false;
Method[] methods = getClass().getMethods();
for (Method method : methods) {
	CLIMethod cliMethod = method.getAnnotation(CLIMethod.class);
	if (cliMethod !=null) {
		if (opts.nextEq(cliMethod.value())) {
			called = true;
			try {
				method.invoke(this, opts);
			} catch (InvocationTargetException e) {
				// the underlying method has thrown an exception;
				// unwrap and rethrow the original cause where possible
				Throwable cause = e.getCause();
				if (cause instanceof Exception) {
					throw ((Exception) cause);
				}
				throw e;
			}
		}
	}
}
if (called) {
	continue;
}
if (opts.hasArgs()) {
	// Default is to treat argument as an ontology
	// (OBO format if the path ends in "obo", OWL otherwise)
	String f = opts.nextOpt();
	try {
		OWLOntology ont = null;
		if (f.endsWith("obo")) {
			ont = pw.parseOBO(f);
		}
		else {
			ont = pw.parse(f);
		}
		// first ontology becomes the source graph; later ones are support ontologies
		if (g == null) {
			g = new OWLGraphWrapper(ont);
		}
		else {
			System.out.println("adding support ont "+ont);
			g.addSupportOntology(ont);
		}
	} catch (Exception e) {
		LOG.error("could not parse:"+f, e);
		if (exitOnException) {
			exit(1);
		}
		else {
			throw e;
		}
	}
}
else {
	if (opts.isHelpMode()) {
		helpFooter();
		// should only reach here in help mode
	}
}
}
}
}

/**
 * Placeholder pretty-printer for a single class; currently unimplemented
 * and always returns null.
 */
private String owlpp(OWLClass c) {
	// TODO Auto-generated method stub
	return null;
}

/**
 * Annotates each axiom with the IRI(s) of the ontology (from the source
 * ontology's imports closure) that asserts it, using the annotation
 * property http://trace.module/source-ont. Axioms found in no ontology of
 * the closure are passed through unchanged.
 *
 * @param axioms axioms to trace
 * @param g graph wrapper supplying the source ontology and its imports closure
 * @param df factory used to build the annotations
 * @return a new set containing the (possibly re-annotated) axioms
 */
static Set<OWLAxiom> traceAxioms(Set<OWLAxiom> axioms, OWLGraphWrapper g, OWLDataFactory df) {
	final OWLAnnotationProperty p = df.getOWLAnnotationProperty(IRI.create("http://trace.module/source-ont"));
	final Set<OWLOntology> ontologies = g.getSourceOntology().getImportsClosure();
	final Set<OWLAxiom> traced = new HashSet<OWLAxiom>();
	for (OWLAxiom axiom : axioms) {
		// find every ontology in the closure that contains this axiom
		Set<OWLOntology> hits = new HashSet<OWLOntology>();
		for(OWLOntology ont : ontologies) {
			if (ont.containsAxiom(axiom)) {
				hits.add(ont);
			}
		}
		if (hits.isEmpty()) {
			traced.add(axiom);
		}
		else {
			// keep existing axiom annotations and add one source annotation per hit
			Set<OWLAnnotation> annotations = new HashSet<OWLAnnotation>(axiom.getAnnotations());
			for (OWLOntology hit : hits) {
				Optional<IRI> hitIRI = hit.getOntologyID().getOntologyIRI();
				if(hitIRI.isPresent()) {
					annotations.add(df.getOWLAnnotation(p, hitIRI.get()));
				}
			}
			traced.add(AxiomAnnotationTools.changeAxiomAnnotations(axiom, annotations, df));
		}
	}
	return traced;
}

/**
 * Removes from {@code src} all axioms about classes that are not reachable
 * from {@code seedClasses} by walking asserted superclass/equivalent-class
 * expressions (signature closure, not reasoned closure).
 *
 * @param src ontology to prune in place
 * @param seedClasses starting classes for the reachability walk
 * @return the set of classes that were reachable (and therefore kept)
 */
private Set<OWLClass> removeUnreachableAxioms(OWLOntology src, Set<OWLClass> seedClasses) {
	// breadth/depth-style walk over the asserted class graph
	Stack<OWLClass> stack = new Stack<OWLClass>();
	stack.addAll(seedClasses);
	Set<OWLClass> visited = new HashSet<OWLClass>();
	visited.addAll(stack);
	while (!stack.isEmpty()) {
		OWLClass elt = stack.pop();
		Set<OWLClass> parents = new HashSet<OWLClass>();
		Set<OWLClassExpression> xparents = OwlHelper.getSuperClasses(elt, src);
		xparents.addAll(OwlHelper.getEquivalentClasses(elt, src));
		// every named class mentioned in a parent expression counts as reachable
		for (OWLClassExpression x : xparents) {
			parents.addAll(x.getClassesInSignature());
		}
		//parents.addAll(getReasoner().getSuperClasses(elt, true).getFlattened());
		//parents.addAll(getReasoner().getEquivalentClasses(elt).getEntities());
		parents.removeAll(visited);
		stack.addAll(parents);
		visited.addAll(parents);
	}
	LOG.info("# in closure set to keep: "+visited.size());
	// drop referencing axioms and declarations for everything unreachable
	Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>();
	for (OWLClass c : src.getClassesInSignature()) {
		if (!visited.contains(c)) {
			//LOG.info("removing axioms for EL-unreachable class: "+c);
			rmAxioms.addAll(src.getAxioms(c, Imports.EXCLUDED));
			rmAxioms.add(src.getOWLOntologyManager().getOWLDataFactory().getOWLDeclarationAxiom(c));
		}
	}
	src.getOWLOntologyManager().removeAxioms(src, rmAxioms);
	LOG.info("Removed "+rmAxioms.size()+" axioms. Remaining: "+src.getAxiomCount());
	return visited;
}

/**
 * Removes (in place) every axiom whose class signature contains at least
 * one class marked obsolete in the current graph.
 *
 * @param axioms set to filter in place
 */
private void removeAxiomsReferencingDeprecatedClasses(Set<OWLAxiom> axioms) {
	Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>();
	for (OWLAxiom axiom : axioms) {
		for (OWLClass c : axiom.getClassesInSignature()) {
			if (g.isObsolete(c)) {
				rmAxioms.add(axiom);
				break;
			}
		}
	}
	axioms.removeAll(rmAxioms);
}

/**
 * CLI command: writes external-database mapping files (e.g. ec2go-style
 * "db2go" files) by scanning xrefs of non-obsolete classes for the given
 * external DB prefixes. One output file per DB name is written to the
 * output folder, optionally preceded by a per-DB header file and a
 * generated timestamp/version header.
 *
 * @param opts CLI options (see the option parsing below for the flags)
 * @throws Exception on I/O or parsing failure
 */
@CLIMethod("--external-mappings-files")
public void createExternalMappings(Opts opts) throws Exception {
	if (g == null) {
		// NOTE(review): message mentions "gaf-run-check" — looks copy-pasted
		// from another command; confirm intended wording
		System.err.println("No graph available for gaf-run-check.");
		exit(-1);
		return;
	}
	File headerFilesFolder = null;
	String headerFileSuffix = ".header";
	List<String> externalDbNames = null;
	File outputFolder = new File(".").getCanonicalFile();
	String commentPrefix = "!";
	String labelPrefix = "";
	while (opts.hasOpts()) {
		if (opts.nextEq("-o|--output|--output-folder"))
			outputFolder = opts.nextFile().getCanonicalFile();
		else if (opts.nextEq("--go-external-default")) {
			// pre-canned list of external DBs used by GO
			externalDbNames = Arrays.asList("EC","MetaCyc","Reactome","RESID","UM-BBD_enzymeID","UM-BBD_pathwayID","Wikipedia");
			labelPrefix = "GO:";
		}
		else if(opts.nextEq("--label-prefix")) {
			labelPrefix = opts.nextOpt();
		}
		else if(opts.nextEq("--externals")) {
			externalDbNames = opts.nextList();
		}
		else if (opts.nextEq("--load-headers-from")) {
			headerFilesFolder = opts.nextFile().getCanonicalFile();
		}
		else if (opts.nextEq("--load-headers")) {
			headerFilesFolder = new File(".").getCanonicalFile();
		}
		else if (opts.nextEq("--set-header-file-suffix")) {
			headerFileSuffix = opts.nextOpt();
		}
		else if (opts.nextEq("--comment-prefix")) {
			commentPrefix = opts.nextOpt();
		}
		else {
			break;
		}
	}
	if (externalDbNames == null || externalDbNames.isEmpty()) {
		System.err.println("No external db for extraction defined.");
		exit(-1);
		return;
	}
	// setup date string and ontology version strings
	StringBuilder header = new StringBuilder();
	OWLOntology ont = g.getSourceOntology();
	String ontologyId = Owl2Obo.getOntologyId(ont);
	String dataVersion = Owl2Obo.getDataVersion(ont);
	// generated header: "<prefix> Generated on <UTC timestamp> from the ontology '<id>' ..."
	header.append(commentPrefix);
	header.append(" Generated on ");
	TimeZone tz = TimeZone.getTimeZone("UTC");
	DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'");
	df.setTimeZone(tz);
	header.append(df.format(new Date()));
	if (ontologyId != null) {
		header.append(" from the ontology '");
		header.append(ontologyId);
		header.append('\'');
		if (dataVersion != null) {
			header.append(" with data version: '");
			header.append(dataVersion);
			header.append('\'');
		}
	}
	header.append('\n');
	header.append(commentPrefix).append('\n');

	// load external mappings per db type
	for(String db : externalDbNames) {
		String prefix = db+":";
		// xref string -> classes carrying that xref
		Map<String, Set<OWLClass>> externalMappings = new HashMap<String, Set<OWLClass>>();
		Set<OWLClass> allOWLClasses = g.getAllOWLClasses();
		for (OWLClass owlClass : allOWLClasses) {
			boolean obsolete = g.isObsolete(owlClass);
			if (obsolete == false) {
				List<String> xrefs = g.getXref(owlClass);
				if (xrefs != null && !xrefs.isEmpty()) {
					for (String xref : xrefs) {
						if (xref.startsWith(prefix)) {
							// truncate at the first whitespace (drops trailing xref comments)
							String x = xref;
							int whitespacePos = xref.indexOf(' ');
							if (whitespacePos > 0) {
								x = xref.substring(0, whitespacePos);
							}
							Set<OWLClass> classSet = externalMappings.get(x);
							if (classSet == null) {
								classSet = new HashSet<OWLClass>();
								externalMappings.put(x, classSet);
							}
							classSet.add(owlClass);
						}
					}
				}
			}
		}
		// sort
		List<String> xrefList = new ArrayList<String>(externalMappings.keySet());
		Collections.sort(xrefList);

		// open writer
		// NOTE(review): writer is closed via closeQuietly at the end of the loop
		// body only — an exception mid-loop would leak it; consider try/finally
		BufferedWriter writer = new BufferedWriter(new FileWriter(new File(outputFolder, db.toLowerCase()+"2go")));

		// check for pre-defined headers
		if (headerFilesFolder != null) {
			File headerFile = new File(headerFilesFolder, db.toLowerCase()+headerFileSuffix);
			if (headerFile.isFile() && headerFile.canRead()) {
				LineIterator lineIterator = FileUtils.lineIterator(headerFile);
				while (lineIterator.hasNext()) {
					String line = lineIterator.next();
					// minor trickery
					// if the header lines do not have the comment prefix, add it
					if (line.startsWith(commentPrefix) == false)
					{
						writer.append(commentPrefix);
						writer.append(' ');
					}
					writer.append(line);
					writer.append('\n');
				}
			}
		}
		// add generated header
		writer.append(header);

		// append sorted xrefs, one "<xref> > <label> ; <id>" line per mapped class
		for (String xref : xrefList) {
			Set<OWLClass> classes = externalMappings.get(xref);
			List<OWLClass> classesList = new ArrayList<OWLClass>(classes);
			Collections.sort(classesList);
			for (OWLClass cls : classesList) {
				String id = g.getIdentifier(cls);
				String lbl = g.getLabel(cls);
				writer.append(xref);
				writer.append(" > ");
				writer.append(labelPrefix);
				writer.append(lbl);
				writer.append(" ; ");
				writer.append(id);
				writer.append('\n');
			}
		}
		IOUtils.closeQuietly(writer);
	}
}

/**
 * CLI command: materializes inferred ABox axioms. For every named
 * individual, asserts all reasoner-derived object-property values and
 * class assertions. Data properties are not handled.
 *
 * @param opts CLI options; -n|--new replaces the source ontology with a
 *        fresh one containing only the new axioms
 * @throws Exception on ontology-management failure
 */
@CLIMethod("--assert-abox-inferences")
public void assertAboxInferences(Opts opts) throws Exception {
	opts.info("", "Finds all inferred OPEs and ClassAssertions and asserts them. Does not handle DPEs. Resulting ontology can be used for sparql queries");
	boolean isNew = false;
	while (opts.hasOpts()) {
		if (opts.nextEq("-n|--new")) {
			isNew = true;
		}
		else
			break;
	}
	Set<OWLAxiom> newAxioms = new HashSet<OWLAxiom>();
	OWLOntology ont = g.getSourceOntology();
	// TODO : move this to a utility class
	OWLOntologyManager mgr = ont.getOWLOntologyManager();
	OWLDataFactory df = mgr.getOWLDataFactory();
	LOG.info("Initial axioms:"+ont.getAxioms(true).size());
	for (OWLNamedIndividual ind : ont.getIndividualsInSignature(Imports.INCLUDED)) {
		//LOG.info("Checking: "+ind);
		// assumes the 'reasoner' field was initialized by an earlier command — TODO confirm
		for (OWLObjectProperty p : ont.getObjectPropertiesInSignature(Imports.INCLUDED)) {
			NodeSet<OWLNamedIndividual> vs = reasoner.getObjectPropertyValues(ind, p);
			for (OWLNamedIndividual v : vs.getFlattened()) {
				//LOG.info("NEW: "+ind+" -> "+p+" -> "+v);
				newAxioms.add(df.getOWLObjectPropertyAssertionAxiom(p, ind, v));
			}
		}
		for (OWLClass c : reasoner.getTypes(ind, false).getFlattened()) {
			newAxioms.add(df.getOWLClassAssertionAxiom(c, ind));
			//LOG.info("NEW: "+ind+" :: "+c);
		}
	}
	OWLPrettyPrinter owlpp = new OWLPrettyPrinter(g);
	for (OWLAxiom a : newAxioms) {
		LOG.info("NEW: "+owlpp.render(a));
	}
	LOG.info("# OF NEW AXIOMS: "+newAxioms.size());
	if (isNew) {
		// start from an empty ontology so only inferred axioms remain
		g.setSourceOntology(mgr.createOntology());
	}
	mgr.addAxioms(g.getSourceOntology(), newAxioms);
}

/**
 * CLI command: asserts all entailed direct SubClassOf axioms that are not
 * already asserted, delegating to AssertInferenceTool. Flags control
 * redundancy removal, is_inferred tagging and equivalence checking; an
 * optional report file receives the tool's report.
 *
 * @param opts CLI options (see opts.info below)
 * @throws Exception on I/O or reasoning failure
 */
@CLIMethod("--assert-inferred-subclass-axioms")
public void assertInferredSubClassAxioms(Opts opts) throws Exception {
	opts.info("[--removeRedundant] [--keepRedundant] [--always-assert-super-classes] [--markIsInferred] [--useIsInferred] [--ignoreNonInferredForRemove] [--allowEquivalencies] [--reportProfile]",
			"Adds SubClassOf axioms for all entailed direct SubClasses not already asserted");
	boolean removeRedundant = true; // default: redundant axioms are removed
	boolean checkConsistency = true;
	boolean useIsInferred = false;
	boolean ignoreNonInferredForRemove = false;
	boolean checkForNamedClassEquivalencies = true;
	boolean checkForPotentialRedundant = false;
	boolean alwaysAssertSuperClasses = false;
	String reportFile = null;
	while (opts.hasOpts()) {
		if (opts.nextEq("--removeRedundant"))
			removeRedundant = true;
		else if (opts.nextEq("--keepRedundant")) {
			removeRedundant = false;
		}
		else if (opts.nextEq("--markIsInferred")) {
			useIsInferred = true;
		}
		else if (opts.nextEq("--useIsInferred")) {
			// implies ignoring non-inferred axioms during removal
			useIsInferred = true;
			ignoreNonInferredForRemove = true;
		}
		else if (opts.nextEq("--ignoreNonInferredForRemove")) {
			ignoreNonInferredForRemove = true;
		}
		else if (opts.nextEq("--allowEquivalencies")) {
			checkForNamedClassEquivalencies = false;
		}
		else if (opts.nextEq("--reportFile")) {
			reportFile = opts.nextOpt();
		}
		else if (opts.nextEq("--always-assert-super-classes")) {
			// NOTE(review): help string below is garbled ("entails in in solation");
			// doc-only change cannot alter runtime strings — fix separately
			opts.info("", "if specified, always assert a superclass, " +
					"even if there exists an equivalence axiom is trivially entails in in solation");
			alwaysAssertSuperClasses = true;
		}
		else {
			break;
		}
	}
	BufferedWriter reportWriter = null;
	if (reportFile != null) {
		reportWriter = new BufferedWriter(new FileWriter(reportFile));
	}
	OWLClassFilter filter = null;
	try {
		AssertInferenceTool.assertInferences(g, removeRedundant, checkConsistency, useIsInferred, ignoreNonInferredForRemove, checkForNamedClassEquivalencies,
				checkForPotentialRedundant, alwaysAssertSuperClasses, filter, reportWriter);
	}
	finally {
		IOUtils.closeQuietly(reportWriter);
	}
}

/**
 * CLI command: removes asserted SubClassOf axioms whose named superclass
 * is not among the reasoner's direct superclasses (i.e. redundant,
 * indirect links). Requires a consistent ontology with no unsatisfiable
 * classes and an initialized reasoner.
 * NOTE(review): method name says "Subclasses" but the command and behavior
 * remove redundant superclass axioms — confirm intended naming.
 *
 * @param opts CLI options (none consumed here)
 * @throws Exception on ontology-management failure
 */
@CLIMethod("--remove-redundant-superclass")
public void removeRedundantSubclasses(Opts opts) throws Exception {
	if (g == null) {
		LOG.error("No source ontology available.");
		exit(-1);
		return;
	}
	if (reasoner == null) {
		// NOTE(review): "resoner" typo in message (runtime string, left unchanged here)
		LOG.error("No resoner available.");
		exit(-1);
		return;
	}
	if (reasoner.isConsistent() == false) {
		LOG.error("Ontology is inconsistent.");
		exit(-1);
		return;
	}
	Set<OWLClass> unsatisfiableClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom();
	if (unsatisfiableClasses.isEmpty() == false) {
		LOG.error("Ontology contains unsatisfiable classes, count: "+unsatisfiableClasses.size());
		for (OWLClass cls : unsatisfiableClasses) {
			LOG.error("UNSAT:\t"+g.getIdentifier(cls)+"\t"+g.getLabel(cls));
		}
		exit(-1);
		return;
	}
	final OWLOntology rootOntology = reasoner.getRootOntology();
	final List<RemoveAxiom> changes = new ArrayList<RemoveAxiom>();
	Set<OWLClass> allClasses = rootOntology.getClassesInSignature(Imports.EXCLUDED);
	LOG.info("Check classes for redundant super class axioms, all OWL classes count: "+allClasses.size());
	for(OWLClass cls : allClasses) {
		final Set<OWLClass> directSuperClasses = reasoner.getSuperClasses(cls, true).getFlattened();
		Set<OWLSubClassOfAxiom> subClassAxioms = rootOntology.getSubClassAxiomsForSubClass(cls);
		for (final OWLSubClassOfAxiom subClassAxiom : subClassAxioms) {
			// visitor only fires for named superclasses; anonymous expressions are kept
			subClassAxiom.getSuperClass().accept(new OWLClassExpressionVisitorAdapter(){

				@Override
				public void visit(OWLClass desc) {
					if (directSuperClasses.contains(desc) == false) {
						changes.add(new RemoveAxiom(rootOntology, subClassAxiom));
					}
				}

			});
		}
	}
	LOG.info("Found redundant axioms: "+changes.size());
	rootOntology.getOWLOntologyManager().applyChanges(changes);
	LOG.info("Removed axioms: "+changes.size());
}

/**
 * GeneOntology specific function to create links between molecular
 * functions and their corresponding processes.
 * This method uses the exact matching of the equivalence axioms to
 * establish the part_of relations.<br>
 * All relations created by this method are going to be tagged with an axiom
 * annotation http://purl.org/dc/terms/source and corresponding GO_REF.
 *
 * @param opts CLI options: --go-ref, --annotation-iri, --target-file (required)
 * @throws Exception on ontology-management or save failure
 */
@CLIMethod("--create-part-of")
public void createPartOfLinks(Opts opts) throws Exception {
	if (g == null) {
		LOG.error("No source ontology available.");
		exit(-1);
		return;
	}
	if (reasoner == null) {
		// NOTE(review): "resoner" typo in message (runtime string, left unchanged here)
		LOG.error("No resoner available.");
		exit(-1);
		return;
	}
	String goRef = "GO_REF:0000090";
	String annotationIRIString = "http://purl.org/dc/terms/source";
	String targetFileName = null;
	while (opts.hasOpts()) {
		if (opts.nextEq("--go-ref")) {
			goRef = opts.nextOpt();
		}
		else if (opts.nextEq("--annotation-iri")) {
			annotationIRIString = opts.nextOpt();
		}
		else if (opts.nextEq("--target-file")) {
			targetFileName = opts.nextOpt();
		}
		else {
			break;
		}
	}
	if (targetFileName == null) {
		LOG.error("No target-file as output was specified.");
		exit(-1);
		return;
	}
	final File targetFile = new File(targetFileName);
	final IRI targetFileIRI = IRI.create(targetFile);
	final IRI annotationIRI = IRI.create(annotationIRIString);

	// first hard coded test for MF -> BP mappings:
	// transporter activity -part_of-> transporter
	// transmembrane transporter activity -part_of-> transmembrane transport
	final OWLClass ta = g.getOWLClassByIdentifier("GO:0005215"); // transporter activity
	final OWLClass t = g.getOWLClassByIdentifier("GO:0006810"); // transport
	final OWLClass tmta = g.getOWLClassByIdentifier("GO:0022857"); // transmembrane transport activity
	final OWLClass tmt = g.getOWLClassByIdentifier("GO:0055085"); // transmembrane transport

	final OWLObjectProperty partOf = g.getOWLObjectPropertyByIdentifier("part_of");
	final OWLObjectProperty transports = g.getOWLObjectPropertyByIdentifier("transports_or_maintains_localization_of");

	List<LinkPattern> patterns = new ArrayList<LinkPattern>(2);
	patterns.add(new LinkPattern(ta, t, transports, partOf));
	patterns.add(new LinkPattern(tmta, tmt, transports, partOf));

	OWLDataFactory factory = g.getDataFactory();
	OWLAnnotationProperty property = factory.getOWLAnnotationProperty(annotationIRI);
	OWLAnnotation sourceAnnotation = factory.getOWLAnnotation(property, factory.getOWLLiteral(goRef));
	LinkMaker maker = new LinkMaker(g, reasoner);
	LinkMakerResult result = maker.makeLinks(patterns, sourceAnnotation, false);
	LOG.info("Predictions size: "+result.getPredictions().size());
	OWLPrettyPrinter pp = getPrettyPrinter();
	for (OWLAxiom ax : result.getPredictions()) {
		LOG.info(pp.render(ax));
	}
	LOG.info("Existing size: "+result.getExisiting().size());
	LOG.info("Modified size: "+result.getModified().size());
	// replace existing axioms with their modified versions, then add predictions
	OWLOntologyManager manager = g.getManager();
	manager.removeAxioms(g.getSourceOntology(), result.getExisiting());
	manager.addAxioms(g.getSourceOntology(), result.getModified());
	manager.addAxioms(g.getSourceOntology(), result.getPredictions());
	manager.saveOntology(g.getSourceOntology(), targetFileIRI);
}

/**
 * CLI command: removes redundant existential restrictions — an axiom
 * X SubClassOf R some C is dropped when a sibling axiom X SubClassOf
 * R some D exists with D strictly more specific than C (per the reasoner).
 *
 * @param opts CLI options (--report-file is currently parsed but unused)
 * @throws Exception on ontology-management failure
 */
@CLIMethod("--remove-redundant-svfs")
public void removeRedundantSVFs(Opts opts) throws Exception {
	opts.info("", "removes redundant existentials: X R Some C, X R Some D, C SubClassOf* D");
	if (g == null) {
		LOG.error("No current ontology loaded");
		exit(-1);
	}
	if (reasoner == null) {
		LOG.error("No reasoner available for the current ontology");
		exit(-1);
	}
	while (opts.hasOpts()) {
		if (opts.nextEq("--report-file")) {
			//reportFile = opts.nextOpt();
		}
		else {
			break;
		}
	}
	Set<OWLSubClassOfAxiom> axioms = g.getSourceOntology().getAxioms(AxiomType.SUBCLASS_OF);
	Set<OWLSubClassOfAxiom> rmAxioms = new HashSet<OWLSubClassOfAxiom>();
	LOG.info("Candidates: " + axioms.size());
	for (OWLSubClassOfAxiom axiom : axioms) {
		if (axiom.getSubClass().isAnonymous())
			continue;
		OWLClass subClass = (OWLClass)axiom.getSubClass();
		if (axiom.getSuperClass() instanceof OWLObjectSomeValuesFrom) {
			//LOG.info("  TESTING " + axiom);
			OWLObjectSomeValuesFrom svf =
					((OWLObjectSomeValuesFrom)axiom.getSuperClass());
			// compare against every other existential on the same subclass and property
			for (OWLSubClassOfAxiom msAxiom : g.getSourceOntology().getSubClassAxiomsForSubClass(subClass)) {
				if (msAxiom.getSuperClass() instanceof OWLObjectSomeValuesFrom) {
					OWLObjectSomeValuesFrom mssvf =
							((OWLObjectSomeValuesFrom)msAxiom.getSuperClass());
					if (mssvf.getProperty().equals(svf.getProperty())) {
						if (!svf.getFiller().isAnonymous()) {
							// redundant when the other filler is strictly more specific
							if (reasoner.getSuperClasses(mssvf.getFiller(), false).
									containsEntity((OWLClass) svf.getFiller())) {
								LOG.info(axiom+" IS_REDUNDANT: "+mssvf.getFiller() + " more-specific-than "+svf.getFiller());
								rmAxioms.add(axiom);
							}
						}
					}
				}
				else if (!msAxiom.getSuperClass().isAnonymous()) {
					// TODO
				}
			}
		}
	}
	g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms);
}

/**
 * CLI command: removes existential restrictions that are redundant
 * according to an extended reasoner — an axiom X SubClassOf P some C is
 * dropped when C is not among the reasoner's direct superclasses of X
 * over P. Requires the current reasoner to implement OWLExtendedReasoner.
 *
 * @param opts CLI options (--report-file is currently parsed but unused)
 * @throws Exception on ontology-management failure
 */
@CLIMethod("--remove-redundant-inferred-svfs")
public void removeRedundantInferredSVFs(Opts opts) throws Exception {
	opts.info("", "removes redundant existentials using extended reasoner");
	if (g == null) {
		LOG.error("No current ontology loaded");
		exit(-1);
	}
	if (reasoner == null) {
		LOG.error("No reasoner available for the current ontology");
		exit(-1);
	}
	if (!(reasoner instanceof OWLExtendedReasoner)) {
		LOG.error("Reasoner is not extended");
		exit(-1);
	}
	OWLExtendedReasoner exr = (OWLExtendedReasoner)reasoner;
	while (opts.hasOpts()) {
		if (opts.nextEq("--report-file")) {
			//reportFile = opts.nextOpt();
		}
		else {
			break;
		}
	}
	OWLPrettyPrinter owlpp = new OWLPrettyPrinter(g);
	Set<OWLSubClassOfAxiom> axioms = g.getSourceOntology().getAxioms(AxiomType.SUBCLASS_OF);
	Set<OWLSubClassOfAxiom> rmAxioms = new HashSet<OWLSubClassOfAxiom>();
	LOG.info("Candidates: " + axioms.size());
	int n = 0; // progress counter, logged every 100 axioms
	for (OWLSubClassOfAxiom axiom : axioms) {
		n++;
		if (n % 100 == 0) {
			LOG.info("Testing axiom #" +n);
		}
		if (axiom.getSubClass().isAnonymous())
			continue;
		OWLClass subClass = (OWLClass)axiom.getSubClass();
		if (axiom.getSuperClass() instanceof OWLObjectSomeValuesFrom) {
			OWLObjectSomeValuesFrom svf = ((OWLObjectSomeValuesFrom)axiom.getSuperClass());
			// only named properties and fillers can be checked
			if (svf.getProperty().isAnonymous())
				continue;
			if (svf.getFiller().isAnonymous())
				continue;
			OWLObjectProperty p = (OWLObjectProperty)svf.getProperty();
			Set<OWLClass> directParents = exr.getSuperClassesOver(subClass, p, true);
			if (!directParents.contains(svf.getFiller())) {
				rmAxioms.add(axiom);
				LOG.info("  IS_REDUNDANT: "+owlpp.render(axiom)+" as filler not in "+directParents);
				for (OWLClass dp : directParents) {
					LOG.info("DIRECT_PARENT_OVER "+owlpp.render(p)+" "+owlpp.render(dp));
				}
			}
		}
	}
	g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms);
}

/**
 * CLI command: removes redundant and previously inferred SubClassOf
 * axioms via RedundantInferences, optionally writing a tab-separated
 * report of each removed link and the more-specific intermediate classes
 * that made it redundant.
 *
 * @param opts CLI options: --report-file FILE for the optional report
 * @throws Exception on I/O or reasoning failure
 */
@CLIMethod("--remove-redundant-inferred-super-classes")
public void removeRedundantInferredSuperClassAxioms(Opts opts) throws Exception {
	String reportFile = null;
	if (g == null) {
		LOG.error("No current ontology loaded");
		exit(-1);
	}
	if (reasoner == null) {
		LOG.error("No reasoner available for the current ontology");
		exit(-1);
	}
	while (opts.hasOpts()) {
		if (opts.nextEq("--report-file")) {
			reportFile = opts.nextOpt();
		}
		else {
			break;
		}
	}
	LOG.info("Start finding and removing redundant and previously inferred super classes");
	Map<OWLClass, Set<RedundantAxiom>> allRedundantAxioms = RedundantInferences.removeRedundantSubClassAxioms(g.getSourceOntology(), reasoner);
	if (reportFile == null) {
		LOG.warn("No report file available, skipping report.");
	}
	else {
		BufferedWriter writer = new BufferedWriter(new FileWriter(reportFile));
		try {
			List<OWLClass> sortedClasses = new ArrayList<OWLClass>(allRedundantAxioms.keySet());
			Collections.sort(sortedClasses);
			for (OWLClass cls : sortedClasses) {
				Set<RedundantAxiom> redundants = allRedundantAxioms.get(cls);
				List<OWLClass> superClasses = new ArrayList<OWLClass>(redundants.size());
				// superclass -> set of more-specific classes explaining the redundancy
				Map<OWLClass, Set<OWLClass>> intermediateClasses = new HashMap<OWLClass, Set<OWLClass>>();
				for(RedundantAxiom redundant : redundants) {
					OWLSubClassOfAxiom axiom = redundant.getAxiom();
					OWLClass superClass = axiom.getSuperClass().asOWLClass();
					superClasses.add(superClass);
					intermediateClasses.put(superClass,
							redundant.getMoreSpecific());
				}
				Collections.sort(superClasses);
				// one report line per removed sub/superclass pair:
				// REMOVE <subId> '<subLabel>' <superId> '<superLabel>' MORE SPECIFIC: <id> '<label>' ...
				for (OWLClass superClass : superClasses) {
					String subClassId = g.getIdentifier(cls);
					String subClassLabel = g.getLabel(cls);
					String superClassId = g.getIdentifier(superClass);
					String superClassLabel = g.getLabel(superClass);
					writer.append("REMOVE").append('\t').append(subClassId).append('\t');
					if (subClassLabel != null) {
						writer.append('\'').append(subClassLabel).append('\'');
					}
					writer.append('\t').append(superClassId).append('\t');
					if (superClassLabel != null) {
						writer.append('\'').append(superClassLabel).append('\'');
					}
					writer.append('\t').append("MORE SPECIFIC: ");
					for(OWLClass moreSpecific : intermediateClasses.get(superClass)) {
						String moreSpecificId = g.getIdentifier(moreSpecific);
						String moreSpecificLabel = g.getLabel(moreSpecific);
						writer.append('\t').append(moreSpecificId).append('\t');
						if (moreSpecificLabel != null) {
							writer.append('\'').append(moreSpecificLabel).append('\'');
						}
					}
					writer.append('\n');
				}
			}
		}
		finally {
			IOUtils.closeQuietly(writer);
		}
	}
}

/**
 * CLI command: removes all classes, individuals and object properties
 * tagged (via the OBO subset annotation property) with any of the given
 * subset names, together with all axioms referencing them.
 *
 * @param opts CLI options: one or more subset names
 * @throws Exception on ontology-management failure
 */
@CLIMethod("--remove-subset-entities")
public void removeSubsetEntities(Opts opts) throws Exception {
	opts.info("[SUBSET]+","Removes all classes, individuals and object properties that are in the specific subset(s)");
	List<String> subSets = opts.nextList();
	if (subSets == null || subSets.isEmpty()) {
		System.err.println("At least one subset is required for this function.");
		exit(-1);
	}
	// create annotation values to match
	Set<OWLAnnotationValue> values = new HashSet<OWLAnnotationValue>();
	OWLDataFactory f = g.getDataFactory();
	for(String subSet : subSets) {
		// subset as plain string
		values.add(f.getOWLLiteral(subSet));
		// subset as IRI
		values.add(IRI.create(Obo2OWLConstants.DEFAULT_IRI_PREFIX+"#"+subSet));
	}
	// get annotation property for subset
	OWLAnnotationProperty p = g.getAnnotationProperty(OboFormatTag.TAG_SUBSET.getTag());
	// collect all objects in the given subset
	final Set<OWLObject> entities = Mooncat.findTaggedEntities(p, values, g);
	LOG.info("Found "+entities.size()+" tagged objects.");
	if (entities.isEmpty() == false) {
		final List<RemoveAxiom> changes = Mooncat.findRelatedAxioms(entities, g);
		if (changes.isEmpty() == false) {
			LOG.info("applying changes to ontology, count: "+changes.size());
			g.getManager().applyChanges(changes);
		}
		else {
			LOG.info("No axioms found for removal.");
		}
	}
}

/**
 * Simple helper to create a subset tag for matching entities, allows to
 * specify exceptions.
 *
 * @param opts CLI options: [-s|--source SOURCE] -n|--subset NAME -p PREFIX [-e|--exception ID]
 * @throws Exception on parse failure, or RuntimeException when subset/prefix missing
 */
@CLIMethod("--create-subset-tags")
public void createSubsetTags(Opts opts) throws Exception {
	opts.info("[-s|--source SOURCE] -n|--subset SUBSET_NAME -p PREFIX [-e|--exception EXCEPTION]", "Create subset tags for all classes and properties, which match the id prefix (OBO style). Specifiy exceptions to skip entities.");
	String source = null;
	String subset = null;
	String prefix = null;
	final Set<String> matchExceptions = new HashSet<String>();
	while (opts.hasOpts()) {
		if (opts.nextEq("-s|--source")) {
			source = opts.nextOpt();
		}
		else if (opts.nextEq("-n|--subset")) {
			subset = opts.nextOpt();
		}
		else if (opts.nextEq("-p|--prefix")) {
			prefix = opts.nextOpt();
		}
		else if (opts.nextEq("-e|--exception")) {
			matchExceptions.add(opts.nextOpt());
		}
		else {
			break;
		}
	}
	if (subset == null) {
		throw new RuntimeException("A subset is required.");
	}
	if (prefix == null) {
		throw new RuntimeException("A prefix is required.");
	}
	// the signature to scan: either a separately loaded source ontology,
	// or everything in the current graph
	final Set<OWLEntity> signature;
	if (source != null) {
		ParserWrapper newPw = new ParserWrapper();
		newPw.addIRIMappers(pw.getIRIMappers());
		final OWLOntology sourceOntology = newPw.parse(source);
		signature = sourceOntology.getSignature(Imports.INCLUDED);
	}
	else {
		signature = new HashSet<OWLEntity>();
		for (OWLOntology o : g.getAllOntologies()) {
			signature.addAll(o.getSignature());
		}
	}
	final Set<IRI> upperLevelIRIs = new HashSet<IRI>();
	final String matchPrefix = prefix;
	for (OWLEntity owlEntity : signature) {
		// only classes and object properties are considered for tagging
		owlEntity.accept(new OWLEntityVisitorAdapter(){

			@Override
			public void visit(OWLClass cls) {
				String id = Owl2Obo.getIdentifier(cls.getIRI());
				if (id.startsWith(matchPrefix) && !matchExceptions.contains(id)) {
					upperLevelIRIs.add(cls.getIRI());
				}
			}

			@Override
			public void visit(OWLObjectProperty property) {
				String id = Owl2Obo.getIdentifier(property.getIRI());
				if (id.startsWith(matchPrefix) && !matchExceptions.contains(id)) {
					upperLevelIRIs.add(property.getIRI());
				}
			}

		});
	}
	final OWLOntologyManager m = g.getManager();
	final OWLDataFactory f = g.getDataFactory();
	final OWLAnnotationProperty p = g.getAnnotationProperty(OboFormatTag.TAG_SUBSET.getTag());
	final OWLAnnotation annotation = f.getOWLAnnotation(p, IRI.create(Obo2OWLConstants.DEFAULT_IRI_PREFIX+"#"+subset));
	for (IRI iri : upperLevelIRIs) {
		OWLAnnotationAssertionAxiom ax = f.getOWLAnnotationAssertionAxiom(iri, annotation);
		m.addAxiom(g.getSourceOntology(), ax);
	}
}

// CLI command: compares the current ontology against a previous release
// (continues beyond this chunk).
@CLIMethod("--verify-changes")
public void verifyChanges(Opts opts) throws Exception {
	String previousInput = null;
	String idFilterPrefix = null;
	boolean checkMissingLabels = false;
	String reportFile = null;
	while (opts.hasOpts()) {
		if (opts.nextEq("-p|--previous")) {
			previousInput = opts.nextOpt();
		}
		else if (opts.nextEq("--id-prefix-filter")) {
			idFilterPrefix = opts.nextOpt();
		}
		else if (opts.nextEq("--check-missing-labels")) {
			checkMissingLabels = true;
		}
		else if (opts.nextEq("-o|--report-file")) {
			reportFile = opts.nextOpt();
		}
		else {
			break;
		}
	}
	if (g == null) {
		LOG.error("No current ontology loaded for comparison");
		exit(-1);
	}
	else if (previousInput == null) {
		LOG.error("No previous ontology configured for comparison");
		exit(-1);
	}
	else {
		// create new parser & manager for clean load of previous ontology
		final ParserWrapper pw = new ParserWrapper();
		// use same IRI mappers as main parser
		List<OWLOntologyIRIMapper> mappers = this.pw.getIRIMappers();
		if (mappers != null) {
			for (OWLOntologyIRIMapper mapper : mappers) {
				pw.addIRIMapper(mapper);
			}
		}
		// load previous
		IRI previousIRI = IRI.create(new
File(previousInput).getCanonicalFile()); final OWLGraphWrapper previous = pw.parseToOWLGraph(previousIRI.toString()); LOG.info("Start verifying changes."); // create (filtered) class ids and labels, obsolete, alt_ids // prev final Map<String, String> previousIdLabels = Maps.newHashMap(); final Set<String> previousObsoletes = Sets.newHashSet(); final Set<String> previousAltIds = Sets.newHashSet(); extractClassInfo(previous, previousIdLabels, previousObsoletes, previousAltIds, idFilterPrefix); // current final Map<String, String> currentIdLabels = Maps.newHashMap(); final Set<String> currentObsoletes = Sets.newHashSet(); final Set<String> currentAltIds = Sets.newHashSet(); extractClassInfo(g, currentIdLabels, currentObsoletes, currentAltIds, idFilterPrefix); // check that all ids are also in the current ontology boolean hasErrors = false; // normal ids final List<String> missingIds = Lists.newArrayList(); final Map<String, String> missingLabels = Maps.newHashMap(); for(String previousId : previousIdLabels.keySet()) { if (!(currentIdLabels.containsKey(previousId) || currentAltIds.contains(previousId) || currentObsoletes.contains(previousId))) { missingIds.add(previousId); hasErrors = true; } else if (checkMissingLabels && currentAltIds.contains(previousId)) { // this id has been merged into another class // optional: check that all primary labels of merged terms are still in the merged term final OWLObject currentObject = g.getOWLObjectByAltId(previousId); final String currentLbl = g.getLabel(currentObject); final String previousLbl = previousIdLabels.get(previousId); if (currentLbl != null && previousLbl != null) { if (currentLbl.equals(previousLbl) == false) { // check synonyms List<ISynonym> synonyms = g.getOBOSynonyms(currentObject); boolean found = false; if (synonyms != null) { for (ISynonym synonym : synonyms) { if (previousLbl.equals(synonym.getLabel())) { found = true; break; } } } if (found == false) { hasErrors = true; missingLabels.put(previousId, 
previousLbl); } } } } } if (!missingIds.isEmpty()) { Collections.sort(missingIds); } // alt_ids final List<String> missingAltIds = Lists.newArrayList(Sets.difference(previousAltIds, currentAltIds)); if (!missingAltIds.isEmpty()) { Collections.sort(missingAltIds); hasErrors = true; } // obsolete Set<String> differenceObsolete = Sets.difference(previousObsoletes, currentObsoletes); if (!differenceObsolete.isEmpty()) { // special case: obsolete ids might be resurrected as valid ids differenceObsolete = Sets.difference(differenceObsolete, currentIdLabels.keySet()); } final List<String> missingObsoletes = Lists.newArrayList(differenceObsolete); if (!missingObsoletes.isEmpty()) { Collections.sort(missingObsoletes); hasErrors = true; } LOG.info("Verification finished."); // clean up old file in case of no errors if (!hasErrors && reportFile != null) { FileUtils.deleteQuietly(new File(reportFile)); } if (hasErrors) { LOG.error("The verification failed with the following errors."); PrintWriter writer = null; try { if (reportFile != null) { writer = new PrintWriter(new FileWriter(reportFile)); } for(String missingId : missingIds) { LOG.error("Missing ID: "+missingId); if (writer != null) { writer.append("MISSING-ID").append('\t').append(missingId).println(); } } for (String missingId : missingAltIds) { LOG.error("Missing alternate ID: "+missingId); if (writer != null) { writer.append("MISSING-ALT_ID").append('\t').append(missingId).println(); } } for (String missingId : missingObsoletes) { LOG.error("Missing obsolete ID: "+missingId); if (writer != null) { writer.append("MISSING-OBSOLETE_ID").append('\t').append(missingId).println(); } } for (Entry<String, String> missingEntry : missingLabels.entrySet()) { LOG.error("Missing primary label for merged term: '"+missingEntry.getValue()+"' "+missingEntry.getKey()); if (writer != null) { writer.append("MISSING-LABEL").append('\t').append(missingEntry.getValue()).append('\t').append(missingEntry.getKey()).println(); } } } finally { 
IOUtils.closeQuietly(writer); } exit(-1); } } } /** * @param graph * @param idLabels * @param obsoletes * @param allAltIds * @param idFilterPrefix */ private void extractClassInfo(OWLGraphWrapper graph, Map<String, String> idLabels, Set<String> obsoletes, Set<String> allAltIds, String idFilterPrefix) { for(OWLObject obj : graph.getAllOWLObjects()) { if (obj instanceof OWLClass) { String id = graph.getIdentifier(obj); if (idFilterPrefix != null && !id.startsWith(idFilterPrefix)) { continue; } List<String> altIds = graph.getAltIds(obj); if (altIds != null) { allAltIds.addAll(altIds); } boolean isObsolete = graph.isObsolete(obj); if (isObsolete) { obsoletes.add(id); } else { String lbl = graph.getLabel(obj); idLabels.put(id, lbl); } } } } @CLIMethod("--create-biochebi") public void createBioChebi(Opts opts) throws Exception { final String chebiPURL = "http://purl.obolibrary.org/obo/chebi.owl"; String chebiFile = null; String output = null; String ignoredSubset = "no_conj_equiv"; while (opts.hasOpts()) { if (opts.nextEq("-o|--output")) { output = opts.nextOpt(); } else if (opts.nextEq("-c|--chebi-file")) { chebiFile = opts.nextOpt(); } else if (opts.nextEq("-i|--ignored-subset")) { ignoredSubset = opts.nextOpt(); } else { break; } } if (chebiFile != null) { File inputFile = new File(chebiFile); OWLOntology chebiOWL = pw.parse(IRI.create(inputFile).toString()); // sanity check: // check that the purl is the expected one boolean hasOntologyId = false; OWLOntologyID ontologyID = chebiOWL.getOntologyID(); if (ontologyID != null) { Optional<IRI> ontologyIRI = ontologyID.getOntologyIRI(); if (ontologyIRI.isPresent()) { hasOntologyId = chebiPURL.equals(ontologyIRI.get().toString()); } } if (hasOntologyId == false) { throw new RuntimeException("The loaded ontology file ("+chebiFile+") does not have the expected ChEBI purl: "+chebiPURL); } } if (g == null) { // load default template InputStream stream = loadResource("bio-chebi-input.owl"); if (stream == null) { throw new 
RuntimeException("Could not load default bio chebi input file: 'bio-chebi-input.owl'"); } g = new OWLGraphWrapper(pw.getManager().loadOntologyFromOntologyDocument(stream)); } BioChebiGenerator.createBioChebi(g, ignoredSubset); if (output != null) { OWLOntology ontology = g.getSourceOntology(); File outFile = new File(output); ontology.getOWLOntologyManager().saveOntology(ontology, IRI.create(outFile)); } } @CLIMethod("--run-obo-basic-dag-check") public void runDAGCheck(Opts opts) throws Exception { if (g != null) { List<List<OWLObject>> cycles = OboBasicDagCheck.findCycles(g); if (cycles != null && !cycles.isEmpty()) { OWLPrettyPrinter pp = getPrettyPrinter(); System.err.println("Found cycles in the graph"); for (List<OWLObject> cycle : cycles) { StringBuilder sb = new StringBuilder("Cycle:"); for (OWLObject owlObject : cycle) { sb.append(" "); sb.append(pp.render(owlObject)); } System.err.println(sb); } } } } // @CLIMethod("--rdf-to-json-ld") // public void rdfToJsonLd(Opts opts) throws Exception { // String ofn = null; // while (opts.hasOpts()) { // if (opts.nextEq("-o")) { // ofn = opts.nextOpt(); // LOG.info("SAVING JSON TO: "+ofn); // } // else { // break; // } // } // File inputFile = opts.nextFile(); // LOG.info("input rdf: "+inputFile); // FileInputStream s = new FileInputStream(inputFile); // final Model modelResult = ModelFactory.createDefaultModel().read( // s, "", "RDF/XML"); // final JenaRDFParser parser = new JenaRDFParser(); // Options jsonOpts = new Options(); // // final Object json = JSONLD.fromRDF(modelResult, jsonOpts , parser); // FileOutputStream out = new FileOutputStream(ofn); // String jsonStr = JSONUtils.toPrettyString(json); // IOUtils.write(jsonStr, out); // } // // @CLIMethod("--json-ld-to-rdf") // public void jsonLdToRdf(Opts opts) throws Exception { // String ofn = null; // while (opts.hasOpts()) { // if (opts.nextEq("-o")) { // ofn = opts.nextOpt(); // } // else { // break; // } // } // final JSONLDTripleCallback callback = new 
JenaTripleCallback(); // // FileInputStream s = new FileInputStream(opts.nextFile()); // Object json = JSONUtils.fromInputStream(s); // final Model model = (Model) JSONLD.toRDF(json, callback); // // final StringWriter w = new StringWriter(); // model.write(w, "TURTLE"); // // FileOutputStream out = new FileOutputStream(ofn); // IOUtils.write(w.toString(), out); // } @CLIMethod("--extract-annotation-value") public void extractAnnotationValue(Opts opts) throws Exception { String delimiter = "\t"; String idPrefix = null; boolean addLabel = true; OWLAnnotationProperty valueProperty = null; String output = null; final OWLDataFactory f = g.getDataFactory(); final OWLAnnotationProperty rdfsLabel = f.getRDFSLabel(); while (opts.hasOpts()) { if (opts.nextEq("-p|--property")) { String propString = opts.nextOpt(); valueProperty = f.getOWLAnnotationProperty(IRI.create(propString)); } else if (opts.nextEq("-o|--output")) { output = opts.nextOpt(); } else if (opts.nextEq("-d|--delimiter")) { delimiter = opts.nextOpt(); } else if (opts.nextEq("--id-prefix")) { idPrefix = opts.nextOpt(); } else if (opts.nextEq("--excludeLabel")) { addLabel = false; } else { break; } } if (output == null) { LOG.error("No outfile specified."); exit(-1); } else if (valueProperty == null) { LOG.error("No property specified."); exit(-1); } else { List<String> lines = new ArrayList<String>(); final Set<OWLOntology> allOntologies = g.getAllOntologies(); LOG.info("Extracting values for property: "+valueProperty.getIRI()); for(OWLClass cls : g.getAllOWLClasses()) { final String id = g.getIdentifier(cls); if (idPrefix != null && !id.startsWith(idPrefix)) { continue; } String label = null; String propertyValue = null; Set<OWLAnnotationAssertionAxiom> allAnnotationAxioms = new HashSet<OWLAnnotationAssertionAxiom>(); for(OWLOntology ont : allOntologies) { allAnnotationAxioms.addAll(ont.getAnnotationAssertionAxioms(cls.getIRI())); } for (OWLAnnotationAssertionAxiom axiom : allAnnotationAxioms) { 
OWLAnnotationProperty currentProp = axiom.getProperty(); if (valueProperty.equals(currentProp)) { OWLAnnotationValue av = axiom.getValue(); if (av instanceof OWLLiteral) { propertyValue = ((OWLLiteral)av).getLiteral(); } } else if (addLabel && rdfsLabel.equals(currentProp)) { OWLAnnotationValue av = axiom.getValue(); if (av instanceof OWLLiteral) { label = ((OWLLiteral)av).getLiteral(); } } // stop search once the values are available if (propertyValue != null) { if(addLabel) { if (label != null) { break; } } else { break; } } } // write the information StringBuilder sb = new StringBuilder(); if (addLabel) { if (label != null && propertyValue != null) { sb.append(id); sb.append(delimiter); sb.append(label); sb.append(delimiter); sb.append(propertyValue); } } else { if (label != null && propertyValue != null) { sb.append(id); sb.append(delimiter); sb.append(propertyValue); } } lines.add(sb.toString()); } LOG.info("Finished extraction, sorting output."); Collections.sort(lines); File outputFile = new File(output).getCanonicalFile(); LOG.info("Write extracted properties to file: "+outputFile.getPath()); BufferedWriter writer = null; try { writer = new BufferedWriter(new FileWriter(outputFile)); for (String line : lines) { writer.append(line).append('\n'); } } finally { IOUtils.closeQuietly(writer); } } } /** * Extract all xps ({@link OWLEquivalentClassesAxiom}) from the loaded * ontology. Requires a set of roots classes to restrict the set of * extracted xps. 
* * @param opts * @throws Exception */ @CLIMethod("--extract-extension-file") public void extractExtensionFile(Opts opts) throws Exception { final Set<OWLClass> rootTerms = new HashSet<OWLClass>(); String ontologyIRI = null; String outputFileOwl = null; String outputFileObo = null; String versionIRI = null; while (opts.hasOpts()) { if (opts.nextEq("-id|--ontology-id")) { ontologyIRI = opts.nextOpt(); } else if (opts.nextEq("-owl|--output-owl")) { outputFileOwl = opts.nextOpt(); } else if (opts.nextEq("-obo|--output-obo")) { outputFileObo = opts.nextOpt(); } else if (opts.nextEq("-v|--version")) { versionIRI = opts.nextOpt(); } else if (opts.nextEq("-t|--term")) { String term = opts.nextOpt(); OWLClass owlClass = g.getOWLClassByIdentifierNoAltIds(term); if (owlClass != null) { rootTerms.add(owlClass); } else { throw new RuntimeException("Could not find a class for id: "+term); } } else { break; } } if (rootTerms.isEmpty()) { throw new RuntimeException("At least one term is required for filtering"); } if (ontologyIRI == null) { throw new RuntimeException("An ontology IRI is required."); } final OWLOntologyID newID; final IRI newOntologyIRI = IRI.create(ontologyIRI); if (versionIRI != null) { final IRI newVersionIRI = IRI.create(versionIRI); newID = new OWLOntologyID(Optional.of(newOntologyIRI), Optional.of(newVersionIRI)); } else { newID = new OWLOntologyID(Optional.of(newOntologyIRI), Optional.<IRI>absent()); } final OWLOntologyManager m = g.getManager(); final OWLOntology work = m.createOntology(newID); // filter axioms final Set<OWLObjectProperty> usedProperties = new HashSet<OWLObjectProperty>(); final Set<OWLAxiom> filtered = new HashSet<OWLAxiom>(); final OWLOntology source = g.getSourceOntology(); // get relevant equivalent class axioms for(OWLClass cls : source.getClassesInSignature()) { Set<OWLEquivalentClassesAxiom> eqAxioms = source.getEquivalentClassesAxioms(cls); for (OWLEquivalentClassesAxiom eqAxiom : eqAxioms) { if (hasFilterClass(eqAxiom, rootTerms)) 
{ filtered.add(eqAxiom); usedProperties.addAll(eqAxiom.getObjectPropertiesInSignature()); } } } // add used properties for (OWLObjectProperty p : usedProperties) { filtered.addAll(source.getDeclarationAxioms(p)); filtered.addAll(source.getAxioms(p, Imports.EXCLUDED)); filtered.addAll(source.getAnnotationAssertionAxioms(p.getIRI())); } // add all axioms into the ontology m.addAxioms(work, filtered); // write ontology // owl if (outputFileOwl != null) { OutputStream outputStream = new FileOutputStream(outputFileOwl); try { m.saveOntology(work, new RDFXMLDocumentFormat(), outputStream); } finally { outputStream.close(); } } // obo if (outputFileObo != null) { Owl2Obo owl2Obo = new Owl2Obo(); OBODoc doc = owl2Obo.convert(work); OBOFormatWriter writer = new OBOFormatWriter(); BufferedWriter fileWriter = null; try { fileWriter = new BufferedWriter(new FileWriter(outputFileObo)); NameProvider nameprovider = new OWLGraphWrapperNameProvider(g); writer.write(doc, fileWriter, nameprovider); } finally { IOUtils.closeQuietly(fileWriter); } } } /** * Retain only subclass of axioms and intersection of axioms if they contain * a class in it's signature of a given set of parent terms. 
* * For example, to create the x-chemical.owl do the following steps: * <ol> * <li>Load ChEBI as main ontology graph</li> * <li>(Optional) load go, recommended for OBO write</li> * <li>Setup reasoner: '--elk --init-reasoner'</li> * <li>'--filter-extension-file'</li> * <li>Load extensions file using: '-e' or '--extension-file'</li> * <li>Add required root terms: '-t' or '--term', use multiple paramteres to add multiple terms</li> * <li>Set ontology IRI for filtered file: '-id' or '--ontology-id'</li> * <li> set output files: * <ul> * <li>OWL: '-owl|--output-owl' owl-filename</li> * <li>OBO: '-obo|--output-obo' obo-filename</li> * </ul> * </li> * <li>(Optional) set version: '-v' or '--version'</li> * </ol> * @param opts * @throws Exception */ @CLIMethod("--filter-extension-file") public void filterExtensionFile(Opts opts) throws Exception { String extensionFile = null; final Set<OWLClass> rootTerms = new HashSet<OWLClass>(); String ontologyIRI = null; String outputFileOwl = null; String outputFileObo = null; String versionIRI = null; while (opts.hasOpts()) { if (opts.nextEq("-e|--extension-file")) { extensionFile = opts.nextOpt(); } else if (opts.nextEq("-id|--ontology-id")) { ontologyIRI = opts.nextOpt(); } else if (opts.nextEq("-owl|--output-owl")) { outputFileOwl = opts.nextOpt(); } else if (opts.nextEq("-obo|--output-obo")) { outputFileObo = opts.nextOpt(); } else if (opts.nextEq("-v|--version")) { versionIRI = opts.nextOpt(); } else if (opts.nextEq("-t|--term")) { String term = opts.nextOpt(); OWLClass owlClass = g.getOWLClassByIdentifierNoAltIds(term); if (owlClass != null) { rootTerms.add(owlClass); } else { throw new RuntimeException("Could not find a class for id: "+term); } } else { break; } } if (extensionFile == null) { throw new RuntimeException("No extension file was specified."); } if (rootTerms.isEmpty()) { throw new RuntimeException("At least one term is required for filtering"); } if (ontologyIRI == null) { throw new RuntimeException("An ontology 
IRI is required."); } // create new parser and new OWLOntologyManager ParserWrapper p = new ParserWrapper(); final OWLOntology work = p.parse(extensionFile); // update ontology ID final OWLOntologyID oldId = work.getOntologyID(); final IRI oldVersionIRI; if(oldId != null && oldId.getVersionIRI().isPresent()) { oldVersionIRI = oldId.getVersionIRI().get(); } else { oldVersionIRI = null; } final OWLOntologyID newID; final IRI newOntologyIRI = IRI.create(ontologyIRI); if (versionIRI != null) { final IRI newVersionIRI = IRI.create(versionIRI); newID = new OWLOntologyID(Optional.of(newOntologyIRI), Optional.of(newVersionIRI)); } else if (oldVersionIRI != null) { newID = new OWLOntologyID(Optional.of(newOntologyIRI), Optional.of(oldVersionIRI)); } else { newID = new OWLOntologyID(Optional.of(newOntologyIRI), Optional.<IRI>absent()); } // filter axioms Set<OWLAxiom> allAxioms = work.getAxioms(); for(OWLClass cls : work.getClassesInSignature()) { Set<OWLClassAxiom> current = work.getAxioms(cls, Imports.EXCLUDED); if (hasFilterClass(current, rootTerms) == false) { allAxioms.removeAll(work.getDeclarationAxioms(cls)); allAxioms.removeAll(current); allAxioms.removeAll(work.getAnnotationAssertionAxioms(cls.getIRI())); } } OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); OWLOntology filtered = manager.createOntology(newID); manager.addAxioms(filtered, allAxioms); // write ontology // owl if (outputFileOwl != null) { OutputStream outputStream = new FileOutputStream(outputFileOwl); try { manager.saveOntology(filtered, new RDFXMLDocumentFormat(), outputStream); } finally { outputStream.close(); } } // obo if (outputFileObo != null) { Owl2Obo owl2Obo = new Owl2Obo(); OBODoc doc = owl2Obo.convert(filtered); OBOFormatWriter writer = new OBOFormatWriter(); BufferedWriter fileWriter = null; try { fileWriter = new BufferedWriter(new FileWriter(outputFileObo)); NameProvider nameprovider = new OWLGraphWrapperNameProvider(g); writer.write(doc, fileWriter, nameprovider); } 
finally { IOUtils.closeQuietly(fileWriter); } } } /** * Check that there is an axiom, which use a class (in its signature) that * has a ancestor in the root term set. * * @param axioms set to check * @param rootTerms set root of terms * @return boolean */ private boolean hasFilterClass(Set<OWLClassAxiom> axioms, Set<OWLClass> rootTerms) { if (axioms != null && !axioms.isEmpty()) { for (OWLClassAxiom ax : axioms) { if (ax instanceof OWLEquivalentClassesAxiom) { Set<OWLClass> signature = ax.getClassesInSignature(); for (OWLClass sigCls : signature) { NodeSet<OWLClass> superClasses = reasoner.getSuperClasses(sigCls, false); for(OWLClass root : rootTerms) { if (superClasses.containsEntity(root)) { return true; } } } } } } return false; } /** * Check that there is an axiom, which use a class (in its signature) that * has a ancestor in the root term set. * * @param axioms set to check * @param rootTerms set root of terms * @return boolean */ private boolean hasFilterClass(OWLEquivalentClassesAxiom axiom, Set<OWLClass> rootTerms) { if (axiom != null) { Set<OWLClass> signature = axiom.getClassesInSignature(); for (OWLClass sigCls : signature) { NodeSet<OWLClass> superClasses = reasoner.getSuperClasses(sigCls, false); for(OWLClass root : rootTerms) { if (superClasses.containsEntity(root)) { return true; } } } } return false; } @CLIMethod("--create-slim") public void createSlim(Opts opts) throws Exception { String idResource = null; String outputOwl = null; String outputObo = null; String oldOwl = null; String oldObo = null; IRI ontologyIRI = null; // parse CLI options while (opts.hasOpts()) { if (opts.nextEq("--output-owl")) { outputOwl = opts.nextOpt(); } else if (opts.nextEq("--output-obo")) { outputObo = opts.nextOpt(); } else if (opts.nextEq("-i|--ids")) { idResource = opts.nextOpt(); } else if (opts.nextEq("--old-owl")) { oldOwl = opts.nextOpt(); } else if (opts.nextEq("--old-obo")) { oldObo = opts.nextOpt(); } else if (opts.nextEq("--iri")) { String iriString = 
opts.nextOpt(); ontologyIRI = IRI.create(iriString); } else { break; } } // check required parameters if (idResource == null) { throw new RuntimeException("No identifier resource specified. A list of terms is required to create a slim."); } if (outputOwl == null && outputObo == null) { throw new RuntimeException("No output file specified. At least one output file (obo or owl) is needed."); } if (ontologyIRI == null) { throw new RuntimeException("No IRI found. An ontology IRI is required."); } // set of all OWL classes required in the slim. Set<OWLClass> seeds = new HashSet<OWLClass>(); // create map of alternate identifiers for fast lookup Map<String, OWLObject> objectsByAltId = g.getAllOWLObjectsByAltId(); // load list of identifiers from file LineIterator lineIterator = FileUtils.lineIterator(new File(idResource)); while (lineIterator.hasNext()) { String line = lineIterator.next(); if (line.startsWith("#")) { continue; } addId(line, seeds, objectsByAltId); } // (optional) load previous slim in OWL. // Check that all classes are also available in the new base ontology. if (oldOwl != null) { ParserWrapper pw = new ParserWrapper(); OWLOntologyManager tempManager = pw.getManager(); OWLOntology oldSlim = tempManager.loadOntologyFromOntologyDocument(new File(oldOwl)); OWLGraphWrapper oldSlimGraph = new OWLGraphWrapper(oldSlim); Set<OWLClass> classes = oldSlim.getClassesInSignature(); for (OWLClass owlClass : classes) { boolean found = false; for(OWLOntology o : g.getAllOntologies()) { if (o.getDeclarationAxioms(owlClass).isEmpty() == false) { found = true; seeds.add(owlClass); break; } } if (!found) { LOG.warn("Could not find old class ("+oldSlimGraph.getIdentifier(owlClass)+") in new ontology."); } } oldSlimGraph.close(); } // (optional) load previous slim in OBO format. // Check that all classes are also available in the new base ontology. 
if (oldObo != null) { OBOFormatParser p = new OBOFormatParser(); OBODoc oboDoc = p.parse(new File(oldObo)); Collection<Frame> termFrames = oboDoc.getTermFrames(); if (termFrames != null) { for (Frame frame : termFrames) { String id = frame.getId(); addId(id, seeds, objectsByAltId); } } } // sanity check if (seeds.isEmpty()) { throw new RuntimeException("There are no classes in the seed set for the slim generation. Id problem or empty id resource?"); } // create the slim Mooncat mooncat = new Mooncat(g); OWLOntology slim = mooncat.makeMinimalSubsetOntology(seeds, ontologyIRI, true, false); mooncat = null; // write the output if (outputOwl != null) { File outFile = new File(outputOwl); slim.getOWLOntologyManager().saveOntology(slim, IRI.create(outFile)); } if (outputObo != null) { Owl2Obo owl2Obo = new Owl2Obo(); OBODoc oboDoc = owl2Obo.convert(slim); OBOFormatWriter w = new OBOFormatWriter(); w.write(oboDoc, outputObo); } } private void addId(String id, Set<OWLClass> seeds, Map<String, OWLObject> altIds) { id = StringUtils.trimToNull(id); if (id != null) { // #1 check alt_ids OWLObject owlObject = altIds.get(id); if (owlObject != null && owlObject instanceof OWLClass) { LOG.warn("Retrieving class "+g.getIdentifier(owlObject)+" by alt_id: "+id+"\nPlease consider updating your idenitifers."); seeds.add((OWLClass) owlObject); } // #2 use normal code path OWLClass cls = g.getOWLClassByIdentifier(id); if (cls != null) { seeds.add(cls); } else { LOG.warn("Could not find a class for id: "+id); } } } private InputStream loadResource(String name) { InputStream inputStream = getClass().getResourceAsStream(name); if (inputStream == null) { inputStream = ClassLoader.getSystemResourceAsStream(name); } if (inputStream == null) { File file = new File(name); if (file.isFile() && file.canRead()) { try { return new FileInputStream(file); } catch (FileNotFoundException exception) { // intentionally empty } } } return inputStream; } private OWLReasoner createReasoner(OWLOntology ont, 
String reasonerName, OWLOntologyManager manager) { OWLReasonerFactory reasonerFactory = createReasonerFactory(reasonerName); if (reasonerFactory == null) { System.out.println("no such reasoner: "+reasonerName); } else { reasoner = reasonerFactory.createReasoner(ont); LOG.info("Created reasoner: "+reasoner); } return reasoner; } private OWLReasonerFactory createReasonerFactory(String reasonerName) { OWLReasonerFactory reasonerFactory = null; if (reasonerName.equals("hermit")) { reasonerFactory = new org.semanticweb.HermiT.ReasonerFactory(); } else if (reasonerName.equals("ogr")) { reasonerFactory = new GraphReasonerFactory(); } else if (reasonerName.equals("mexr")) { if (reasonerFactory == null) { // set default to ELK reasonerFactory = new ElkReasonerFactory(); } reasonerFactory = new ExpressionMaterializingReasonerFactory(reasonerFactory); } else if (reasonerName.equals("elk")) { reasonerFactory = new ElkReasonerFactory(); } else if (reasonerName.equals("welk")) { System.out.println("The wrapping elk reasoner is deprecated, using normal elk instead"); reasonerFactory = new ElkReasonerFactory(); } return reasonerFactory; } private void catOntologies(Opts opts) throws OWLOntologyCreationException, IOException { opts.info("[-r|--ref-ont ONT] [-i|--use-imports]", "Catenate ontologies taking only referenced subsets of supporting onts.\n"+ " See Mooncat docs"); Mooncat m = new Mooncat(g); ParserWrapper pw = new ParserWrapper(); String newURI = null; while (opts.hasOpts()) { //String opt = opts.nextOpt(); if (opts.nextEq("-r") || opts.nextEq("--ref-ont")) { LOG.error("DEPRECATED - list all ref ontologies on main command line"); String f = opts.nextOpt(); m.addReferencedOntology(pw.parseOWL(f)); } else if (opts.nextEq("-s") || opts.nextEq("--src-ont")) { m.setOntology(pw.parseOWL(opts.nextOpt())); } else if (opts.nextEq("-p") || opts.nextEq("--prefix")) { m.addSourceOntologyPrefix(opts.nextOpt()); } else if (opts.nextEq("-i") || opts.nextEq("--use-imports")) { 
System.out.println("using everything in imports closure"); g.addSupportOntologiesFromImportsClosure(); } else if (opts.nextEq("-n") || opts.nextEq("--new-uri")) { System.out.println("new URI for merged ontology"); newURI = opts.nextOpt(); } else { break; //opts.fail(); } } //if (m.getReferencedOntologies().size() == 0) { // m.setReferencedOntologies(g.getSupportOntologySet()); //} //g.useImportClosureForQueries(); //for (OWLAxiom ax : m.getClosureAxiomsOfExternalReferencedEntities()) { // System.out.println("M_AX:"+ax); //} m.mergeOntologies(); m.removeDanglingAxioms(); if (newURI != null) { SetOntologyID soi = new SetOntologyID(g.getSourceOntology(), new OWLOntologyID(Optional.of(IRI.create(newURI)), Optional.<IRI>absent())); g.getManager().applyChange(soi); /* HashSet<OWLOntology> cpOnts = new HashSet<OWLOntology>(); LOG.info("srcOnt annots:"+g.getSourceOntology().getAnnotations().size()); cpOnts.add(g.getSourceOntology()); OWLOntology newOnt = g.getManager().createOntology(IRI.create(newURI), cpOnts); LOG.info("newOnt annots:"+newOnt.getAnnotations().size()); //g.getDataFactory().getOWLOn g.setSourceOntology(newOnt); */ } } private void showEdges(Set<OWLGraphEdge> edges) { OWLPrettyPrinter owlpp = new OWLPrettyPrinter(g); for (OWLGraphEdge e : edges) { System.out.println(owlpp.render(e)); } } }
OWLTools-Runner/src/main/java/owltools/cli/CommandRunner.java
package owltools.cli; import java.awt.Color; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.PrintStream; import java.io.PrintWriter; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.Stack; import java.util.TimeZone; import java.util.UUID; import java.util.Vector; import java.util.stream.Collectors; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.io.LineIterator; import org.apache.commons.lang3.StringUtils; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.eclipse.jetty.server.Server; import org.geneontology.reasoner.ExpressionMaterializingReasoner; import org.geneontology.reasoner.ExpressionMaterializingReasonerFactory; import org.geneontology.reasoner.OWLExtendedReasoner; import org.obolibrary.macro.MacroExpansionVisitor; import org.obolibrary.macro.ManchesterSyntaxTool; import org.obolibrary.obo2owl.Obo2OWLConstants; import org.obolibrary.obo2owl.Obo2OWLConstants.Obo2OWLVocabulary; import org.obolibrary.obo2owl.OboInOwlCardinalityTools; import org.obolibrary.obo2owl.Owl2Obo; import org.obolibrary.oboformat.model.Frame; import org.obolibrary.oboformat.model.OBODoc; import org.obolibrary.oboformat.parser.OBOFormatConstants.OboFormatTag; import 
org.obolibrary.oboformat.parser.OBOFormatParser; import org.obolibrary.oboformat.writer.OBOFormatWriter; import org.obolibrary.oboformat.writer.OBOFormatWriter.NameProvider; import org.semanticweb.elk.owlapi.ElkReasonerFactory; import org.semanticweb.owlapi.apibinding.OWLManager; import org.semanticweb.owlapi.formats.FunctionalSyntaxDocumentFormat; import org.semanticweb.owlapi.formats.ManchesterSyntaxDocumentFormat; import org.semanticweb.owlapi.formats.OBODocumentFormat; import org.semanticweb.owlapi.formats.OWLXMLDocumentFormat; import org.semanticweb.owlapi.formats.RDFXMLDocumentFormat; import org.semanticweb.owlapi.formats.TrigDocumentFormat; import org.semanticweb.owlapi.formats.TurtleDocumentFormat; import org.semanticweb.owlapi.io.OWLParserException; import org.semanticweb.owlapi.model.AddImport; import org.semanticweb.owlapi.model.AddOntologyAnnotation; import org.semanticweb.owlapi.model.AxiomType; import org.semanticweb.owlapi.model.IRI; import org.semanticweb.owlapi.model.OWLAnnotation; import org.semanticweb.owlapi.model.OWLAnnotationAssertionAxiom; import org.semanticweb.owlapi.model.OWLAnnotationProperty; import org.semanticweb.owlapi.model.OWLAnnotationSubject; import org.semanticweb.owlapi.model.OWLAnnotationValue; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLClass; import org.semanticweb.owlapi.model.OWLClassAssertionAxiom; import org.semanticweb.owlapi.model.OWLClassAxiom; import org.semanticweb.owlapi.model.OWLClassExpression; import org.semanticweb.owlapi.model.OWLDataFactory; import org.semanticweb.owlapi.model.OWLDeclarationAxiom; import org.semanticweb.owlapi.model.OWLDisjointClassesAxiom; import org.semanticweb.owlapi.model.OWLDocumentFormat; import org.semanticweb.owlapi.model.OWLEntity; import org.semanticweb.owlapi.model.OWLEquivalentClassesAxiom; import org.semanticweb.owlapi.model.OWLImportsDeclaration; import org.semanticweb.owlapi.model.OWLLiteral; import 
org.semanticweb.owlapi.model.OWLNamedIndividual; import org.semanticweb.owlapi.model.OWLNamedObject; import org.semanticweb.owlapi.model.OWLObject; import org.semanticweb.owlapi.model.OWLObjectIntersectionOf; import org.semanticweb.owlapi.model.OWLObjectProperty; import org.semanticweb.owlapi.model.OWLObjectPropertyCharacteristicAxiom; import org.semanticweb.owlapi.model.OWLObjectPropertyExpression; import org.semanticweb.owlapi.model.OWLObjectSomeValuesFrom; import org.semanticweb.owlapi.model.OWLOntology; import org.semanticweb.owlapi.model.OWLOntologyChange; import org.semanticweb.owlapi.model.OWLOntologyCreationException; import org.semanticweb.owlapi.model.OWLOntologyID; import org.semanticweb.owlapi.model.OWLOntologyIRIMapper; import org.semanticweb.owlapi.model.OWLOntologyManager; import org.semanticweb.owlapi.model.OWLProperty; import org.semanticweb.owlapi.model.OWLPropertyExpression; import org.semanticweb.owlapi.model.OWLSubClassOfAxiom; import org.semanticweb.owlapi.model.OWLSubObjectPropertyOfAxiom; import org.semanticweb.owlapi.model.OWLSubPropertyChainOfAxiom; import org.semanticweb.owlapi.model.RemoveAxiom; import org.semanticweb.owlapi.model.RemoveImport; import org.semanticweb.owlapi.model.SetOntologyID; import org.semanticweb.owlapi.model.parameters.AxiomAnnotations; import org.semanticweb.owlapi.model.parameters.Imports; import org.semanticweb.owlapi.reasoner.Node; import org.semanticweb.owlapi.reasoner.NodeSet; import org.semanticweb.owlapi.reasoner.OWLReasoner; import org.semanticweb.owlapi.reasoner.OWLReasonerFactory; import org.semanticweb.owlapi.util.AutoIRIMapper; import org.semanticweb.owlapi.util.OWLClassExpressionVisitorAdapter; import org.semanticweb.owlapi.util.OWLEntityRenamer; import org.semanticweb.owlapi.util.OWLEntityVisitorAdapter; import org.semanticweb.owlapi.util.SimpleIRIMapper; import org.semanticweb.owlapi.vocab.OWL2Datatype; import org.semanticweb.owlapi.vocab.OWLRDFVocabulary; import 
owltools.InferenceBuilder.OWLClassFilter; import owltools.RedundantInferences; import owltools.RedundantInferences.RedundantAxiom; import owltools.cli.tools.CLIMethod; import owltools.gfx.GraphicsConfig; import owltools.gfx.GraphicsConfig.RelationConfig; import owltools.gfx.OWLGraphLayoutRenderer; import owltools.graph.AxiomAnnotationTools; import owltools.graph.OWLGraphEdge; import owltools.graph.OWLGraphWrapper; import owltools.graph.OWLGraphWrapper.ISynonym; import owltools.graph.OWLGraphWrapperBasic.LabelPolicy; import owltools.graph.OWLQuantifiedProperty; import owltools.graph.OWLQuantifiedProperty.Quantifier; import owltools.idmap.IDMapPairWriter; import owltools.idmap.IDMappingPIRParser; import owltools.idmap.UniProtIDMapParser; import owltools.io.CatalogXmlIRIMapper; import owltools.io.ChadoGraphClosureRenderer; import owltools.io.CompactGraphClosureReader; import owltools.io.CompactGraphClosureRenderer; import owltools.io.EdgeTableRenderer; import owltools.io.GraphClosureRenderer; import owltools.io.GraphReader; import owltools.io.GraphRenderer; import owltools.io.ImportClosureSlurper; import owltools.io.InferredParentRenderer; import owltools.io.OWLJSONFormat; import owltools.io.OWLJsonLDFormat; import owltools.io.OWLOboGraphsFormat; import owltools.io.OWLOboGraphsYamlFormat; import owltools.io.OWLPrettyPrinter; import owltools.io.ParserWrapper; import owltools.io.ParserWrapper.OWLGraphWrapperNameProvider; import owltools.io.StanzaToOWLConverter; import owltools.io.TableRenderer; import owltools.io.TableToAxiomConverter; import owltools.mooncat.BridgeExtractor; import owltools.mooncat.Diff; import owltools.mooncat.DiffUtil; import owltools.mooncat.EquivalenceSetMergeUtil; import owltools.mooncat.Mooncat; import owltools.mooncat.OWLInAboxTranslator; import owltools.mooncat.PropertyExtractor; import owltools.mooncat.PropertyViewOntologyBuilder; import owltools.mooncat.ProvenanceReasonerWrapper; import owltools.mooncat.QuerySubsetGenerator; import 
owltools.mooncat.SpeciesMergeUtil; import owltools.mooncat.SpeciesSubsetterUtil; import owltools.mooncat.TransformationUtils; import owltools.mooncat.ontologymetadata.ImportChainDotWriter; import owltools.mooncat.ontologymetadata.ImportChainExtractor; import owltools.mooncat.ontologymetadata.OntologyMetadataMarkdownWriter; import owltools.ontologyrelease.OboBasicDagCheck; import owltools.ontologyrelease.OntologyMetadata; import owltools.reasoner.GCIUtil; import owltools.reasoner.GraphReasonerFactory; import owltools.renderer.markdown.MarkdownRenderer; import owltools.sim2.preprocessor.ABoxUtils; import owltools.tr.LinkMaker; import owltools.tr.LinkMaker.LinkMakerResult; import owltools.tr.LinkMaker.LinkPattern; import owltools.util.OwlHelper; import owltools.web.OWLServer; import uk.ac.manchester.cs.owlapi.modularity.ModuleType; import uk.ac.manchester.cs.owlapi.modularity.SyntacticLocalityModuleExtractor; import com.clarkparsia.owlapi.explanation.DefaultExplanationGenerator; import com.clarkparsia.owlapi.explanation.ExplanationGenerator; import com.google.common.base.Optional; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import de.derivo.sparqldlapi.Query; import de.derivo.sparqldlapi.QueryArgument; import de.derivo.sparqldlapi.QueryBinding; import de.derivo.sparqldlapi.QueryEngine; import de.derivo.sparqldlapi.QueryResult; import de.derivo.sparqldlapi.exceptions.QueryEngineException; import de.derivo.sparqldlapi.exceptions.QueryParserException; import de.derivo.sparqldlapi.types.QueryArgumentType; /** * An instance of this class can execute owltools commands in sequence. * * Typically, this class is called from a wrapper within its main() method. * * Extend this class to implement additional functions. Use the {@link CLIMethod} * annotation, to designate the relevant methods. 
* * @author cjm * @see GafCommandRunner * @see JsCommandRunner * @see SimCommandRunner * @see SolrCommandRunner * @see TaxonCommandRunner */ public class CommandRunner extends CommandRunnerBase { private static Logger LOG = Logger.getLogger(CommandRunner.class); public void runSingleIteration(Opts opts) throws Exception { Set<OWLSubClassOfAxiom> removedSubClassOfAxioms = null; GraphicsConfig gfxCfg = new GraphicsConfig(); //Configuration config = new PropertiesConfiguration("owltools.properties"); while (opts.hasArgs()) { if (opts.nextArgIsHelp()) { help(); opts.setHelpMode(true); } //String opt = opts.nextOpt(); //System.out.println("processing arg: "+opt); if (opts.nextEq("--pellet")) { System.err.println("The Pellet reasoner is no longer supported, use Hermit '--hermit', or ELK '--elk' instead"); exit(-1); return; } else if (opts.nextEq("--hermit")) { reasonerName = "hermit"; } else if (opts.nextEq("--elk")) { reasonerName = "elk"; } else if (opts.nextEq("--jfact")) { System.err.println("The JFact reasoner is no longer supported, use Hermit '--hermit', or ELK '--elk' instead"); exit(-1); return; } else if (opts.nextEq("--more")) { System.err.println("The MORE reasoner is no longer supported, use Hermit '--hermit', or ELK '--elk' instead"); exit(-1); return; } else if (opts.nextEq("--use-reasoner|--set-reasoner-name")) { reasonerName = opts.nextOpt(); } else if (opts.nextEq("--no-dispose")) { this.isDisposeReasonerOnExit = false; } else if (opts.nextEq("--reasoner")) { reasonerName = opts.nextOpt(); g.setReasoner(createReasoner(g.getSourceOntology(),reasonerName,g.getManager())); reasoner = g.getReasoner(); } else if (opts.nextEq("--init-reasoner")) { while (opts.hasOpts()) { if (opts.nextEq("-r")) { reasonerName = opts.nextOpt(); } else { break; } } g.setReasoner(createReasoner(g.getSourceOntology(),reasonerName,g.getManager())); reasoner = g.getReasoner(); } else if (opts.nextEq("--reasoner-dispose")) { reasoner.dispose(); } else if 
(opts.nextEq("--reasoner-flush")) { reasoner.flush(); } else if (opts.nextEq("--no-reasoner")) { reasonerName = ""; } else if (opts.nextEq("--log-info")) { Logger.getRootLogger().setLevel(Level.INFO); } else if (opts.nextEq("--log-debug")) { Logger.getRootLogger().setLevel(Level.DEBUG); } else if (opts.nextEq("--log-error")) { Logger.getRootLogger().setLevel(Level.ERROR); } else if (opts.nextEq("--no-debug")) { Logger.getRootLogger().setLevel(Level.OFF); } else if (opts.nextEq("--no-logging")) { Logger.getRootLogger().setLevel(Level.OFF); } else if (opts.nextEq("--silence-elk")) { Logger.getLogger("org.semanticweb.elk").setLevel(Level.OFF); } else if (opts.nextEq("--monitor-memory")) { g.getConfig().isMonitorMemory = true; } else if (opts.nextEq("--list-classes")) { Set<OWLClass> clss = g.getSourceOntology().getClassesInSignature(); for (OWLClass c : clss) { System.out.println(c); } } else if (opts.nextEq("--object-to-label-table")) { Set<OWLObject> objs = g.getAllOWLObjects(); boolean useIds = false; while (opts.hasOpts()) { if (opts.nextEq("-i")) { useIds = true; } else { break; } } for (OWLObject c : objs) { if (c instanceof OWLNamedObject) { String label = g.getLabel(c); String id; if (useIds) { id = g.getIdentifier(c); } else { id = ((OWLNamedObject)c).getIRI().toString(); } System.out.println(id+"\t"+label); } } } else if (opts.nextEq("--write-all-subclass-relationships")) { for (OWLSubClassOfAxiom ax : g.getSourceOntology().getAxioms(AxiomType.SUBCLASS_OF)) { OWLClassExpression parent = ax.getSuperClass(); OWLClassExpression child = ax.getSubClass(); if (parent.isAnonymous() || child.isAnonymous()) continue; System.out.println(g.getIdentifier(child) + "\t" + g.getIdentifier(parent)); } } else if (opts.nextEq("--query-ontology")) { opts.info("[-m]", "specify an ontology that has classes to be used as queries. 
See also: --reasoner-query"); boolean isMerge = false; while (opts.hasOpts()) { if (opts.nextEq("-m")) isMerge = true; else opts.nextOpt(); } queryOntology = pw.parse(opts.nextOpt()); queryExpressionMap = new HashMap<OWLClass,OWLClassExpression>(); for (OWLClass qc : queryOntology.getClassesInSignature()) { for (OWLClassExpression ec : OwlHelper.getEquivalentClasses(qc, queryOntology)) { queryExpressionMap.put(qc, ec); } } if (isMerge) { g.mergeOntology(queryOntology); } } else if (opts.nextEq("--merge")) { opts.info("ONT", "merges ONT into current source ontology"); g.mergeOntology(pw.parse(opts.nextOpt())); } else if (opts.nextEq("--use-catalog") || opts.nextEq("--use-catalog-xml")) { opts.info("", "uses default catalog-v001.xml"); pw.addIRIMapper(new CatalogXmlIRIMapper("catalog-v001.xml")); } else if (opts.nextEq("--catalog-xml")) { opts.info("CATALOG-FILE", "uses the specified file as a catalog"); pw.addIRIMapper(new CatalogXmlIRIMapper(opts.nextOpt())); } else if (opts.nextEq("--map-ontology-iri")) { opts.info("OntologyIRI FILEPATH", "maps an ontology IRI to a file in your filesystem"); OWLOntologyIRIMapper iriMapper = new SimpleIRIMapper(IRI.create(opts.nextOpt()), IRI.create(new File(opts.nextOpt()))); LOG.info("Adding "+iriMapper+" to "+pw.getManager()); pw.addIRIMapper(iriMapper); } else if (opts.nextEq("--auto-ontology-iri")) { opts.info("[-r] ROOTDIR", "uses an AutoIRI mapper [EXPERIMENTAL]"); boolean isRecursive = false; while (opts.hasOpts()) { if (opts.nextEq("-r")) { isRecursive = true; } else { break; } } File file = new File(opts.nextOpt()); OWLOntologyIRIMapper iriMapper = new AutoIRIMapper(file, isRecursive); LOG.info("Adding "+iriMapper+" to "+pw.getManager()+" dir:"+file+" isRecursive="+isRecursive); pw.addIRIMapper(iriMapper); } else if (opts.nextEq("--remove-imports-declarations")) { Set<OWLImportsDeclaration> oids = g.getSourceOntology().getImportsDeclarations(); for (OWLImportsDeclaration oid : oids) { RemoveImport ri = new 
RemoveImport(g.getSourceOntology(), oid); g.getManager().applyChange(ri); } } else if (opts.nextEq("--remove-import-declaration")) { opts.info("IRI", "Removes a specific import"); String rmImport = opts.nextOpt(); Set<OWLImportsDeclaration> oids = g.getSourceOntology().getImportsDeclarations(); for (OWLImportsDeclaration oid : oids) { LOG.info("Testing "+oid.getIRI().toString()+" == "+rmImport); if (oid.getIRI().toString().equals(rmImport)) { RemoveImport ri = new RemoveImport(g.getSourceOntology(), oid); LOG.info(ri); g.getManager().applyChange(ri); } } } else if (opts.nextEq("--add-imports-declarations")) { opts.info("IRI-LIST", "Adds declaration for each ontology IRI"); List<String> importsIRIs = opts.nextList(); for (String importIRI : importsIRIs) { AddImport ai = new AddImport(g.getSourceOntology(), g.getDataFactory().getOWLImportsDeclaration(IRI.create(importIRI))); g.getManager().applyChange(ai); } } else if (opts.nextEq("--subtract")) { opts.info("ONTS", "subtract axioms in support ontology from main ontology"); boolean preserveDeclarations = false; boolean preserveAnnotations = false; while (opts.hasOpts()) { if (opts.nextEq("-d|--preserve-declarations")) { preserveDeclarations = true; } if (opts.nextEq("-a|--preserve-annotations")) { preserveAnnotations = true; } else { break; } } List<String> ontFiles = opts.nextList(); for (String ontFile: ontFiles) { g.addSupportOntology(pw.parse(ontFile)); } Set<OWLAxiom> rmAxioms = new HashSet<>(); int n=0; for (OWLOntology o : g.getSupportOntologySet()) { for (OWLAxiom a : o.getAxioms()) { if (preserveDeclarations && a instanceof OWLDeclarationAxiom) { continue; } if (preserveAnnotations && a.isAnnotationAxiom()) { continue; } rmAxioms.add(a); n++; } } LOG.info("Removing "+n+" axioms"); g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms); } else if (opts.nextEq("--diff")) { opts.info("[--o1r FILE][--o2r FILE][--oi FILE][--od FILE][-u][-s][-f FMT]", "ontology difference or subtraction"); String o1r = null; 
String o2r = null; String oi = null; String od = null; OWLDocumentFormat ofmt = new RDFXMLDocumentFormat(); boolean isCompareClassesInCommon = false; boolean isCompareUnannotatedForm = false; boolean isAddSharedDeclarations = false; while (opts.hasOpts()) { if (opts.nextEq("--o1r")) { o1r = opts.nextOpt(); } else if (opts.nextEq("--o2r")) { o2r = opts.nextOpt(); } else if (opts.nextEq("--oi")) { oi = opts.nextOpt(); } else if (opts.nextEq("--od")) { od = opts.nextOpt(); } else if (opts.nextEq("-s")) { opts.info("", "shared: compare only classes in common to both"); isCompareClassesInCommon = true; } else if (opts.nextEq("-u")) { opts.info("", "compare unannotated forms: remove axiom annotations before comparison"); isCompareUnannotatedForm = true; } else if (opts.nextEq("-f")) { String fmt = opts.nextOpt(); if (fmt.equals("obo")) { ofmt = new OBODocumentFormat(); isAddSharedDeclarations = true; } else LOG.error("Cannot do: "+fmt); } else { break; } } if (g.getSupportOntologySet().size() != 1) { LOG.error("must be exactly 1 support ontology. E.g. owltools f1.owl f2.owl --dif"); } OWLOntology ont2 = g.getSupportOntologySet().iterator().next(); Diff diff = new Diff(); diff.ontology1 = g.getSourceOntology(); diff.ontology2 = ont2; diff.isAddSharedDeclarations = isAddSharedDeclarations; diff.isCompareClassesInCommon = isCompareClassesInCommon; diff.isCompareUnannotatedForm = isCompareUnannotatedForm; diff = DiffUtil.getDiff(diff); System.out.println(diff); final ParserWrapper pw = new ParserWrapper(); if (o1r != null) pw.saveOWL(diff.ontology1remaining , ofmt, o1r); if (o2r != null) pw.saveOWL(diff.ontology2remaining , ofmt, o2r); if (oi != null) pw.saveOWL(diff.ontology2remaining , ofmt, oi); if (od != null) pw.saveOWL(diff.ontologyDiff , ofmt, od); } else if (opts.nextEq("--set-ontology-id")) { opts.info("[-v VERSION-IRI][-a] IRI", "Sets the OWLOntologyID (i.e. 
IRI and versionIRI)"); IRI v = null; IRI iri = null; boolean isAnonymous = false; while (opts.hasOpts()) { if (opts.nextEq("-v|--version-iri")) { v = IRI.create(opts.nextOpt()); } else if (opts.nextEq("-a|--anonymous")) { opts.info("", "if specified, do not specify an IRI"); isAnonymous = true; } else { break; } } if (!isAnonymous) iri = IRI.create(opts.nextOpt()); OWLOntologyID oid = new OWLOntologyID(Optional.fromNullable(iri), Optional.fromNullable(v)); SetOntologyID soid; soid = new SetOntologyID(g.getSourceOntology(), oid); g.getManager().applyChange(soid); } else if (opts.nextEq("--add-ontology-annotation")) { opts.info("[-u] PROP VAL", "Sets an ontology annotation"); OWL2Datatype dt = OWL2Datatype.XSD_STRING; while (opts.hasOpts()) { if (opts.nextEq("-u")) { opts.info("", "Ase xsd:anyURI as datatype"); dt = OWL2Datatype.XSD_ANY_URI; } else break; } IRI p = IRI.create(opts.nextOpt()); OWLLiteral v = g.getDataFactory().getOWLLiteral(opts.nextOpt(), dt); OWLAnnotation ann = g.getDataFactory().getOWLAnnotation(g.getDataFactory().getOWLAnnotationProperty(p), v); AddOntologyAnnotation addAnn = new AddOntologyAnnotation(g.getSourceOntology(), ann); g.getManager().applyChange(addAnn); } else if (opts.nextEq("--create-ontology")) { IRI v = null; while (opts.hasOpts()) { if (opts.nextEq("-v|--version-iri")) { v = IRI.create(opts.nextOpt()); } else { break; } } String iri = opts.nextOpt(); if (!iri.startsWith("http:")) { iri = Obo2OWLConstants.DEFAULT_IRI_PREFIX+iri; } g = new OWLGraphWrapper(iri); if (v != null) { OWLOntologyID oid = new OWLOntologyID(Optional.of(IRI.create(iri)), Optional.of(v)); SetOntologyID soid; soid = new SetOntologyID(g.getSourceOntology(), oid); g.getManager().applyChange(soid); } } else if (opts.nextEq("--merge-import")) { opts.info("ONTOLOGY-IRI", "Ensure all axioms from the ontology are merged into the main ontology"); String iriString = opts.nextOpt(); g.mergeSpecificImport(IRI.create(iriString)); } else if 
(opts.nextEq("--merge-import-closure") || opts.nextEq("--merge-imports-closure")) { opts.info("[--ni]", "All axioms from ontologies in import closure are copied into main ontology"); boolean isRmImports = false; if (opts.nextEq("--ni")) { opts.info("", "removes imports declarations after merging"); isRmImports = true; } g.mergeImportClosure(isRmImports); } else if (opts.nextEq("--merge-support-ontologies")) { opts.info("[-l]", "This will merge the support ontologies from the OWLGraphWrapper into the main ontology. This is usually required while working with a reasoner."); LabelPolicy lp = LabelPolicy.ALLOW_DUPLICATES; while (opts.hasArgs()) { if (opts.nextEq("-l|--labels")) { opts.info("", "preserve source labels"); lp = LabelPolicy.PRESERVE_SOURCE; } else break; } for (OWLOntology ont : g.getSupportOntologySet()) g.mergeOntology(ont, lp); g.setSupportOntologySet(new HashSet<OWLOntology>()); } else if (opts.nextEq("--add-support-from-imports")) { opts.info("", "All ontologies in direct import are removed and added as support ontologies"); g.addSupportOntologiesFromImportsClosure(); } else if (opts.nextEq("--add-imports-from-support|--add-imports-from-supports")) { g.addImportsFromSupportOntologies(); } else if (opts.nextEq("-m") || opts.nextEq("--mcat")) { catOntologies(opts); } else if (opts.nextEq("--remove-entities-marked-imported")) { opts.info("","Removes all classes, individuals and object properties that are marked with IAO_0000412"); Mooncat m = new Mooncat(g); m.removeExternalEntities(); } else if (opts.nextEq("--remove-external-classes")) { opts.info("IDSPACE","Removes all classes not in the specified ID space"); boolean removeDangling = true; while (opts.hasOpts()) { if (opts.nextEq("-k|--keepDangling")) { removeDangling = false; } else { break; } } String idspace = opts.nextOpt(); Mooncat m = new Mooncat(g); m.removeClassesNotInIDSpace(idspace, removeDangling); } else if (opts.nextEq("--remove-dangling")) { Mooncat m = new Mooncat(g); 
m.removeDanglingAxioms(); } else if (opts.nextEq("--remove-uninstantiated-classes")) { opts.info("", "removes all classes for which the reasoner can infer no instances"); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); int n = 0; reasoner.flush(); for (OWLClass obj : g.getAllOWLClasses()) { if (reasoner.getInstances(obj, false).getFlattened().size() == 0) { LOG.info("Unused: "+obj); n++; rmAxioms.addAll(g.getSourceOntology().getReferencingAxioms(obj, Imports.INCLUDED)); } } LOG.info("Removing "+rmAxioms.size()+" referencing "+n+" unused classes"); g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms); } else if (opts.nextEq("--make-subset-by-properties")) { opts.info("PROPERTY-LIST", "make an ontology subset that excludes axioms that use properties not in the specified set.\n"+ " Note the ontology should be relaxed e.g. X=A and R some B ==> X SubClassOf A" + " A property list is a space-separated list of object property OBO-IDs, shorthands, URIs, or labels.\n"+ " Example: my.owl --make-subset-by-properties BFO:0000050 'develops from' // -o my-slim.owl \n"+ " The special symbol 'ALL-PROPERTIES' selects all properties in the signature.\n"+ " The property list should be terminated by '//' (this is optional and a new command starting with '-' is sufficient to end the list)"); boolean isForceRemoveDangling = false; boolean isSuppressRemoveDangling = false; while (opts.hasOpts()) { if (opts.nextEq("-f|--force")) { isForceRemoveDangling = true; } else if (opts.nextEq("-n|--no-remove-dangling")) { isSuppressRemoveDangling = true; } else { break; } } Set<OWLObjectProperty> props = this.resolveObjectPropertyList(opts); Mooncat m = new Mooncat(g); int numDanglingAxioms = m.getDanglingAxioms(g.getSourceOntology()).size(); LOG.info("# Dangling axioms prior to filtering: "+numDanglingAxioms); if (numDanglingAxioms > 0) { if (!isForceRemoveDangling && !isSuppressRemoveDangling) { LOG.error("Will not proceed until dangling axioms removed, or -n or -f options are set"); 
throw new Exception("Dangling axioms will be lost"); } } m.retainAxiomsInPropertySubset(g.getSourceOntology(),props,reasoner); if (!isSuppressRemoveDangling) { LOG.info("# Dangling axioms post-filtering: " + m.getDanglingAxioms(g.getSourceOntology()).size()); m.removeDanglingAxioms(); } } else if (opts.nextEq("--list-class-axioms")) { OWLClass c = resolveClass(opts.nextOpt()); System.out.println("Class = "+c); OWLPrettyPrinter owlpp = getPrettyPrinter(); for (OWLClassAxiom ax : g.getSourceOntology().getAxioms(c, Imports.EXCLUDED)) { //System.out.println(ax); owlpp.print(ax); } } else if (opts.nextEq("--list-all-axioms")) { OWLPrettyPrinter owlpp = getPrettyPrinter(); for (OWLAxiom a : g.getSourceOntology().getAxioms()) { owlpp.print(a); } } else if (opts.nextEq("--make-species-subset")) { opts.info("-t TAXCLASS","Creates a taxon-specific ontology"); OWLObjectProperty viewProperty = null; OWLClass taxClass = null; String suffix = null; SpeciesSubsetterUtil smu = new SpeciesSubsetterUtil(g); while (opts.hasOpts()) { if (opts.nextEq("-t|--taxon")) { taxClass = this.resolveClass(opts.nextOpt()); } else if (opts.nextEq("-p|--property")) { viewProperty = this.resolveObjectProperty(opts.nextOpt()); } else if (opts.nextEq("-r|--root")) { smu.rootClass = this.resolveClass(opts.nextOpt()); } else break; } smu.viewProperty = viewProperty; smu.taxClass = taxClass; smu.reasoner = reasoner; smu.removeOtherSpecies(); } else if (opts.nextEq("--merge-species-ontology")) { opts.info("-t TAXCLASS [-s SUFFIX] [-p PROP] [-q PROPS]","Creates a composite/merged species ontology"); OWLObjectProperty viewProperty = g.getOWLObjectPropertyByIdentifier("BFO:0000050"); OWLClass taxClass = null; String suffix = null; Set<OWLObjectProperty> includeProps = null; while (opts.hasOpts()) { if (opts.nextEq("-t|--taxon")) { taxClass = this.resolveClass(opts.nextOpt()); } else if (opts.nextEq("-p|--property")) { viewProperty = this.resolveObjectProperty(opts.nextOpt()); } else if 
(opts.nextEq("-q|--include-property")) { opts.info("PROPS", "object props to include - ALL if unspecified"); includeProps = this.resolveObjectPropertyList(opts); LOG.info("|IP|"+includeProps.size()); LOG.info("IP"+includeProps); } else if (opts.nextEq("-s|--suffix")) { suffix = opts.nextOpt(); } else break; } SpeciesMergeUtil smu = new SpeciesMergeUtil(g); smu.viewProperty = viewProperty; smu.taxClass = taxClass; smu.reasoner = reasoner; smu.includedProperties = includeProps; if (suffix != null) smu.suffix = suffix; smu.merge(); } else if (opts.nextEq("--info")) { opts.info("","show ontology statistics"); for (OWLOntology ont : g.getAllOntologies()) { summarizeOntology(ont); } } else if (opts.nextEq("--reason-intra-ontology")) { opts.info("[-r reasoner] ONT", ""); String ont = null; while (opts.hasOpts()) { if (opts.nextEq("-r|--reasoner")) { opts.info("REASONER", "select reasoner."); reasonerName = opts.nextOpt(); } else { break; } } ont = opts.nextOpt(); OWLPrettyPrinter owlpp = getPrettyPrinter(); for (OWLClass c : g.getAllOWLClasses()) { if (g.getIdSpace(c).equals(ont)) { Set<OWLClassExpression> parents = g.getSourceOntology().getSubClassAxiomsForSubClass(c).stream().map( a -> a.getSuperClass()).collect(Collectors.toSet()); Set<OWLClass> infParents = reasoner.getSuperClasses(c, true).getFlattened(); for (OWLClass p : infParents) { if (g.getIdSpace(p).equals(ont)) { if (!parents.contains(p)) { System.out.println("INFERRED: "+owlpp(c)+" SubClassOf " + owlpp(p)); } } } } } } else if (opts.nextEq("--spike-and-reason")) { opts.info("[-r reasoner]", "spikes in support ontologies and determines new inferences"); while (opts.hasOpts()) { if (opts.nextEq("-r|--reasoner")) { opts.info("REASONER", "select reasoner."); reasonerName = opts.nextOpt(); } else { break; } } OWLPrettyPrinter owlpp = getPrettyPrinter(); Set<OWLClass> sourceOntologyClasses = g.getSourceOntology().getClassesInSignature(Imports.EXCLUDED); LOG.info("SOURCE ONTOLOGY CLASSES:" 
+sourceOntologyClasses.size()); // create ancestor lookup, pre-spiking reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); Map<OWLClass, Set<OWLClass>> assertedParentMap = new HashMap<>(); for (OWLClass c : sourceOntologyClasses) { assertedParentMap.put(c, reasoner.getSuperClasses(c, false).getFlattened()); } reasoner.dispose(); // spike in support ontologies for (OWLOntology ont : g.getSupportOntologySet()) { LOG.info("MERGING:" +ont); g.mergeOntology(ont); } g.setSupportOntologySet(new HashSet<OWLOntology>()); // perform inference on spiked ontology, determine difference reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); int n = 0; LOG.info("TESTING:" +sourceOntologyClasses.size()); for (OWLClass c : sourceOntologyClasses) { // all ancestors in spiked ontology Set<OWLClass> infParents = new HashSet<>(reasoner.getSuperClasses(c, false).getFlattened()); Set<OWLClass> infParentsDirect = new HashSet<>(reasoner.getSuperClasses(c, true).getFlattened()); // get those unique to spiked ontology infParents.removeAll(assertedParentMap.get(c)); for (OWLClass p : infParents) { // only report new inferences within source if (sourceOntologyClasses.contains(p)) { Set<OWLClass> pSubs = reasoner.getSubClasses(p, true).getFlattened(); boolean isRedundant = false; for (OWLClass p2 : pSubs) { if (sourceOntologyClasses.contains(p2) && infParents.contains(p2)) { isRedundant = true; break; } } if (!isRedundant) { String isDirect = infParentsDirect.contains(p) ? 
"PARENT" : "ANCESTOR"; System.out.println(owlpp.render(c)+ "\t" + isDirect + "\t" + owlpp.render(p)); n++; } } } } LOG.info("NEW SUPERCLASSES:"+n); } else if (opts.nextEq("--save-superclass-closure")) { opts.info("[-r reasoner] FILENAME", "write out superclass closure of graph."); GraphRenderer gcw; while (opts.hasOpts()) { if (opts.nextEq("-r|--reasoner")) { opts.info("REASONER", "select reasoner."); reasonerName = opts.nextOpt(); } else { break; } } String filename = opts.nextOpt(); List<String> lines = new ArrayList<String>(); for (OWLClass c : g.getAllOWLClasses()) { Set<OWLClass> ecs = reasoner.getEquivalentClasses(c).getEntities(); Set<OWLClass> scs = reasoner.getSuperClasses(c, false).getFlattened(); Set<OWLClass> all = new HashSet<OWLClass>(ecs); all.addAll(scs); List<String> ids = new ArrayList<String>(); for (OWLClass sc : all) { ids.add(g.getIdentifier(sc)); } Collections.sort(ids); lines.add(g.getIdentifier(c)+"\t"+StringUtils.join(ids.iterator(), ",")); } Collections.sort(lines); FileUtils.writeLines(new File(filename), lines); } else if (opts.nextEq("--save-closure")) { opts.info("[-c] FILENAME", "write out closure of graph."); GraphRenderer gcw; if (opts.nextEq("-c")) { opts.info("", "compact storage option."); gcw = new CompactGraphClosureRenderer(opts.nextOpt()); } else { gcw = new GraphClosureRenderer(opts.nextOpt()); } gcw.render(g); } else if (opts.nextEq("--read-closure")) { opts.info("FILENAME", "reads closure previously saved using --save-closure (compact format only)"); GraphReader gr = new CompactGraphClosureReader(g); gr.read(opts.nextOpt()); LOG.info("RESTORED CLOSURE CACHE"); LOG.info("size="+g.inferredEdgeBySource.size()); } else if (opts.nextEq("--save-closure-for-chado")) { opts.info("OUTPUTFILENAME", "saves the graph closure in a format that is oriented towards loading into a Chado database"); boolean isChain = opts.nextEq("--chain"); ChadoGraphClosureRenderer gcw = new ChadoGraphClosureRenderer(opts.nextOpt()); gcw.isChain = 
isChain; gcw.render(g); } else if (opts.nextEq("--export-table")) { opts.info("[-c] OUTPUTFILENAME", "saves the ontology in tabular format (PARTIALLY IMPLEMENTED)"); boolean isWriteHeader = false; while (opts.hasOpts()) { if (opts.nextEq("-c")) opts.info("", "write column headers"); isWriteHeader = true; } String out = opts.nextOpt(); TableRenderer tr = new TableRenderer(out); tr.isWriteHeader = isWriteHeader; tr.render(g); } else if (opts.nextEq("--export-edge-table")) { opts.info("OUTPUTFILENAME", "saves the ontology edges in tabular format"); String out = opts.nextOpt(); EdgeTableRenderer tr = new EdgeTableRenderer(out); tr.render(g); } else if (opts.nextEq("--materialize-gcis")) { opts.info("[-m]", "infers axioms using GCIUtil"); boolean isMerge = false; while (opts.hasOpts()) { if (opts.nextEq("-m|--merge")) { isMerge = true; } else { break; } } if (reasoner == null) { System.err.println("REASONER NOT INITIALIZED!"); } OWLDataFactory df = g.getDataFactory(); Set<OWLSubClassOfAxiom> axioms = GCIUtil.getSubClassOfSomeValuesFromAxioms(g.getSourceOntology(), reasoner); if (!isMerge) { g.setSourceOntology(g.getManager().createOntology()); } g.getManager().addAxioms(g.getSourceOntology(), axioms); } else if (opts.nextEq("--assert-inferred-svfs")) { opts.info("[-p LIST] [-m] [-gp PROPERTY] [-gf FILLER]", "asserts inferred parents by property using ExtendedReasoner"); String out = null; boolean isMerge = false; List<OWLObjectProperty> props = null; OWLObjectProperty gciProperty = null; List<OWLClass> gciFillers = null; while (opts.hasOpts()) { if (opts.nextEq("-p|--plist")) { props = this.resolveObjectPropertyListAsList(opts); } //else if (opts.nextEq("-o|--output")) { // out = opts.nextOpt(); //} else if (opts.nextEq("-gp|--gci-property")) { gciProperty = this.resolveObjectProperty(opts.nextOpt()); } else if (opts.nextEq("-gf|--gci-fillers")) { gciFillers = resolveClassList(opts); } else if (opts.nextEq("-m|--merge")) { opts.info("", "adds inferred axioms to current 
ontology (default is new"); isMerge = true; } else { break; } } if (reasoner == null) { System.err.println("REASONER NOT INITIALIZED!"); } if (!(reasoner instanceof OWLExtendedReasoner)) { System.err.println("REASONER NOT AN EXTENDED REASONER. Recommended: add '--reasoner mexr' prior to this command"); } OWLExtendedReasoner emr = (OWLExtendedReasoner) reasoner; OWLDataFactory df = g.getDataFactory(); Set<OWLAxiom> axioms = new HashSet<OWLAxiom>(); for (OWLClass c : g.getAllOWLClasses()) { for (OWLObjectProperty p : props) { for (OWLClass parent : emr.getSuperClassesOver(c, p, true)) { axioms.add(df.getOWLSubClassOfAxiom(c, df.getOWLObjectSomeValuesFrom(p, parent))); } } } if (!isMerge) { g.setSourceOntology(g.getManager().createOntology()); } g.getManager().addAxioms(g.getSourceOntology(), axioms); } else if (opts.nextEq("--export-parents")) { opts.info("[-p LIST] [-o OUTPUTFILENAME]", "saves a table of all direct inferred parents by property using ExtendedReasoner"); String out = null; List<OWLObjectProperty> props = null; OWLObjectProperty gciProperty = null; List<OWLClass> gciFillers = null; while (opts.hasOpts()) { if (opts.nextEq("-p|--plist")) { props = this.resolveObjectPropertyListAsList(opts); } else if (opts.nextEq("-o|--output")) { out = opts.nextOpt(); } else if (opts.nextEq("-gp|--gci-property")) { gciProperty = this.resolveObjectProperty(opts.nextOpt()); } else if (opts.nextEq("-gf|--gci-fillers")) { gciFillers = resolveClassList(opts); } else { break; } } InferredParentRenderer tr = new InferredParentRenderer(out); tr.setProperties(props); if (gciFillers != null) tr.setGciFillers(gciFillers); if (gciProperty != null) tr.setGciProperty(gciProperty); tr.setReasoner((OWLExtendedReasoner) reasoner); tr.render(g); } else if (opts.nextEq("--remove-annotation-assertions")) { opts.info("[-l][-d][-s][-r][-p IRI]*", "removes annotation assertions to make a pure logic subset. 
Select axioms can be preserved"); boolean isPreserveLabels = false; boolean isPreserveDefinitions = false; boolean isPreserveSynonyms = false; boolean isPreserveRelations = false; boolean isPreserveDeprecations = true; Set<IRI> preserveAnnotationPropertyIRIs = new HashSet<IRI>(); while (opts.hasOpts()) { if (opts.nextEq("-l|--preserve-labels")) { opts.info("", "if specified, all rdfs labels are preserved"); isPreserveLabels = true; } else if (opts.nextEq("-d|--preserve-definitions")) { opts.info("", "if specified, all obo text defs are preserved"); isPreserveDefinitions = true; } else if (opts.nextEq("-s|--preserve-synonyms")) { opts.info("", "if specified, all obo-style synonyms are preserved"); isPreserveSynonyms = true; } else if (opts.nextEq("--remove-deprecation-axioms")) { opts.info("", "if specified, all owl:deprecated in NOT preserved"); isPreserveDeprecations = true; } else if (opts.nextEq("-r|--preserve-relations")) { opts.info("", "unless specified, all axioms about properties are removed"); isPreserveRelations = true; } else if (opts.nextEq("-p|--preserve-property")) { opts.info("IRI", "if specified, all properties with IRI are preserved. 
Can be specified multiple times"); preserveAnnotationPropertyIRIs.add(IRI.create(opts.nextOpt())); } else break; } for (OWLOntology o : g.getAllOntologies()) { Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); Set<OWLAxiom> keepAxioms = new HashSet<OWLAxiom>(); Set<OWLAnnotationProperty> propsToKeep = new HashSet<OWLAnnotationProperty>(); rmAxioms.addAll( o.getAxioms(AxiomType.ANNOTATION_ASSERTION) ); for (OWLAnnotationAssertionAxiom aaa : o.getAxioms(AxiomType.ANNOTATION_ASSERTION)) { IRI piri = aaa.getProperty().getIRI(); if (isPreserveLabels) { if (aaa.getProperty().isLabel()) { keepAxioms.add(aaa); } } if (isPreserveDeprecations) { if (aaa.getProperty().isDeprecated()) { keepAxioms.add(aaa); } } if (isPreserveDefinitions) { if (piri.equals(Obo2OWLVocabulary.IRI_IAO_0000115.getIRI())) { keepAxioms.add(aaa); } } if (isPreserveSynonyms) { if (piri.equals(Obo2OWLVocabulary.IRI_OIO_hasBroadSynonym.getIRI()) || piri.equals(Obo2OWLVocabulary.IRI_OIO_hasExactSynonym.getIRI()) || piri.equals(Obo2OWLVocabulary.IRI_OIO_hasRelatedSynonym.getIRI()) || piri.equals(Obo2OWLVocabulary.IRI_OIO_hasNarrowSynonym.getIRI())) { keepAxioms.add(aaa); } } if (preserveAnnotationPropertyIRIs.contains(piri)) keepAxioms.add(aaa); // remove non-classes if (!isPreserveRelations) { if (aaa.getSubject() instanceof IRI) { OWLClass c = g.getDataFactory().getOWLClass((IRI) aaa.getSubject()); if (o.getDeclarationAxioms(c).size() == 0) { keepAxioms.remove(aaa); } } } if (keepAxioms.contains(aaa)) { propsToKeep.add(aaa.getProperty()); } } LOG.info("Number of annotation assertion axioms:"+rmAxioms.size()); LOG.info("Axioms to preserve:"+keepAxioms.size()); rmAxioms.removeAll(keepAxioms); LOG.info("Number of annotation assertion axioms being removed:"+rmAxioms.size()); if (!isPreserveRelations) { for (OWLAnnotationProperty p : o.getAnnotationPropertiesInSignature()) { if (propsToKeep.contains(p)) continue; rmAxioms.addAll(o.getAnnotationAssertionAxioms(p.getIRI())); 
rmAxioms.add(g.getDataFactory().getOWLDeclarationAxiom(p)); } LOG.info("Total number of axioms being removed, including annotation properties:"+rmAxioms.size()); } g.getManager().removeAxioms(o, rmAxioms); // TODO - remove axiom annotations } } else if (opts.nextEq("--apply-patch")) { opts.info("minusAxiomsOntology plusAxiomsOntology", "applies 'patch' to current ontology"); OWLOntology ontMinus = pw.parse(opts.nextOpt()); OWLOntology ontPlus = pw.parse(opts.nextOpt()); OWLOntology src = g.getSourceOntology(); Set<OWLAxiom> rmAxioms = ontMinus.getAxioms(); Set<OWLAxiom> addAxioms = ontPlus.getAxioms(); int numPre = src.getAxiomCount(); LOG.info("Removing "+rmAxioms.size()+" axioms from src, current axiom count="+numPre); g.getManager().removeAxioms(src, rmAxioms); int numPost = src.getAxiomCount(); LOG.info("Removed axioms from src, new axiom count="+numPost); if (numPre-numPost != rmAxioms.size()) { LOG.error("Some axioms not found!"); } LOG.info("Adding "+addAxioms.size()+" axioms to src, current axiom count="+numPost); g.getManager().addAxioms(src, addAxioms); LOG.info("Added "+addAxioms.size()+" axioms to src, new count="+src.getAxiomCount()); if (src.getAxiomCount() - numPost != addAxioms.size()) { LOG.error("Some axioms already there!"); } } else if (opts.nextEq("--translate-xrefs-to-equivs")) { opts.info("[-m PREFIX URI]* [-p PREFIX] [-a] [-n]", "Translates the OBO xref property (or alt_id property, if -a set) into equivalence axioms"); Map<String,String> prefixMap = new HashMap<String,String>(); Set<String> prefixes = new HashSet<String>(); boolean isNew = false; boolean isUseAltIds = false; while (opts.hasOpts()) { if (opts.nextEq("-m")) { opts.info("PREFIX URI", "maps prefixs/DBs to URI prefixes"); prefixMap.put(opts.nextOpt(), opts.nextOpt()); } else if (opts.nextEq("-p")) { opts.info("PREFIX", "prefix to filter on"); prefixes.add(opts.nextOpt()); } else if (opts.nextEq("-a")) { opts.info("", "if true, use obo alt_ids"); isUseAltIds = true; } else if 
(opts.nextEq("-n")) { opts.info("", "if set, will generate a new ontology containing only equiv axioms"); isNew = true; } else { break; } } Set<OWLAxiom> axioms = new HashSet<OWLAxiom>(); for (OWLClass c : g.getAllOWLClasses()) { List<String> xrefs = g.getXref(c); if (isUseAltIds) { xrefs = g.getAltIds(c); LOG.info("Class "+c+" altIds: "+xrefs.size()); } for (String x : xrefs) { IRI iri = null; if (x.contains(" ")) { LOG.warn("Ignore xref with space: "+x); continue; } if (x.contains(":")) { String[] parts = x.split(":",2); if (prefixes.size() > 0 && !prefixes.contains(parts[0])) { continue; } if (prefixMap.containsKey(parts[0])) { iri = IRI.create(prefixMap.get(parts[0]) + parts[1]); } } if (iri == null) { iri = g.getIRIByIdentifier(x); } axioms.add(g.getDataFactory().getOWLEquivalentClassesAxiom(c, g.getDataFactory().getOWLClass(iri))); } } if (isNew) { g.setSourceOntology(g.getManager().createOntology()); } g.getManager().addAxioms(g.getSourceOntology(), axioms); } else if (opts.nextEq("--repair-relations")) { opts.info("", "replaces un-xrefed relations with correct IRIs"); OWLEntityRenamer oer = new OWLEntityRenamer(g.getManager(), g.getAllOntologies()); List<OWLOntologyChange> changes = new ArrayList<OWLOntologyChange> (); for (OWLObjectProperty p : g.getSourceOntology().getObjectPropertiesInSignature()) { IRI piri = p.getIRI(); if (piri.getFragment().equals("part_of")) { List<OWLOntologyChange> ch = oer.changeIRI(piri, g.getIRIByIdentifier("BFO:0000050")); changes.addAll(ch); } if (piri.getFragment().equals("has_part")) { List<OWLOntologyChange> ch = oer.changeIRI(piri, g.getIRIByIdentifier("BFO:0000051")); changes.addAll(ch); } } LOG.info("Repairs: "+changes.size()); g.getManager().applyChanges(changes); OboInOwlCardinalityTools.checkAnnotationCardinality(g.getSourceOntology()); } else if (opts.nextEq("--rename-entity")) { opts.info("OLD-IRI NEW-IRI", "used OWLEntityRenamer to switch IDs/IRIs"); OWLEntityRenamer oer = new OWLEntityRenamer(g.getManager(), 
g.getAllOntologies()); List<OWLOntologyChange> changes = oer.changeIRI(IRI.create(opts.nextOpt()),IRI.create(opts.nextOpt())); g.getManager().applyChanges(changes); } else if (opts.nextEq("--merge-equivalence-sets")) { opts.info("[-s PREFIX SCORE]* [-l PREFIX SCORE]* [-c PREFIX SCORE]* [-d PREFIX SCORE]* [-x]", "merges sets of equivalent classes. Prefix-based priorities used to determine representative member"); EquivalenceSetMergeUtil esmu = new EquivalenceSetMergeUtil(g, reasoner); while (opts.hasOpts()) { if (opts.nextEq("-s")) { opts.info("PREFIX SCORE", "Assigns a priority score for a prefix used to determine REPRESENTATIVE IRI for merging. E.g. -s HP 5 -s MP 4"); esmu.setPrefixScore( opts.nextOpt(), Double.parseDouble(opts.nextOpt()) ); } else if (opts.nextEq("-l")) { opts.info("PREFIX SCORE", "Assigns a priority score to determine which LABEL should be used post-merge. E.g. -s HP 5 -s MP 4 means HP prefered"); OWLAnnotationProperty p = g.getDataFactory().getOWLAnnotationProperty( OWLRDFVocabulary.RDFS_LABEL.getIRI() ); esmu.setPropertyPrefixScore( p, opts.nextOpt(), Double.parseDouble(opts.nextOpt()) ); } else if (opts.nextEq("-c")) { opts.info("PREFIX SCORE", "Assigns a priority score to determine which COMMENT should be used post-merge. E.g. -s HP 5 -s MP 4 means HP prefered"); OWLAnnotationProperty p = g.getDataFactory().getOWLAnnotationProperty( OWLRDFVocabulary.RDFS_COMMENT.getIRI() ); esmu.setPropertyPrefixScore( p, opts.nextOpt(), Double.parseDouble(opts.nextOpt()) ); } else if (opts.nextEq("-d")) { opts.info("PREFIX SCORE", "Assigns a priority score to determine which DEFINITION should be used post-merge. E.g. 
-s HP 5 -s MP 4"); OWLAnnotationProperty p = g.getDataFactory().getOWLAnnotationProperty( Obo2OWLVocabulary.IRI_IAO_0000115.getIRI() ); esmu.setPropertyPrefixScore( p, opts.nextOpt(), Double.parseDouble(opts.nextOpt()) ); } else if (opts.nextEq("-x")) { opts.info("", "Removes xrefs between named classes"); esmu.setRemoveAxiomatizedXRefs(true); } else { break; } } esmu.merge(); } else if (opts.nextEq("--merge-equivalent-classes")) { opts.info("[-f FROM-URI-PREFIX]* [-t TO-URI-PREFIX] [-a] [-sa]", "merges equivalent classes, from source(s) to target ontology"); List<String> prefixFroms = new Vector<String>(); String prefixTo = null; boolean isKeepAllAnnotations = false; boolean isPrioritizeAnnotationsFromSource = false; while (opts.hasOpts()) { if (opts.nextEq("-f")) { opts.info("", "a URI or OBO prefix for the source entities. This may be listed multiple times"); String pfx = opts.nextOpt(); if (!pfx.startsWith("http")) pfx = "http://purl.obolibrary.org/obo/"+pfx+"_"; prefixFroms.add(pfx); } else if (opts.nextEq("-t")) { opts.info("", "a URI or OBO prefix for the target entities. This must be listed once"); prefixTo = opts.nextOpt(); if (!prefixTo.startsWith("http")) prefixTo = "http://purl.obolibrary.org/obo/"+prefixTo+"_"; } else if (opts.nextEq("-a|--keep-all-annotations")) { opts.info("", "if set, all annotations are preserved. 
Resulting ontology may have duplicate labels and definitions"); isKeepAllAnnotations = true; } else if (opts.nextEq("-sa|--prioritize-annotations-from-source")) { opts.info("", "if set, then when collapsing label and def annotations, use the source annotation over the target"); isPrioritizeAnnotationsFromSource = true; } else break; } Map<OWLEntity,IRI> e2iri = new HashMap<OWLEntity,IRI>(); LOG.info("building entity2IRI map...: " + prefixFroms + " --> "+prefixTo); OWLEntityRenamer oer = new OWLEntityRenamer(g.getManager(), g.getAllOntologies()); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); // we only map classes in the source ontology - however, // we use equivalence axioms from the full complement of ontologies // TODO - allow arbitrary entities Map<Integer,Integer> binSizeMap = new HashMap<Integer,Integer>(); for (OWLClass e : g.getSourceOntology().getClassesInSignature()) { //LOG.info(" testing "+c+" ECAs: "+g.getSourceOntology().getEquivalentClassesAxioms(c)); // TODO - may be more efficient to invert order of testing String iriStr = e.getIRI().toString(); boolean isMatch = false; for (String prefixFrom : prefixFroms) { if (iriStr.startsWith(prefixFrom)) { isMatch = true; break; } } if (prefixFroms.size()==0) isMatch = true; if (isMatch) { Set<OWLClass> ecs = new HashSet<OWLClass>(); if (reasoner != null) { ecs = reasoner.getEquivalentClasses(e).getEntities(); } else { // we also scan support ontologies for equivalencies for (OWLOntology ont : g.getAllOntologies()) { // c is the same of e.. why do this? 
OWLClass c = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(e.getIRI()); for (OWLEquivalentClassesAxiom eca : ont.getEquivalentClassesAxioms(c)) { ecs.addAll(eca.getNamedClasses()); } } } int size = ecs.size(); if (binSizeMap.containsKey(size)) { binSizeMap.put(size, binSizeMap.get(size) +1); } else { binSizeMap.put(size, 1); } for (OWLClass d : ecs) { if (d.equals(e)) continue; if (prefixTo == null || d.getIRI().toString().startsWith(prefixTo)) { // add to mapping. Renaming will happen later e2iri.put(e, d.getIRI()); // TODO one-to-many // annotation collapsing. In OBO, max cardinality of label, comment and definition is 1 // note that this not guaranteed to work if multiple terms are being merged in if (!isKeepAllAnnotations) { OWLClass mainObj = d; OWLClass secondaryObj = e; if (isPrioritizeAnnotationsFromSource) { mainObj = e; secondaryObj = d; } // ensure OBO cardinality of properties is preserved for (OWLAnnotationAssertionAxiom aaa : g.getSourceOntology().getAnnotationAssertionAxioms(secondaryObj.getIRI())) { if (aaa.getProperty().isLabel()) { if (g.getLabel(mainObj) != null) { rmAxioms.add(aaa); // todo - option to translate to synonym } } if (aaa.getProperty().getIRI().equals(Obo2OWLVocabulary.IRI_IAO_0000115.getIRI())) { if (g.getDef(mainObj) != null) { rmAxioms.add(aaa); } } if (aaa.getProperty().isComment()) { if (g.getComment(mainObj) != null) { rmAxioms.add(aaa); } } } } } } } } for (Integer k : binSizeMap.keySet()) { LOG.info(" | Bin( "+k+" classes ) | = "+binSizeMap.get(k)); } g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms); LOG.info("Mapping "+e2iri.size()+" entities"); // TODO - this is slow List<OWLOntologyChange> changes = oer.changeIRI(e2iri); g.getManager().applyChanges(changes); LOG.info("Mapped "+e2iri.size()+" entities!"); } else if (opts.nextEq("--rename-entities-via-equivalent-classes")) { opts.info("[-p PREFIX]", "renames entities in source ontology, using equiv axioms from all"); String prefix = null; String 
prefixTo = null; while (opts.hasOpts()) { if (opts.nextEq("-p|--prefix")) { opts.info("", "prefix to map from (FULL URI)"); prefix = opts.nextOpt(); } else if (opts.nextEq("-q|--prefix-to")) { opts.info("", "prefix to map to (FULL URI)"); prefixTo = opts.nextOpt(); } else break; } Map<OWLEntity,IRI> e2iri = new HashMap<OWLEntity,IRI>(); OWLEntityRenamer oer = new OWLEntityRenamer(g.getManager(), g.getAllOntologies()); Set<IRI> entities = new HashSet<IRI>(); for (OWLClass c : g.getSourceOntology().getClassesInSignature()) { entities.add(c.getIRI()); } for (OWLAnnotationAssertionAxiom aaa : g.getSourceOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION)) { if (aaa.getSubject() instanceof IRI) { entities.add((IRI) aaa.getSubject()); } } // we only map classes in the source ontology - however, // we use equivalence axioms from the full complement of ontologies // TODO - allow arbitrary entities for (IRI e : entities) { for (OWLOntology ont : g.getAllOntologies()) { OWLClass c = ont.getOWLOntologyManager().getOWLDataFactory().getOWLClass(e); if (prefix != null && !c.getIRI().toString().startsWith(prefix)) { continue; } for (OWLClassExpression d : OwlHelper.getEquivalentClasses(c, ont)) { if (d instanceof OWLClass) { if (prefixTo != null && !d.asOWLClass().getIRI().toString().startsWith(prefixTo)) { continue; } e2iri.put(c, d.asOWLClass().getIRI()); LOG.info(" "+c+" ==> "+d ); } } } } LOG.info("Mapping "+e2iri.size()+" entities"); // TODO - this is slow List<OWLOntologyChange> changes = oer.changeIRI(e2iri); g.getManager().applyChanges(changes); LOG.info("Mapped "+e2iri.size()+" entities!"); } else if (opts.nextEq("--query-cw")) { opts.info("", "closed-world query"); OWLPrettyPrinter owlpp = getPrettyPrinter(); for (OWLClass qc : queryExpressionMap.keySet()) { System.out.println(" CWQueryClass: "+qc); System.out.println(" CWQueryClass: "+owlpp.render(qc)+" "+qc.getIRI().toString()); OWLClassExpression ec = queryExpressionMap.get(qc); System.out.println(" 
CWQueryExpression: "+owlpp.render(ec)); Set<OWLObject> results = g.queryDescendants(ec); for (OWLObject result : results) { if (result instanceof OWLClass) { System.out.println(" "+owlpp.render((OWLClass)result)); } } } } else if (opts.nextEq("--extract-ontology-metadata")) { opts.info("[-c ONT-IRI]", "extracts annotations from ontology"); String mdoIRI = "http://x.org"; while (opts.hasOpts()) { if (opts.nextEq("-c")) { mdoIRI = opts.nextOpt(); } else break; } OWLOntology mdo = ImportChainExtractor.extractOntologyMetadata(g, mdoIRI); g.setSourceOntology(mdo); } else if (opts.nextEq("--write-imports-dot")) { opts.info("OUT", "writes imports chain as dot file"); String output = opts.nextOpt(); ImportChainDotWriter writer = new ImportChainDotWriter(g); writer.renderDot(g.getSourceOntology(), g.getOntologyId(), output, true); } else if (opts.nextEq("--ontology-metadata-to-markdown")) { opts.info("OUT", "writes md from ontology metadata"); String output = opts.nextOpt(); BufferedWriter fileWriter = new BufferedWriter(new FileWriter(new File(output))); String s = OntologyMetadataMarkdownWriter.renderMarkdown(g, ".", true); fileWriter.write(s); fileWriter.close(); } else if (opts.nextEq("--ontology-to-markdown")) { opts.info("[-l LEVEL] DIR", "writes md from ontology"); int level = 2; while (opts.hasOpts()) { if (opts.nextEq("-l|--level")) { level = Integer.parseInt(opts.nextOpt()); } else break; } String dir = opts.nextOpt(); MarkdownRenderer mr = new MarkdownRenderer(); mr.setChunkLevel(level); mr.render(g.getSourceOntology(), dir); } else if (opts.nextEq("--add-obo-shorthand-to-properties")) { Set<OWLObjectProperty> props = g.getSourceOntology().getObjectPropertiesInSignature(Imports.INCLUDED); OWLDataFactory df = g.getDataFactory(); Set<OWLAxiom> addAxioms = new HashSet<OWLAxiom>(); Set<OWLAxiom> removeAxioms = new HashSet<OWLAxiom>(); final String MODE_MISSING = "missing"; // add missing axioms final String MODE_REPLACE = "replace"; // replace all axioms final String 
MODE_ADD = "add"; // old mode, which is very broken String mode = MODE_MISSING; // safe default, only add missing axioms while (opts.hasOpts()) { if (opts.nextEq("-m|--add-missing")) { mode = MODE_MISSING; } else if (opts.nextEq("-r|--replace-all")) { mode = MODE_REPLACE; } else if (opts.nextEq("--always-add")) { // this models the old broken behavior, generally not recommended mode = MODE_ADD; } else { break; } } if (MODE_ADD.equals(mode)) { LOG.warn("Using the always add mode is not recommended. Make an explicit choice by either add missing '-m' or replace all '-r' shorthand information."); } final OWLAnnotationProperty shorthandProperty = df.getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#shorthand")); final OWLAnnotationProperty xrefProperty = df.getOWLAnnotationProperty(IRI.create("http://www.geneontology.org/formats/oboInOwl#hasDbXref")); for (OWLObjectProperty prop : props) { if (prop.isBuiltIn()) { continue; } IRI entity = prop.getIRI(); // retrieve existing Set<OWLAnnotationAssertionAxiom> annotationAxioms = g.getSourceOntology().getAnnotationAssertionAxioms(entity); Set<OWLAnnotationAssertionAxiom> shorthandAxioms = new HashSet<OWLAnnotationAssertionAxiom>(); Set<OWLAnnotationAssertionAxiom> xrefAxioms = new HashSet<OWLAnnotationAssertionAxiom>(); for (OWLAnnotationAssertionAxiom axiom : annotationAxioms) { OWLAnnotationProperty property = axiom.getProperty(); if (shorthandProperty.equals(property)) { shorthandAxioms.add(axiom); } else if (xrefProperty.equals(property)) { xrefAxioms.add(axiom); } } // check what needs to be added boolean addShortHand = false; boolean addXref = false; if (MODE_REPLACE.equals(mode)) { // replace existing axioms removeAxioms.addAll(shorthandAxioms); removeAxioms.addAll(xrefAxioms); addShortHand = true; addXref = true; } else if (MODE_MISSING.equals(mode)) { // add missing axioms addShortHand = shorthandAxioms.isEmpty(); addXref = xrefAxioms.isEmpty(); } else if (MODE_ADD.equals(mode)) { // 
old broken mode: regardless of current axioms always add axioms addShortHand = true; addXref = true; } // create required axioms if (addShortHand) { // shorthand String id = g.getLabel(prop); if (id != null) { id = id.replaceAll(" ", "_"); OWLAxiom ax = df.getOWLAnnotationAssertionAxiom( shorthandProperty, prop.getIRI(), df.getOWLLiteral(id)); addAxioms.add(ax); LOG.info(ax); } else { LOG.error("No label: "+prop); } } if (addXref) { // xref to OBO style ID String pid = Owl2Obo.getIdentifier(prop.getIRI()); OWLAxiom ax = df.getOWLAnnotationAssertionAxiom( xrefProperty, prop.getIRI(), df.getOWLLiteral(pid)); addAxioms.add(ax); LOG.info(ax); } } // update axioms if (removeAxioms.isEmpty() == false) { LOG.info("Total axioms removed: "+removeAxioms.size()); g.getManager().addAxioms(g.getSourceOntology(), removeAxioms); } if (addAxioms.isEmpty() == false) { LOG.info("Total axioms added: "+addAxioms.size()); g.getManager().addAxioms(g.getSourceOntology(), addAxioms); } } else if (opts.nextEq("--extract-properties")) { LOG.warn("Deprecated - use --extract-module"); opts.info("[-p PROP]* [--list PLIST] [--no-shorthand]", "extracts properties from source ontology. If properties not specified, then support ontologies will be used"); Set<OWLProperty> props = new HashSet<OWLProperty>(); boolean useProps = false; boolean isCreateShorthand = true; boolean isExpansive = false; boolean isUseSubProperties = false; UUID uuid = UUID.randomUUID(); IRI newIRI = IRI.create("http://purl.obolibrary.org/obo/temporary/"+uuid.toString()); while (opts.hasOpts()) { if (opts.nextEq("-p")) { opts.info("PROP", "Add this property to the set of interest"); props.add(this.resolveObjectProperty(opts.nextOpt())); useProps = true; } else if (opts.nextEq("-s|--subproperties")) { opts.info("", "If set, subproperties are used"); isUseSubProperties = true; } else if (opts.nextEq("--list")) { opts.info("PROPLIST", "Terminated by '//'. Add these properties to the set of interest. 
ALL-POPERTIES for all"); Set<OWLObjectProperty> nprops = this.resolveObjectPropertyList(opts); props.addAll(nprops); useProps = true; } else if (opts.nextEq("--no-shorthand")) { opts.info("", "Do not create OBO shorthands. Resulting OBO format will use numeric IDs as primary"); isCreateShorthand = false; } else { break; } } PropertyExtractor pe; pe = new PropertyExtractor(g.getSourceOntology()); pe.isCreateShorthand = isCreateShorthand; pe.isUseSubProperties = isUseSubProperties; OWLOntology pont; if (useProps) { // use user-specified proeprty list pont = pe.extractPropertyOntology(newIRI, props); } else { // use the support ontology as the source of property usages pont = pe.extractPropertyOntology(newIRI, g.getSupportOntologySet().iterator().next()); } g.setSourceOntology(pont); } else if (opts.nextEq("--extract-mingraph")) { opts.info("", "Extracts a minimal graph ontology containing only label, subclass and equivalence axioms"); String idspace = null; boolean isPreserveOntologyAnnotations = false; while (opts.hasOpts()) { if (opts.nextEq("--idspace")) { opts.info("IDSPACE", "E.g. GO. 
If set, only the reflexive closure of this ontology will be included"); idspace = opts.nextOpt(); } else if (opts.nextEq("-a|--preserve-ontology-annotations")) { opts.info("", "Set if ontology header is to be preserved"); isPreserveOntologyAnnotations = true; } else { break; } } Set <OWLClass> seedClasses = new HashSet<OWLClass>(); OWLOntology src = g.getSourceOntology(); Set<OWLAxiom> axioms = new HashSet<OWLAxiom>(); Set<OWLAnnotation> anns = new HashSet<OWLAnnotation>(); if (isPreserveOntologyAnnotations) { anns = src.getAnnotations(); } axioms.addAll(src.getAxioms(AxiomType.SUBCLASS_OF)); axioms.addAll(src.getAxioms(AxiomType.EQUIVALENT_CLASSES)); for (OWLAnnotationAssertionAxiom aaa : src.getAxioms(AxiomType.ANNOTATION_ASSERTION)) { if (aaa.getProperty().isLabel()) { axioms.add(aaa); //LOG.info("LABEL:"+aaa); } } removeAxiomsReferencingDeprecatedClasses(axioms); LOG.info("#axioms: "+axioms.size()); for (OWLClass c : src.getClassesInSignature()) { String id = g.getIdentifier(c); if (idspace == null || id.startsWith(idspace+":")) { boolean isDep = false; for (OWLAnnotation ann : OwlHelper.getAnnotations(c, src)) { if (ann.isDeprecatedIRIAnnotation()) { isDep = true; break; } } if (!isDep) { seedClasses.add(c); } } } LOG.info("#classes: "+seedClasses.size()); g.addSupportOntology(src); OWLOntology newOnt = src.getOWLOntologyManager().createOntology(axioms); Set<OWLClass> retainedClasses = removeUnreachableAxioms(newOnt, seedClasses); for (OWLClass c : retainedClasses) { newOnt.getOWLOntologyManager().addAxiom(newOnt, g.getDataFactory().getOWLDeclarationAxiom(c)); } PropertyExtractor pe; pe = new PropertyExtractor(src); pe.isCreateShorthand = true; OWLOntology pont; HashSet<OWLProperty> props = new HashSet<OWLProperty>(); for (OWLObjectProperty p : newOnt.getObjectPropertiesInSignature()) { props.add(p); } pont = pe.extractPropertyOntology(null, props); axioms = new HashSet<OWLAxiom>(); for (OWLAxiom axiom : pont.getAxioms()) { if (axiom instanceof 
OWLObjectPropertyCharacteristicAxiom) { axioms.add(axiom); } else if (axiom instanceof OWLSubObjectPropertyOfAxiom) { axioms.add(axiom); } else if (axiom instanceof OWLSubPropertyChainOfAxiom) { axioms.add(axiom); } else if (axiom instanceof OWLAnnotationAssertionAxiom) { OWLAnnotationAssertionAxiom aaa = (OWLAnnotationAssertionAxiom) axiom; if (aaa.getProperty().isLabel()) { axioms.add(axiom); } else if (aaa.getProperty().getIRI().toString().toLowerCase().contains("shorthand")) { // TODO: fix hack axioms.add(axiom); } else if (aaa.getProperty().getIRI().toString().toLowerCase().contains("xref")) { // TODO: fix hack axioms.add(axiom); } } else if (axiom instanceof OWLDeclarationAxiom) { axioms.add(axiom); } } newOnt.getOWLOntologyManager().addAxioms(newOnt, axioms); g.setSourceOntology(newOnt); for (OWLAnnotation ann : anns ) { AddOntologyAnnotation addAnn = new AddOntologyAnnotation(g.getSourceOntology(), ann); g.getManager().applyChange(addAnn); } //g.mergeOntology(pont); AxiomAnnotationTools.reduceAxiomAnnotationsToOboBasic(newOnt); OboInOwlCardinalityTools.checkAnnotationCardinality(newOnt); } else if (opts.nextEq("--extract-axioms")) { opts.info("[-t TYPE]", "Extracts axioms of specified type into the source ontology (existing source is moved to support)"); AxiomType axiomType = AxiomType.EQUIVALENT_CLASSES; while (opts.hasOpts()) { if (opts.nextEq("-t|--type")) { opts.info("AxiomType", "OWL2 syntax for axiom type. Default is EquivalentClasses"); String axiomTypeStr = opts.nextOpt(); axiomType = AxiomType.getAxiomType(axiomTypeStr); if (axiomType == null) { throw new OptionException("invalid axiom type "+axiomTypeStr+" -- must be OWL2 syntax, e.g. 
'SubClassOf'"); } } else { break; } } OWLOntology src = g.getSourceOntology(); LOG.info("axiomType = "+axiomType); Set<OWLAxiom> axioms = src.getAxioms(axiomType); LOG.info("#axioms: "+axioms.size()); g.addSupportOntology(src); OWLOntology newOnt = src.getOWLOntologyManager().createOntology(axioms); g.setSourceOntology(newOnt); } else if (opts.nextEq("--extract-bridge-ontologies")) { opts.info("[-d OUTDIR] [-x] [-s ONTID]", ""); String dir = "bridge/"; String ontId = null; boolean isRemoveBridgeAxiomsFromSource = false; RDFXMLDocumentFormat fmt = new RDFXMLDocumentFormat(); while (opts.hasOpts()) { if (opts.nextEq("-d")) { opts.info("DIR", "bridge files are generated in this directory. Default: ./bridge/"); dir = opts.nextOpt(); } else if (opts.nextEq("-c")) { opts.info("TGT SRCLIST", "Combines all src onts to tgt. TODO"); String tgt = opts.nextOpt(); List<String> srcs = opts.nextList(); } else if (opts.nextEq("-x")) { opts.info("", "If specified, bridge axioms are removed from the source"); isRemoveBridgeAxiomsFromSource = true; } else if (opts.nextEq("-s")) { opts.info("ONTID", "If specified, ..."); ontId = opts.nextOpt(); } else { break; } } BridgeExtractor be = new BridgeExtractor(g.getSourceOntology()); be.subDir = dir; be.extractBridgeOntologies(ontId, isRemoveBridgeAxiomsFromSource); be.saveBridgeOntologies(dir, fmt); } else if (opts.nextEq("--expand-macros")) { opts.info("", "performs expansion on assertions and expressions. 
"+ "See http://oboformat.googlecode.com/svn/trunk/doc/obo-syntax.html#7"); MacroExpansionVisitor mev = new MacroExpansionVisitor(g.getSourceOntology()); mev.expandAll(); } else if (opts.nextEq("--expand-expression")) { opts.info("PROP EXPRESSION", "uses OBO Macros to expand expressions with PROP to the target expression using ?Y"); OWLObjectProperty p = resolveObjectProperty(opts.nextOpt()); String expr = opts.nextOpt(); OWLAnnotationAssertionAxiom aaa = g.getDataFactory().getOWLAnnotationAssertionAxiom( g.getDataFactory().getOWLAnnotationProperty(Obo2OWLVocabulary.IRI_IAO_0000424.getIRI()), p.getIRI(), g.getDataFactory().getOWLLiteral(expr)); g.getManager().addAxiom(g.getSourceOntology(), aaa); MacroExpansionVisitor mev = new MacroExpansionVisitor(g.getSourceOntology()); mev.expandAll(); } else if (opts.nextEq("--expand-assertion")) { opts.info("PROP ASSERTION", "uses OBO Macros to expand expressions with PROP to the target expression using ?X and ?Y"); OWLNamedObject p = (OWLNamedObject) this.resolveEntity(opts.nextOpt()); String expr = opts.nextOpt(); OWLAnnotationAssertionAxiom aaa = g.getDataFactory().getOWLAnnotationAssertionAxiom( g.getDataFactory().getOWLAnnotationProperty(Obo2OWLVocabulary.IRI_IAO_0000425.getIRI()), p.getIRI(), g.getDataFactory().getOWLLiteral(expr)); g.getManager().addAxiom(g.getSourceOntology(), aaa); MacroExpansionVisitor mev = new MacroExpansionVisitor(g.getSourceOntology()); mev.expandAll(); } else if (opts.nextEq("--sparql-dl")) { opts.info("\"QUERY-TEXT\"", "executes a SPARQL-DL query using the reasoner"); /* Examples: * SELECT * WHERE { SubClassOf(?x,?y)} */ if (reasoner == null) { reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); } String q = opts.nextOpt(); System.out.println("Q="+q); OWLPrettyPrinter owlpp = new OWLPrettyPrinter(g); try { QueryEngine engine; Query query = Query.create(q); engine = QueryEngine.create(g.getManager(), reasoner, true); QueryResult result = engine.execute(query); 
if(query.isAsk()) { System.out.print("Result: "); if(result.ask()) { System.out.println("yes"); } else { System.out.println("no"); } } else { if(!result.ask()) { System.out.println("Query has no solution.\n"); } else { System.out.println("Results:"); for (int i=0; i < result.size(); i++) { System.out.print("["+i+"] "); QueryBinding qb = result.get(i); for (QueryArgument qa : qb.getBoundArgs()) { String k = qa.toString(); System.out.print(" "+k+"="); QueryArgument v = qb.get(qa); String out = v.toString(); if (v.getType().equals(QueryArgumentType.URI)) { out = owlpp.renderIRI(v.toString()); } System.out.print(out+"; "); } System.out.println(""); } //System.out.print(result); System.out.println("-------------------------------------------------"); System.out.println("Size of result set: " + result.size()); } } } catch (QueryParserException e) { // TODO Auto-generated catch block e.printStackTrace(); } catch (QueryEngineException e) { // TODO Auto-generated catch block e.printStackTrace(); } } else if (opts.nextEq("--remove-abox")) { opts.info("", "removes all named individual declarations and all individual axioms (e.g. 
class/property assertion"); for (OWLOntology ont : g.getAllOntologies()) { Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); rmAxioms.addAll(ont.getAxioms(AxiomType.DIFFERENT_INDIVIDUALS)); rmAxioms.addAll(ont.getAxioms(AxiomType.CLASS_ASSERTION)); rmAxioms.addAll(ont.getAxioms(AxiomType.OBJECT_PROPERTY_ASSERTION)); for (OWLNamedIndividual ind : ont.getIndividualsInSignature()) { rmAxioms.add(g.getDataFactory().getOWLDeclarationAxiom(ind)); } g.getManager().removeAxioms(ont, rmAxioms); } } else if (opts.nextEq("--remove-tbox")) { opts.info("", "removes all class axioms"); for (OWLOntology ont : g.getAllOntologies()) { Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); for (OWLAxiom ax : ont.getAxioms()) { if (ax instanceof OWLClassAxiom) { rmAxioms.add(ax); } else if (ax instanceof OWLDeclarationAxiom) { if ( ((OWLDeclarationAxiom)ax).getEntity() instanceof OWLClass) { rmAxioms.add(ax); } } else if (ax instanceof OWLAnnotationAssertionAxiom) { OWLAnnotationSubject subj = ((OWLAnnotationAssertionAxiom)ax).getSubject(); if (subj instanceof IRI) { // warning - excessive pruning if there is punning if (ont.getClassesInSignature(Imports.INCLUDED).contains(g.getDataFactory().getOWLClass((IRI) subj))) { rmAxioms.add(ax); } } } } g.getManager().removeAxioms(ont, rmAxioms); } } else if (opts.nextEq("--i2c")) { opts.info("[-s]", "Converts individuals to classes"); boolean isReplaceOntology = false; while (opts.hasOpts()) { if (opts.nextEq("-s")) { isReplaceOntology = true; } else { break; } } Set<OWLAxiom> axs = new HashSet<OWLAxiom>(); OWLOntology ont = g.getSourceOntology(); for (OWLNamedIndividual i : ont.getIndividualsInSignature()) { OWLClass c = g.getDataFactory().getOWLClass(i.getIRI()); for (OWLClassExpression ce : OwlHelper.getTypes(i, ont)) { axs.add(g.getDataFactory().getOWLSubClassOfAxiom(c, ce)); } //g.getDataFactory().getOWLDe for (OWLClassAssertionAxiom ax : ont.getClassAssertionAxioms(i)) { g.getManager().removeAxiom(ont, ax); } for (OWLDeclarationAxiom ax : 
ont.getDeclarationAxioms(i)) { g.getManager().removeAxiom(ont, ax); } //g.getDataFactory().getOWLDeclarationAxiom(owlEntity) } if (isReplaceOntology) { for (OWLAxiom ax : g.getSourceOntology().getAxioms()) { g.getManager().removeAxiom(ont, ax); } } for (OWLAxiom axiom : axs) { g.getManager().addAxiom(ont, axiom); } } else if (opts.nextEq("--init-reasoner")) { opts.info("[-r reasonername]", "Creates a reasoner object"); while (opts.hasOpts()) { if (opts.nextEq("-r")) { reasonerName = opts.nextOpt(); } else { break; } } reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); } else if (opts.nextEq("--reasoner-query")) { opts.info("[-r reasonername] [-m] [-d] [-a] [-x] [-c IRI] (--stdin | CLASS-EXPRESSION | -l CLASS-LABEL)", "Queries current ontology for descendants, ancestors and equivalents of CE using reasoner.\n"+ "Enclose all labels in quotes (--stdin only). E.g. echo \"'part of' some 'tentacle'\" | owltools ceph.owl --reasoner-query --stdin"); boolean isManifest = false; boolean isDescendants = true; boolean isIndividuals = false; boolean isAncestors = true; boolean isEquivalents = true; boolean isExtended = false; boolean isCache = false; boolean isRemoveUnsatisfiable = false; boolean isSubOntExcludeClosure = false; String subOntologyIRI = null; OWLClassExpression ce = null; String expression = null; while (opts.hasOpts()) { if (opts.nextEq("-r")) { reasoner = null; reasonerName = opts.nextOpt(); if (reasonerName.toLowerCase().equals("elk")) isManifest = true; } else if (opts.nextEq("-m")) { opts.info("", "manifests the class exression as a class equivalent to query CE and uses this as a query; required for older versions of Elk"); isManifest = true; } else if (opts.nextEq("-d")) { opts.info("", "show descendants, but not ancestors (default is both + equivs)"); isDescendants = true; isAncestors = false; } else if (opts.nextEq("-a")) { opts.info("", "show ancestors, but not descendants (default is both + equivs)"); isDescendants = false; 
isAncestors = true; } else if (opts.nextEq("-e")) { opts.info("", "show equivalents only (default is ancestors + descendants + equivs)"); isDescendants = false; isAncestors = false; } else if (opts.nextEq("-i")) { opts.info("", "show inferred individuals, as well as ancestors/descendants/equivalents"); isIndividuals = true; } else if (opts.nextEq("--stdin")) { try { BufferedReader in = new BufferedReader(new InputStreamReader(System.in)); System.out.print("> QUERY: "); expression = in.readLine(); } catch (IOException e) { } } else if (opts.nextEq("-x")) { isExtended = true; } else if (opts.nextEq("-c")) { if (opts.nextEq("--exclude-closure")) isSubOntExcludeClosure = true; subOntologyIRI = opts.nextOpt(); } else if (opts.nextEq("--cache")) { isCache = true; } else if (opts.nextEq("-l")) { ce = (OWLClassExpression) resolveEntity(opts); } else { break; } } if (ce == null && expression == null) expression = opts.nextOpt(); OWLPrettyPrinter owlpp = getPrettyPrinter(); Set<OWLClass> results = new HashSet<OWLClass>(); ManchesterSyntaxTool parser = new ManchesterSyntaxTool(g.getSourceOntology(), g.getSupportOntologySet()); try { if (ce == null) { System.out.println("# PARSING: "+expression); ce = parser.parseManchesterExpression(expression); } System.out.println("# QUERY: "+owlpp.render(ce)); if (ce instanceof OWLClass) results.add((OWLClass) ce); // some reasoners such as elk cannot query using class expressions - we manifest // the class expression as a named class in order to bypass this limitation if (isManifest && !(ce instanceof OWLClass)) { System.err.println("-m deprecated: consider using --reasoner welk"); OWLClass qc = g.getDataFactory().getOWLClass(IRI.create("http://owltools.org/Q")); g.getManager().removeAxioms(g.getSourceOntology(), g.getSourceOntology().getAxioms(qc, Imports.EXCLUDED)); OWLEquivalentClassesAxiom ax = g.getDataFactory().getOWLEquivalentClassesAxiom(ce, qc); g.getManager().addAxiom(g.getSourceOntology(), ax); if (reasoner != null) 
reasoner.flush(); ce = qc; } ExpressionMaterializingReasoner xr = null; if (isExtended) { if (reasoner != null) { LOG.error("Reasoner should NOT be set prior to creating EMR - unsetting"); } xr = new ExpressionMaterializingReasoner(g.getSourceOntology(), createReasonerFactory(reasonerName)); LOG.info("materializing... [doing this before initializing reasoner]"); xr.materializeExpressions(); LOG.info("set extended reasoner: "+xr); reasoner = xr; } else { if (reasoner == null) { reasoner = createReasoner(g.getSourceOntology(), reasonerName, g.getManager()); } } if (isIndividuals) { for (OWLNamedIndividual r : reasoner.getInstances(ce, false).getFlattened()) { //results.add(r); if (!isCache) System.out.println("D: "+owlpp.render(r)); } } if (isEquivalents) { for (OWLClass r : reasoner.getEquivalentClasses(ce).getEntities()) { results.add(r); if (!isCache) System.out.println("E: "+owlpp.render(r)); } } if (isDescendants) { for (OWLClass r : reasoner.getSubClasses(ce, false).getFlattened()) { results.add(r); if (!isCache) System.out.println("D: "+owlpp.render(r)); } } if (isAncestors) { if (isExtended) { for (OWLClassExpression r : ((OWLExtendedReasoner) reasoner).getSuperClassExpressions(ce, false)) { if (r instanceof OWLClass) results.add((OWLClass) r); if (!isCache) System.out.println("A:"+owlpp.render(r)); } } else { for (OWLClass r : reasoner.getSuperClasses(ce, false).getFlattened()) { results.add(r); if (!isCache) System.out.println("A:"+owlpp.render(r)); } } } } catch (OWLParserException e) { // TODO Auto-generated catch block e.printStackTrace(); } finally { // always dispose parser to avoid a memory leak parser.dispose(); } if (owlObjectCachedSet == null) owlObjectCachedSet = new HashSet<OWLObject>(); owlObjectCachedSet.addAll(results); // --- // Create a sub-ontology // --- if (subOntologyIRI != null) { //g.mergeImportClosure(); QuerySubsetGenerator subsetGenerator = new QuerySubsetGenerator(); OWLOntology srcOnt = g.getSourceOntology(); 
g.setSourceOntology(g.getManager().createOntology(IRI.create(subOntologyIRI))); g.addSupportOntology(srcOnt); subsetGenerator.createSubSet(g, results, g.getSupportOntologySet(), isSubOntExcludeClosure, isSubOntExcludeClosure); } } else if (opts.nextEq("--make-ontology-from-results")) { // TODO - use Mooncat opts.info("[-m] [-f] IRI", "takes the most recent reasoner query and generates a subset ontology using ONLY classes from results"); boolean followClosure = false; boolean useMooncat = false; while (opts.hasOpts()) { if (opts.nextEq("-f|--follow-closure|--fill-gaps")) { opts.info("", "using mooncat will have the effect of including the graph closure of all results in the output ontology"); followClosure = true; } else if (opts.nextEq("-m|--use-mooncat")) { opts.info("", "using mooncat will have the effect of including the graph closure of all results in the output ontology"); useMooncat = true; } else break; } if (followClosure) useMooncat = true; String subOntologyIRI = opts.nextOpt(); if (useMooncat) { Mooncat m = new Mooncat(g); Set<OWLClass> cs = new HashSet<OWLClass>(); for (OWLObject obj : owlObjectCachedSet) { if (obj instanceof OWLClass) cs.add((OWLClass) obj); } // TODO OWLOntology subOnt = m.makeMinimalSubsetOntology(cs, IRI.create(subOntologyIRI), followClosure); g.setSourceOntology(subOnt); } else { Set<OWLAxiom> subsetAxioms = new HashSet<OWLAxiom>(); Set <OWLObjectProperty> objPropsUsed = new HashSet<OWLObjectProperty>(); for (OWLOntology mergeOntology : g.getAllOntologies()) { for (OWLObject cls : owlObjectCachedSet) { if (cls instanceof OWLClass) { // TODO - translate equivalence axioms; assume inferred for now for (OWLAxiom ax : mergeOntology.getAxioms((OWLClass)cls, Imports.EXCLUDED)) { boolean ok = true; for (OWLClass refCls : ax.getClassesInSignature()) { if (!owlObjectCachedSet.contains(refCls)) { LOG.info("Skipping: "+ax); ok = false; break; } } if (ok) subsetAxioms.add(ax); } for (OWLAxiom ax : 
mergeOntology.getAnnotationAssertionAxioms(((OWLClass)cls).getIRI())) { subsetAxioms.add(ax); } } subsetAxioms.add(g.getDataFactory().getOWLDeclarationAxiom(((OWLClass)cls))); } } for (OWLAxiom ax : subsetAxioms) { objPropsUsed.addAll(ax.getObjectPropertiesInSignature()); } for (OWLObjectProperty p : objPropsUsed) { for (OWLOntology mergeOntology : g.getAllOntologies()) { subsetAxioms.addAll(mergeOntology.getAxioms(p, Imports.EXCLUDED)); subsetAxioms.addAll(mergeOntology.getAnnotationAssertionAxioms(p.getIRI())); } } OWLOntology subOnt = g.getManager().createOntology(IRI.create(subOntologyIRI)); g.getManager().addAxioms(subOnt, subsetAxioms); g.setSourceOntology(subOnt); } } else if (opts.nextEq("--remove-equivalent-to-nothing-axioms")) { Set<OWLAxiom> axs = new HashSet<OWLAxiom>(); OWLClass nothing = g.getDataFactory().getOWLNothing(); for (OWLAxiom ax : g.getSourceOntology().getAxioms(AxiomType.EQUIVALENT_CLASSES)) { if (ax.getClassesInSignature().contains(nothing)) { axs.add(ax); } } g.getManager().removeAxioms(g.getSourceOntology(), axs); } else if (opts.nextEq("--check-disjointness-axioms")) { opts.info("", "DEPRECATED: this command precedes the ability of Elk to check disjointness"); boolean isTranslateEquivalentToNothing = true; OWLPrettyPrinter owlpp = getPrettyPrinter(); OWLOntology ont = g.getSourceOntology(); Set<OWLObjectIntersectionOf> dPairs = new HashSet<OWLObjectIntersectionOf>(); Map<OWLClassExpression, Set<OWLClassExpression>> dMap = new HashMap<OWLClassExpression, Set<OWLClassExpression>>(); OWLClass nothing = g.getDataFactory().getOWLNothing(); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); if (isTranslateEquivalentToNothing) { // TODO for (OWLEquivalentClassesAxiom eca : ont.getAxioms(AxiomType.EQUIVALENT_CLASSES)) { if (eca.contains(nothing)) { for (OWLClassExpression x : eca.getClassExpressionsMinus(nothing)) { if (x instanceof OWLObjectIntersectionOf) { dPairs.add((OWLObjectIntersectionOf) x); System.out.println("TRANSLATED:"+x); } } 
rmAxioms.add(eca); } } } for (OWLDisjointClassesAxiom dca : ont.getAxioms(AxiomType.DISJOINT_CLASSES)) { for (OWLClassExpression x : dca.getClassExpressions()) { for (OWLClassExpression y : dca.getClassExpressions()) { if (!x.equals(y)) { dPairs.add(g.getDataFactory().getOWLObjectIntersectionOf(x,y)); } } } } g.getManager().removeAxioms(ont, ont.getAxioms(AxiomType.DISJOINT_CLASSES)); g.getManager().removeAxioms(ont, rmAxioms); reasoner.flush(); for (OWLObjectIntersectionOf x : dPairs) { //System.out.println("TESTING: "+owlpp.render(x)+" using "+reasoner); for (Node<OWLClass> v : reasoner.getSubClasses(x, false)) { if (v.contains(nothing)) continue; System.out.println("VIOLATION: "+owlpp.render(v.getRepresentativeElement())+" SubClassOf "+owlpp.render(x)); } } } else if (opts.nextEq("--remove-disjoints")) { List<AxiomType<? extends OWLAxiom>> disjointTypes = new ArrayList<AxiomType<? extends OWLAxiom>>(); disjointTypes.add(AxiomType.DISJOINT_CLASSES); disjointTypes.add(AxiomType.DISJOINT_UNION); disjointTypes.add(AxiomType.DISJOINT_OBJECT_PROPERTIES); disjointTypes.add(AxiomType.DISJOINT_DATA_PROPERTIES); for(AxiomType<? extends OWLAxiom> axtype : disjointTypes) { Set<? 
extends OWLAxiom> axioms = g.getSourceOntology().getAxioms(axtype); if (axioms.isEmpty() == false) { g.getManager().removeAxioms(g.getSourceOntology(), axioms); } } } else if (opts.nextEq("--abox-to-tbox")) { ABoxUtils.translateABoxToTBox(g.getSourceOntology()); } else if (opts.nextEq("--make-default-abox")) { ABoxUtils.makeDefaultIndividuals(g.getSourceOntology()); } else if (opts.nextEq("--tbox-to-abox")) { OWLInAboxTranslator t = new OWLInAboxTranslator(g.getSourceOntology()); while (opts.hasOpts()) { if (opts.nextEq("-p|--preserve-iris|--preserve-object-properties")) { opts.info("", "Use the same OP IRIs for ABox shows (danger will robinson!)"); t.setPreserveObjectPropertyIRIs(true); } else { break; } } OWLOntology abox = t.translate(); g.setSourceOntology(abox); } else if (opts.nextEq("--map-abox-to-results")) { Set<OWLClass> cs = new HashSet<OWLClass>(); for (OWLObject obj : owlObjectCachedSet) { if (obj instanceof OWLClass) cs.add((OWLClass) obj); } ABoxUtils.mapClassAssertionsUp(g.getSourceOntology(), reasoner, cs, null); } else if (opts.nextEq("--map-abox-to-namespace")) { String ns = opts.nextOpt(); Set<OWLClass> cs = new HashSet<OWLClass>(); for (OWLClass c : g.getSourceOntology().getClassesInSignature(Imports.INCLUDED)) { if (c.getIRI().toString().startsWith(ns)) cs.add(c); } ABoxUtils.mapClassAssertionsUp(g.getSourceOntology(), reasoner, cs, null); } else if (opts.nextEq("--reasoner-ask-all")) { opts.info("[-r REASONERNAME] [-s] [-a] AXIOMTYPE", "list all inferred equivalent named class pairs"); boolean isReplaceOntology = false; boolean isAddToCurrentOntology = false; boolean isDirect = true; boolean isRemoveIndirect = false; while (opts.hasOpts()) { if (opts.nextEq("-r")) { opts.info("REASONERNAME", "E.g. 
elk"); reasonerName = opts.nextOpt(); } else if (opts.nextEq("-s")) { opts.info("", "Replaces ALL axioms in ontology with inferred axioms"); isReplaceOntology = true; } else if (opts.nextEq("-a")) { opts.info("", "Add inferred axioms to current ontology"); isAddToCurrentOntology = true; } else if (opts.nextEq("--remove-indirect")) { opts.info("", "Remove indirect assertions from current ontology"); isRemoveIndirect = true; } else if (opts.nextEq("--indirect")) { opts.info("", "Include indirect inferences"); isDirect = false; } else { break; } } if (reasoner == null) { reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); } if (isRemoveIndirect && !isAddToCurrentOntology) { System.err.println("You asked to remove indirect but not to assert direct - I am proceeding, but check this is what you want"); } if (isRemoveIndirect && !isDirect) { System.err.println("You asked to remove indirect and yet you want indirect inferences - invalid combination"); System.exit(1); } Set<OWLAxiom> iAxioms = new HashSet<OWLAxiom>(); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); String q = opts.nextOpt().toLowerCase(); OWLPrettyPrinter owlpp = getPrettyPrinter(); OWLOntology ont = g.getSourceOntology(); for (OWLClass c : g.getSourceOntology().getClassesInSignature()) { if (q.startsWith("e")) { for (OWLClass ec : reasoner.getEquivalentClasses(c)) { OWLEquivalentClassesAxiom ax = g.getDataFactory().getOWLEquivalentClassesAxiom(c, ec); if (!ont.containsAxiom(ax, Imports.INCLUDED, AxiomAnnotations.IGNORE_AXIOM_ANNOTATIONS)) { LOG.info("INFERRED: "+owlpp.render(ax)); iAxioms.add(ax); } } } else if (q.startsWith("s")) { Set<OWLClass> supers = reasoner.getSuperClasses(c, isDirect).getFlattened(); for (OWLClass sc : supers) { OWLSubClassOfAxiom ax = g.getDataFactory().getOWLSubClassOfAxiom(c, sc); ax.getObjectPropertiesInSignature(); if (!ont.containsAxiom(ax, Imports.INCLUDED, AxiomAnnotations.IGNORE_AXIOM_ANNOTATIONS)) { LOG.info("INFERRED: "+owlpp.render(ax)); 
iAxioms.add(ax); } } if (isRemoveIndirect) { for (OWLClass sc : reasoner.getSuperClasses(c, false).getFlattened()) { if (!supers.contains(sc)) { OWLSubClassOfAxiom ax = g.getDataFactory().getOWLSubClassOfAxiom(c, sc); if (ont.containsAxiom(ax, Imports.INCLUDED, AxiomAnnotations.IGNORE_AXIOM_ANNOTATIONS)) { rmAxioms.add(ax); LOG.info("INDIRECT: "+owlpp.render(ax)); } } } } } } if (q.startsWith("i")) { for (OWLNamedIndividual i : g.getSourceOntology().getIndividualsInSignature()) { Set<OWLClass> types = reasoner.getTypes(i, isDirect).getFlattened(); for (OWLClass ce : types) { OWLClassAssertionAxiom ax = g.getDataFactory().getOWLClassAssertionAxiom(ce, i); if (!ont.containsAxiom(ax, Imports.INCLUDED, AxiomAnnotations.IGNORE_AXIOM_ANNOTATIONS)) { LOG.info("INFERRED: "+owlpp.render(ax)); iAxioms.add(ax); } } if (isRemoveIndirect) { for (OWLClass ce : reasoner.getTypes(i, false).getFlattened()) { if (!types.contains(ce)) { OWLClassAssertionAxiom ax = g.getDataFactory().getOWLClassAssertionAxiom(ce, i); if (ont.containsAxiom(ax, Imports.EXCLUDED, AxiomAnnotations.IGNORE_AXIOM_ANNOTATIONS)) { rmAxioms.add(ax); LOG.info("INDIRECT: "+owlpp.render(ax)); } } } } } } if (isReplaceOntology) { Set<OWLAxiom> allAxioms = ont.getAxioms(); g.getManager().removeAxioms(ont, allAxioms); g.getManager().addAxioms(ont, iAxioms); } if (isAddToCurrentOntology) { System.out.println("Adding "+iAxioms.size()+" axioms"); g.getManager().addAxioms(ont, iAxioms); } rmAxioms.retainAll(ont.getAxioms()); if (rmAxioms.size() > 0) { System.out.println("Removing "+rmAxioms.size()+" axioms"); g.getManager().removeAxioms(ont, rmAxioms); } } else if (opts.nextEq("--annotate-with-reasoner")) { opts.info("[-c OntologyIRI]", "annotated existing and inferred subClassOf axioms with source"); OWLOntology outputOntology = null; while (opts.hasOpts()) { if (opts.nextEq("-c||--create")) { outputOntology = g.getManager().createOntology(IRI.create(opts.nextOpt())); } else { break; } } ProvenanceReasonerWrapper pr = 
new ProvenanceReasonerWrapper(g.getSourceOntology(), new ElkReasonerFactory()); if (outputOntology != null) { pr.outputOntology = outputOntology; } pr.reason(); if (outputOntology != null) { g.setSourceOntology(outputOntology); } } else if (opts.nextEq("--run-reasoner")) { opts.info("[-r reasonername] [--assert-implied] [--indirect] [-u]", "infer new relationships"); boolean isAssertImplied = false; boolean isDirect = true; boolean isShowUnsatisfiable = false; boolean isRemoveUnsatisfiable = false; boolean showExplanation = false; String unsatisfiableModule = null; boolean traceModuleAxioms = false; // related to unsatisfiableModule while (opts.hasOpts()) { if (opts.nextEq("-r")) { opts.info("REASONERNAME", "selects the reasoner to use"); reasonerName = opts.nextOpt(); } else if (opts.nextEq("--assert-implied")) { isAssertImplied = true; } else if (opts.nextEq("--indirect")) { opts.info("", "include indirect inferences"); isDirect = false; } else if (opts.nextEq("-u|--list-unsatisfiable")) { opts.info("", "list all unsatisfiable classes"); isShowUnsatisfiable = true; } else if (opts.nextEq("-e|--show-explanation")) { opts.info("", "add a single explanation for each unsatisfiable class"); showExplanation = true; } else if (opts.nextEq("-x|--remove-unsatisfiable")) { opts.info("", "remove all unsatisfiable classes"); isRemoveUnsatisfiable = true; isShowUnsatisfiable = true; } else if (opts.nextEq("-m|--unsatisfiable-module")) { opts.info("", "create a module for the unsatisfiable classes."); unsatisfiableModule = opts.nextOpt(); } else if (opts.nextEq("--trace-module-axioms")) { traceModuleAxioms = true; } else { break; } } OWLPrettyPrinter owlpp = getPrettyPrinter(); boolean isQueryProcessed = false; if (reasoner == null) { reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); } if (isShowUnsatisfiable || isRemoveUnsatisfiable) { OWLClass nothing = g.getDataFactory().getOWLNothing(); Set<OWLObjectProperty> unsats = new HashSet<>(); for 
(OWLObjectProperty p : g.getSourceOntology().getObjectPropertiesInSignature(true)) { try { if (reasoner.getObjectPropertyDomains(p, false).getFlattened().contains(nothing)) { LOG.error("Domain is unsat: "+p); unsats.add(p); } if (reasoner.getObjectPropertyRanges(p, false).getFlattened().contains(nothing)) { LOG.error("Range is unsat: "+p); unsats.add(p); } } catch (UnsupportedOperationException e) { LOG.warn("Could not perform operation (expected with Elk)"); } } if (unsats.size() > 0) { LOG.error("Ontology has unsat properties - will not proceed"); exit(1); } else { LOG.info("All properties have satisfiable domain and range"); } } if (isShowUnsatisfiable || isRemoveUnsatisfiable) { int n = 0; Set<OWLClass> unsats = new HashSet<OWLClass>(); LOG.info("Finding unsatisfiable classes"); Set<OWLClass> unsatisfiableClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom(); ExplanationGenerator explanationGenerator = null; if (showExplanation) { OWLReasonerFactory factory = createReasonerFactory(reasonerName); explanationGenerator = new DefaultExplanationGenerator(g.getManager(), factory, g.getSourceOntology(), reasoner, null); } for (OWLClass c : unsatisfiableClasses) { if (c.isBuiltIn()) { continue; } unsats.add(c); StringBuilder msgBuilder = new StringBuilder(); msgBuilder.append("UNSAT: ").append(owlpp.render(c)); if (explanationGenerator != null) { Set<OWLAxiom> explanation = explanationGenerator.getExplanation(c); if (explanation.isEmpty() == false) { msgBuilder.append('\t'); msgBuilder.append("explanation:"); for (OWLAxiom axiom : explanation) { msgBuilder.append('\t'); msgBuilder.append(owlpp.render(axiom)); } } } System.out.println(msgBuilder); n++; } System.out.println("NUMBER_OF_UNSATISFIABLE_CLASSES: "+n); if (unsatisfiableModule != null) { LOG.info("Creating module for unsatisfiable classes in file: "+unsatisfiableModule); ModuleType mtype = ModuleType.BOT; OWLOntologyManager m = g.getManager(); SyntacticLocalityModuleExtractor sme = new 
SyntacticLocalityModuleExtractor(m, g.getSourceOntology(), mtype ); Set<OWLEntity> seeds = new HashSet<OWLEntity>(unsatisfiableClasses); Set<OWLAxiom> axioms = sme.extract(seeds); OWLOntology module = m.createOntology(); if (traceModuleAxioms) { axioms = traceAxioms(axioms, g, module.getOWLOntologyManager().getOWLDataFactory()); } m.addAxioms(module, axioms); File moduleFile = new File(unsatisfiableModule).getCanonicalFile(); m.saveOntology(module, IRI.create(moduleFile)); } if (n > 0) { if (isRemoveUnsatisfiable) { Mooncat m = new Mooncat(g); m.removeSubsetClasses(unsats, true); isQueryProcessed = true; } else { LOG.error("Ontology has unsat classes - will not proceed"); exit(1); } } } if (opts.hasOpts()) { if (opts.nextEq("-i")) { OWLClass qc = (OWLClass)resolveEntity(opts); System.out.println("Getting individuals of class: "+qc); for (Node<OWLNamedIndividual> ni : reasoner.getInstances(qc, false)) { for (OWLNamedIndividual i : ni.getEntities()) { System.out.println(i); } } isQueryProcessed = true; } } // this should probably be deprecated - deliberate // non-local effects from separate command if (queryExpressionMap != null) { // Assume --query-ontontology -m ONT has been processed for (OWLClass qc : queryExpressionMap.keySet()) { System.out.println(" CWQueryClass: "+owlpp.render(qc)+" "+qc.getIRI().toString()); OWLClassExpression ec = queryExpressionMap.get(qc); System.out.println(" CWQueryExpression: "+owlpp.render(ec)); // note jcel etc will not take class expressions NodeSet<OWLClass> results = reasoner.getSubClasses(qc, false); for (OWLClass result : results.getFlattened()) { if (reasoner.isSatisfiable(result)) { System.out.println(" "+owlpp.render(result)); } else { // will not report unsatisfiable classes, as they trivially //LOG.error("unsatisfiable: "+owlpp.render(result)); } } } isQueryProcessed = true; } if (!isQueryProcessed) { if (removedSubClassOfAxioms != null) { System.out.println("attempting to recapitulate "+removedSubClassOfAxioms.size()+" 
axioms"); for (OWLSubClassOfAxiom a : removedSubClassOfAxioms) { OWLClassExpression sup = a.getSuperClass(); if (sup instanceof OWLClass) { boolean has = false; for (Node<OWLClass> isup : reasoner.getSuperClasses(a.getSubClass(),false)) { if (isup.getEntities().contains(sup)) { has = true; break; } } System.out.print(has ? "POSITIVE: " : "NEGATIVE: "); System.out.println(owlpp.render(a)); } } } System.out.println("all inferences"); LOG.info("Checking for consistency..."); System.out.println("Consistent? "+reasoner.isConsistent()); if (!reasoner.isConsistent()) { for (OWLClass c : reasoner.getUnsatisfiableClasses()) { System.out.println("UNSAT: "+owlpp.render(c)); } } LOG.info("Iterating through all classes..."); for (OWLObject obj : g.getAllOWLObjects()) { if (obj instanceof OWLClass) { OWLClass c = ((OWLClass) obj); // find all asserted parents in ontology and its import closure; // we do not want to re-assert Set<OWLClassExpression> assertedSuperclasses = OwlHelper.getSuperClasses(c, g.getSourceOntology().getImportsClosure()); //assertedSuperclasses.addAll(c.getEquivalentClasses(g.getSourceOntology().getImportsClosure())); //Set<OWLClass> eqCs = reasoner.getEquivalentClasses(c).getEntities(); for (OWLClass sup : reasoner.getSuperClasses(c, isDirect).getFlattened()) { if (assertedSuperclasses.contains(sup)) { continue; } if (sup.isOWLThing()) continue; System.out.println("INFERENCE: "+owlpp.render(obj)+" SubClassOf "+owlpp.render(sup)); if (isAssertImplied) { OWLSubClassOfAxiom sca = g.getDataFactory().getOWLSubClassOfAxiom(c, sup); g.getManager().addAxiom(g.getSourceOntology(), sca); } } for (OWLClass ec : reasoner.getEquivalentClasses(((OWLClassExpression) obj)).getEntities()) { if (!ec.equals(obj)) System.out.println("INFERENCE: "+owlpp.render(obj)+" EquivalentTo "+owlpp.render(ec)); if (isAssertImplied) { OWLEquivalentClassesAxiom eca = g.getDataFactory().getOWLEquivalentClassesAxiom(c, ec); g.getManager().addAxiom(g.getSourceOntology(), eca); } } } } } } else 
if (opts.nextEq("--stash-subclasses")) { opts.info("[-a][--prefix PREFIX][--ontology RECAP-ONTOLOGY-IRI", "removes all subclasses in current source ontology; after reasoning, try to re-infer these"); boolean isDefinedOnly = true; Set<String> prefixes = new HashSet<String>(); OWLOntology recapOnt = g.getSourceOntology(); while (opts.hasOpts()) { if (opts.nextEq("--prefix")) { prefixes.add(opts.nextOpt()); } else if (opts.nextEq("-a")) { isDefinedOnly = false; } else if (opts.nextEq("--ontology")) { IRI ontIRI = IRI.create(opts.nextOpt()); recapOnt = g.getManager().getOntology(ontIRI); if (recapOnt == null) { LOG.error("Cannot find ontology: "+ontIRI+" from "+g.getManager().getOntologies().size()); for (OWLOntology ont : g.getManager().getOntologies()) { LOG.error(" I have: "+ont.getOntologyID()); } for (OWLOntology ont : g.getSourceOntology().getImportsClosure()) { LOG.error(" IC: "+ont.getOntologyID()); } } } else { break; } } Set<OWLSubClassOfAxiom> allAxioms = recapOnt.getAxioms(AxiomType.SUBCLASS_OF); removedSubClassOfAxioms = new HashSet<OWLSubClassOfAxiom>(); System.out.println("Testing "+allAxioms.size()+" SubClassOf axioms for stashing. 
Prefixes: "+prefixes.size()); HashSet<RemoveAxiom> rmaxs = new HashSet<RemoveAxiom>(); for (OWLSubClassOfAxiom a : allAxioms) { OWLClassExpression subc = a.getSubClass(); if (!(subc instanceof OWLClass)) { continue; } OWLClassExpression supc = a.getSuperClass(); if (!(supc instanceof OWLClass)) { continue; } if (prefixes.size() > 0) { boolean skip = true; for (String p : prefixes) { if (((OWLClass) subc).getIRI().toString().startsWith(p)) { skip = false; break; } } if (skip) break; } if (isDefinedOnly) { // TODO - imports closure if (OwlHelper.getEquivalentClasses((OWLClass)subc, g.getSourceOntology()).isEmpty()) { continue; } if (OwlHelper.getEquivalentClasses((OWLClass)supc, g.getSourceOntology()).isEmpty()) { continue; } } // TODO: remove it from the ontology in which it's asserted RemoveAxiom rmax = new RemoveAxiom(recapOnt,a); LOG.debug("WILL_REMOVE: "+a); rmaxs.add(rmax); removedSubClassOfAxioms.add(g.getDataFactory().getOWLSubClassOfAxiom(a.getSubClass(), a.getSuperClass())); } System.out.println("Will remove "+rmaxs.size()+" axioms"); for (RemoveAxiom rmax : rmaxs) { g.getManager().applyChange(rmax); } } else if (opts.nextEq("--list-cycles")) { boolean failOnCycle = false; if (opts.nextEq("-f|--fail-on-cycle")) { failOnCycle = true; } OWLPrettyPrinter owlpp = getPrettyPrinter(); int n = 0; for (OWLObject x : g.getAllOWLObjects()) { for (OWLObject y : g.getAncestors(x)) { if (g.getAncestors(y).contains(x)) { if (y instanceof OWLClass) { for (OWLGraphEdge e : g.getEdgesBetween(x, y)) { System.out.println(owlpp.render(x) + " in-cycle-with "+owlpp.render(y)+" // via " + e.getQuantifiedPropertyList()); } } n++; } } } System.out.println("Number of cycles: "+n); if (n > 0 && failOnCycle) System.exit(1); } else if (opts.nextEq("-a|--ancestors")) { opts.info("LABEL", "list edges in graph closure to root nodes"); Set<OWLPropertyExpression> props = new HashSet<OWLPropertyExpression>(); boolean useProps = false; while (opts.hasOpts()) { if (opts.nextEq("-p")) { 
opts.info("PROP", "Add this property to the set of interest"); props.add(this.resolveObjectProperty(opts.nextOpt())); useProps = true; } else if (opts.nextEq("--plist")) { opts.info("PROPLIST", "Terminated by '//'. Add these properties to the set of interest"); Set<OWLObjectProperty> nprops = this.resolveObjectPropertyList(opts); props.addAll(nprops); useProps = true; } else { break; } } OWLObject obj = resolveEntity(opts); System.out.println(obj+ " "+obj.getClass()+" P:"+props); if (!useProps) props = null; Set<OWLGraphEdge> edges = g.getOutgoingEdgesClosureReflexive(obj,props); showEdges(edges); } else if (opts.nextEq("--ancestor-nodes")) { opts.info("LABEL", "list nodes in graph closure to root nodes"); Set<OWLPropertyExpression> props = new HashSet<OWLPropertyExpression>(); boolean useProps = false; while (opts.hasOpts()) { if (opts.nextEq("-p")) { opts.info("PROP", "Add this property to the set of interest"); props.add(this.resolveObjectProperty(opts.nextOpt())); useProps = true; } else if (opts.nextEq("--plist")) { opts.info("PROPLIST", "Terminated by '//'. 
Add these properties to the set of interest"); Set<OWLObjectProperty> nprops = this.resolveObjectPropertyList(opts); props.addAll(nprops); useProps = true; } else { break; } } OWLObject obj = resolveEntity(opts); System.out.println(obj+ " "+obj.getClass()+" P:"+props); if (!useProps) props = null; for (OWLObject a : g.getAncestors(obj, props)) System.out.println(a); } else if (opts.nextEq("--parents-named")) { opts.info("LABEL", "list direct outgoing edges to named classes"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " "+obj.getClass()); Set<OWLGraphEdge> edges = g.getOutgoingEdges(obj); showEdges( edges); } else if (opts.nextEq("--parents")) { opts.info("LABEL", "list direct outgoing edges"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " "+obj.getClass()); Set<OWLGraphEdge> edges = g.getPrimitiveOutgoingEdges(obj); showEdges( edges); } else if (opts.nextEq("--grandparents")) { opts.info("LABEL", "list direct outgoing edges and their direct outgoing edges"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " "+obj.getClass()); Set<OWLGraphEdge> edges = g.getPrimitiveOutgoingEdges(obj); for (OWLGraphEdge e1 : edges) { System.out.println(e1); for (OWLGraphEdge e2 : g.getPrimitiveOutgoingEdges(e1.getTarget())) { System.out.println(" "+e2); } } } else if (opts.nextEq("--subsumers")) { opts.info("LABEL", "list named subsumers and subsuming expressions"); OWLObject obj = resolveEntity( opts); Set<OWLObject> ancs = g.getSubsumersFromClosure(obj); for (OWLObject a : ancs) { System.out.println(a); } } else if (opts.nextEq("--incoming-edges")) { opts.info("LABEL", "list edges in graph to leaf nodes"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " "+obj.getClass()); Set<OWLGraphEdge> edges = g.getIncomingEdges(obj); showEdges( edges); } else if (opts.nextEq("--descendant-edges")) { opts.info("LABEL", "list edges in graph closure to leaf nodes"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " 
"+obj.getClass()); Set<OWLGraphEdge> edges = g.getIncomingEdgesClosure(obj); showEdges( edges); } else if (opts.nextEq("--descendants")) { opts.info("LABEL", "show all descendant nodes"); OWLObject obj = resolveEntity( opts); OWLPrettyPrinter owlpp = getPrettyPrinter(); System.out.println("#" + obj+ " "+obj.getClass()+" "+owlpp.render(obj)); Set<OWLObject> ds = g.getDescendants(obj); for (OWLObject d : ds) System.out.println(d); } else if (opts.nextEq("--subsumed-by")) { opts.info("LABEL", "show all descendant nodes"); OWLObject obj = resolveEntity( opts); System.out.println(obj+ " "+obj.getClass()); Set<OWLObject> ds = g.queryDescendants((OWLClass)obj); for (OWLObject d : ds) System.out.println(d); } else if (opts.nextEq("-l") || opts.nextEq("--list-axioms")) { opts.info("LABEL", "lists all axioms for entity matching LABEL"); OWLObject obj = resolveEntity( opts); OWLPrettyPrinter owlpp = getPrettyPrinter(); owlpp.print("## Showing axiom for: "+obj); Set<OWLAxiom> axioms = g.getSourceOntology().getReferencingAxioms((OWLEntity) obj); owlpp.print(axioms); Set<OWLAnnotationAssertionAxiom> aaxioms = g.getSourceOntology().getAnnotationAssertionAxioms(((OWLNamedObject) obj).getIRI()); for (OWLAxiom a : aaxioms) { System.out.println(owlpp.render(a)); } } else if (opts.nextEq("--obsolete-class")) { opts.info("LABEL", "Add a deprecation axiom"); OWLObject obj = resolveEntity( opts); OWLPrettyPrinter owlpp = getPrettyPrinter(); owlpp.print("## Obsoleting: "+obj); Set<OWLAxiom> refAxioms = g.getSourceOntology().getReferencingAxioms((OWLEntity) obj); Set<OWLClassAxiom> axioms = g.getSourceOntology().getAxioms((OWLClass) obj, Imports.EXCLUDED); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); for (OWLAxiom ax : axioms) { if (ax.isLogicalAxiom()) { rmAxioms.add(ax); System.out.println("REMOVING:"+owlpp.render(ax)); } } for (OWLAxiom ax : refAxioms) { if (ax.isLogicalAxiom() && !rmAxioms.contains(ax)) { System.err.println("UH-OH: "+ax); } } 
g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms); System.err.println("TODO"); } else if (opts.nextEq("-d") || opts.nextEq("--draw")) { opts.info("[-o FILENAME] [-f FMT] LABEL/ID", "generates a file tmp.png made using QuickGO code"); String imgf = "tmp.png"; String fmt = "png"; while (opts.hasOpts()) { if (opts.nextEq("-o")) { opts.info("FILENAME", "name of png file to save (defaults to tmp.png)"); imgf = opts.nextOpt(); } else if (opts.nextEq("-f")) { opts.info("FMT", "image format. See ImageIO docs for a list. Default: png"); fmt = opts.nextOpt(); if (imgf.equals("tmp.png")) { imgf = "tmp."+fmt; } } else if (opts.nextEq("-p")) { OWLObjectProperty p = resolveObjectProperty(opts.nextOpt()); RelationConfig rc = gfxCfg.new RelationConfig(); rc.color = Color.MAGENTA; gfxCfg.relationConfigMap.put(p, rc); } else { break; } } OWLObject obj = resolveEntity( opts); System.out.println(obj); OWLGraphLayoutRenderer r = new OWLGraphLayoutRenderer(g); r.graphicsConfig = gfxCfg; r.addObject(obj); r.renderImage(fmt, new FileOutputStream(imgf)); //Set<OWLGraphEdge> edges = g.getOutgoingEdgesClosureReflexive(obj); //showEdges( edges); } else if (opts.nextEq("--draw-all")) { opts.info("", "draws ALL objects in the ontology (caution: small ontologies only)"); OWLGraphLayoutRenderer r = new OWLGraphLayoutRenderer(g); r.addAllObjects(); r.renderImage("png", new FileOutputStream("tmp.png")); } else if (opts.nextEq("--dump-node-attributes")) { opts.info("", "dumps all nodes attributes in CytoScape compliant format"); FileOutputStream fos; PrintStream stream = null; try { fos = new FileOutputStream(opts.nextOpt()); stream = new PrintStream(new BufferedOutputStream(fos)); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } stream.println("Label"); for (OWLObject obj : g.getAllOWLObjects()) { String label = g.getLabel(obj); if (label != null) stream.println(g.getIdentifier(obj)+"\t=\t"+label); } stream.close(); } else if 
(opts.nextEq("--dump-sif")) { opts.info("", "dumps CytoScape compliant sif format"); FileOutputStream fos; PrintStream stream = null; try { fos = new FileOutputStream(opts.nextOpt()); stream = new PrintStream(new BufferedOutputStream(fos)); } catch (FileNotFoundException e) { // TODO Auto-generated catch block e.printStackTrace(); } for (OWLObject x : g.getAllOWLObjects()) { for (OWLGraphEdge e : g.getOutgoingEdges(x)) { OWLQuantifiedProperty qp = e.getSingleQuantifiedProperty(); String label; if (qp.getProperty() != null) label = qp.getProperty().toString(); else label = qp.getQuantifier().toString(); if (label != null) stream.println(g.getIdentifier(x)+"\t"+label+"\t"+g.getIdentifier(e.getTarget())); } } stream.close(); } else if (opts.nextEq("--sic|--slurp-import-closure")) { opts.info("[-d DIR] [-c CATALOG-OUT]","Saves local copy of import closure. Assumes sourceontology has imports"); String dir = "."; String catfile = "catalog-v001.xml"; while (opts.hasOpts()) { if (opts.nextEq("-d")) { dir = opts.nextOpt(); } else if (opts.nextEq("-c")) { catfile = opts.nextOpt(); } else { break; } } ImportClosureSlurper ics = new ImportClosureSlurper(g.getSourceOntology()); ics.save(dir, catfile); } else if (opts.nextEq("-o|--output")) { opts.info("[-f FORMAT] [--prefix PREFIX URI]* FILE", "writes source ontology."); OWLDocumentFormat ofmt = new RDFXMLDocumentFormat(); String ontURIStr = ""; if ( g.getSourceOntology().getOntologyID() != null && g.getSourceOntology().getOntologyID().getOntologyIRI().isPresent()) { ontURIStr = g.getSourceOntology().getOntologyID().getOntologyIRI().get().toString(); } while (opts.hasOpts()) { if (opts.nextEq("-f")) { opts.info("FORMAT", "omn OR ofn OR ttl OR owx OR ojs (experimental) OR obo (uses obooformat jar)"); String ofmtname = opts.nextOpt(); if (ofmtname.equals("manchester") || ofmtname.equals("omn")) { ofmt = new ManchesterSyntaxDocumentFormat(); } else if (ofmtname.equals("functional") || ofmtname.equals("ofn")) { ofmt = new 
FunctionalSyntaxDocumentFormat(); } else if (ofmtname.equals("turtle") || ofmtname.equals("ttl")) { ofmt = new TurtleDocumentFormat(); } else if (ofmtname.equals("trig")) { ofmt = new TrigDocumentFormat(); } else if (ofmtname.equals("xml") || ofmtname.equals("owx")) { ofmt = new OWLXMLDocumentFormat(); } else if (ofmtname.equals("ojs")) { ofmt = new OWLJSONFormat(); } else if (ofmtname.equals("jsonld")) { ofmt = new OWLJsonLDFormat(); } else if (ofmtname.equals("og") || ofmtname.equals("json")) { ofmt = new OWLOboGraphsFormat(); } else if (ofmtname.equals("oy") || ofmtname.equals("yaml")) { ofmt = new OWLOboGraphsYamlFormat(); } else if (ofmtname.equals("obo")) { if (opts.nextEq("-n|--no-check")) { pw.setCheckOboDoc(false); } ofmt = new OBODocumentFormat(); } } else if (opts.nextEq("--prefix")) { opts.info("PREFIX URIBASE","use PREFIX as prefix. Note: specify this sub-arg AFTER -f"); ofmt.asPrefixOWLOntologyFormat().setPrefix(opts.nextOpt(), opts.nextOpt()); } else { break; } } LOG.info("saving:"+ontURIStr+" using "+ofmt); if (opts.hasArgs()) { String outputFile = opts.nextOpt(); pw.saveOWL(g.getSourceOntology(), ofmt, outputFile); //pw.saveOWL(g.getSourceOntology(), opts.nextOpt()); } else { final String msg = "Missing output file for '-o' OR '--output' option. Output was not written to a file."; throw new OptionException(msg); } } else if (opts.nextEq("--filter-axioms")) { Set<AxiomType> types = new HashSet<AxiomType>(); while (opts.hasOpts()) { if (opts.nextEq("-t|--axiom-type")) { types.add( AxiomType.getAxiomType(opts.nextOpt()) ); } else { break; } } for (OWLOntology o : g.getSourceOntology().getImportsClosure()) { Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); for (OWLAxiom ax : o.getAxioms()) { if (!types.contains(ax.getAxiomType())) { rmAxioms.add(ax); } } LOG.info("Removing axioms: "+rmAxioms.size()); g.getManager().removeAxioms(o, rmAxioms); } } else if (opts.nextEq("--remove-axioms")) { opts.info("-t Type", "Removes axioms of specified type. 
May be specified multiple times"); AxiomType t = null; while (opts.hasOpts()) { if (opts.nextEq("-t|--axiom-type")) { opts.info("Type", "OWLAPI type. E.g. DisjointClasses"); t = AxiomType.getAxiomType(opts.nextOpt()); } else { break; } } for (OWLOntology o : g.getSourceOntology().getImportsClosure()) { Set<OWLAxiom> axioms = o.getAxioms(t); LOG.info("Removing axioms: "+axioms.size()); g.getManager().removeAxioms(o, axioms); } } else if (opts.nextEq("--remove-axiom-annotations")) { opts.info("", "If an axiom has 1 or more annotations, replace with annotation-free version"); for (OWLAxiom a : g.getSourceOntology().getAxioms()) { Set<OWLAnnotation> anns = a.getAnnotations(); if (anns.size() > 0) { AxiomAnnotationTools.changeAxiomAnnotations(a, new HashSet<OWLAnnotation>(), g.getSourceOntology()); } } } else if (opts.nextEq("--make-super-slim")) { opts.info("IDSPACES", "removes all classes not in the superclass closure of any ontology in one of the idspaces." + " also assers superclasses"); boolean isTempReasoner = false; if (reasoner == null) { reasoner = this.createReasoner(g.getSourceOntology(), "elk", g.getManager()); isTempReasoner = true; } String idspacesStr = opts.nextOpt(); LOG.info("idsps = "+idspacesStr); String[] idarr = idspacesStr.split(","); Set<String> idspaces = new HashSet<String>(Arrays.asList(idarr)); LOG.info("idsps = "+idspaces); Set<OWLClass> cs = new HashSet<OWLClass>(); for (OWLClass c : g.getAllOWLClasses()) { String id = g.getIdentifier(c); String[] idparts = id.split(":"); String idspace = idparts[0]; if (idspaces.contains(idspace)) { cs.addAll(reasoner.getEquivalentClasses(c).getEntities()); cs.addAll(reasoner.getSuperClasses(c, false).getFlattened()); } } AssertInferenceTool.assertInferences(g, false, false, false, true, false, false, false, null, null); Mooncat m = new Mooncat(g); m.removeSubsetComplementClasses(cs, true); if (isTempReasoner) { reasoner.dispose(); } } else if (opts.nextEq("--split-ontology")) { opts.info("[-p IRI-PREFIX] 
[-s IRI-SUFFIX] [-d OUTDIR] [-l IDSPACE1 ... IDPSPACEn]", "Takes current only extracts all axioms in ID spaces and writes to separate ontology PRE+lc(IDSPACE)+SUFFIX saving to outdir. Also adds imports"); String prefix = g.getSourceOntology().getOntologyID().getOntologyIRI().get().toString().replace(".owl", "/"); String suffix = "_import.owl"; String outdir = "."; Set<String> idspaces = new HashSet<String>(); while (opts.hasOpts()) { if (opts.nextEq("-p|--prefix")) prefix = opts.nextOpt(); else if (opts.nextEq("-s|--suffix")) suffix = opts.nextOpt(); else if (opts.nextEq("-d|--dir")) outdir = opts.nextOpt(); else if (opts.nextEq("-l|--idspaces")) { idspaces.addAll(opts.nextList()); } else break; } Mooncat m = new Mooncat(g); for (String idspace : idspaces) { LOG.info("Removing "+idspace); String name = prefix + idspace + suffix; IRI iri = IRI.create(name); OWLOntology subOnt = g.getManager().createOntology(iri); m.transferAxiomsUsingIdSpace(idspace, subOnt); AddImport ai = new AddImport(g.getSourceOntology(), g.getDataFactory().getOWLImportsDeclaration(iri)); g.getManager().applyChange(ai); String path = outdir + "/" + name.replaceAll(".*/", ""); FileOutputStream stream = new FileOutputStream(new File(path)); g.getManager().saveOntology(subOnt, stream); } } else if (opts.nextEq("--remove-subset")) { opts.info("[-d] SUBSET", "Removes a subset (aka slim) from an ontology"); boolean isRemoveDangling = true; while (opts.hasOpts()) { if (opts.nextEq("-d|--keep-dangling")) { opts.info("", "if specified, dangling axioms (ie pointing to removed classes) are preserved"); isRemoveDangling = false; } else break; } String subset = opts.nextOpt(); Set<OWLClass> cset = g.getOWLClassesInSubset(subset); LOG.info("Removing "+cset.size()+" classes"); Mooncat m = new Mooncat(g); m.removeSubsetClasses(cset, isRemoveDangling); } else if (opts.nextEq("--remove-axioms-about")) { opts.info("[-d] IDSPACES", "Removes axioms that are about the specified ID space"); boolean isRemoveDangling = 
true; while (opts.hasOpts()) { if (opts.nextEq("-d|--keep-dangling")) { opts.info("", "if specified, dangling axioms (ie pointing to removed classes) are preserved"); isRemoveDangling = false; } else break; } String idspace = opts.nextOpt(); Mooncat m = new Mooncat(g); m.removeAxiomsAboutIdSpace(idspace, isRemoveDangling); } else if (opts.nextEq("--remove-classes-in-idspace")) { opts.info("[-d] [-s IDSPACE]", "Removes classes in an ID space from ontology"); String idspace = null; boolean isRemoveDangling = true; while (opts.hasOpts()) { if (opts.nextEq("-s|--idspace")) { opts.info("", "ID space"); idspace = opts.nextOpt(); } else if (opts.nextEq("-d|--keep-dangling")) { opts.info("", "if specified, dangling axioms (ie pointing to removed classes) are preserved"); isRemoveDangling = false; } else break; } if (idspace == null) idspace = opts.nextOpt(); String idspaceFinal = idspace + ":"; LOG.info("IDSPACE: "+idspaceFinal); Set<OWLClass> cset = g.getAllOWLClasses().stream().filter( c -> g.getIdentifier(c).startsWith(idspaceFinal) ).collect(Collectors.toSet()); LOG.info("Removing "+cset.size()+ " classes from "+g.getAllOWLClasses().size()); Mooncat m = new Mooncat(g); m.removeSubsetClasses(cset, isRemoveDangling); } else if (opts.nextEq("--extract-subset")) { opts.info("SUBSET", "Extract a subset (aka slim) from an ontology, storing subset in place of existing ontology"); String subset = opts.nextOpt(); Set<OWLClass> cset = g.getOWLClassesInSubset(subset); LOG.info("Removing "+cset.size()+" classes"); Mooncat m = new Mooncat(g); m.removeSubsetComplementClasses(cset, false); } else if (opts.nextEq("--translate-undeclared-to-classes")) { for (OWLAnnotationAssertionAxiom a : g.getSourceOntology().getAxioms(AxiomType.ANNOTATION_ASSERTION)) { OWLAnnotationSubject sub = a.getSubject(); if (sub instanceof IRI) { OWLObject e = g.getOWLObject(((IRI)sub)); if (e == null) { OWLClass c = g.getDataFactory().getOWLClass((IRI)sub); OWLDeclarationAxiom ax = 
g.getDataFactory().getOWLDeclarationAxiom(c); g.getManager().addAxiom(g.getSourceOntology(), ax); } } } } else if (opts.nextEq("--show-metadata")) { OntologyMetadata omd = new OntologyMetadata(); omd.generate(g); } else if (opts.nextEq("--follow-subclass")) { opts.info("", "follow subclass axioms (and also equivalence axioms) in graph traversal.\n"+ " default is to follow ALL. if this is specified then only explicitly specified edges followed"); if (g.getConfig().graphEdgeIncludeSet == null) g.getConfig().graphEdgeIncludeSet = new HashSet<OWLQuantifiedProperty>(); g.getConfig().graphEdgeIncludeSet.add(new OWLQuantifiedProperty(Quantifier.SUBCLASS_OF)); } else if (opts.nextEq("--follow-property")) { opts.info("PROP-LABEL", "follow object properties of this type in graph traversal.\n"+ " default is to follow ALL. if this is specified then only explicitly specified edges followed"); OWLObjectProperty p = (OWLObjectProperty) resolveEntity( opts); if (g.getConfig().graphEdgeIncludeSet == null) g.getConfig().graphEdgeIncludeSet = new HashSet<OWLQuantifiedProperty>(); g.getConfig().graphEdgeIncludeSet.add(new OWLQuantifiedProperty(p, null)); } else if (opts.nextEq("--exclude-property")) { opts.info("PROP-LABEL", "exclude object properties of this type in graph traversal.\n"+ " default is to exclude NONE."); OWLObjectProperty p = g.getOWLObjectProperty(opts.nextOpt()); System.out.println("Excluding "+p+" "+p.getClass()); if (g.getConfig().graphEdgeExcludeSet == null) g.getConfig().graphEdgeExcludeSet = new HashSet<OWLQuantifiedProperty>(); g.getConfig().graphEdgeExcludeSet.add(new OWLQuantifiedProperty(p, null)); } else if (opts.nextEq("--exclusion-annotation-property")) { opts.info("[-o ONT] PROP-LABEL", "exclude object properties of this type in graph traversal.\n"+ " default is to exclude NONE."); OWLOntology xo = g.getSourceOntology(); if (opts.hasOpts()) { if (opts.nextEq("-o")) { xo = pw.parse(opts.nextOpt()); } else break; } OWLAnnotationProperty ap = 
(OWLAnnotationProperty) g.getOWLObjectByLabel(opts.nextOpt()); g.getConfig().excludeAllWith(ap, xo); } else if (opts.nextEq("--inclusion-annotation-property")) { opts.info("[-o ONT] PROP-LABEL", "include object properties of this type in graph traversal.\n"+ " default is to include NONE."); OWLOntology xo = g.getSourceOntology(); if (opts.hasOpts()) { if (opts.nextEq("-o")) { xo = pw.parse(opts.nextOpt()); } else break; } OWLAnnotationProperty ap = (OWLAnnotationProperty) g.getOWLObjectByLabel(opts.nextOpt()); g.getConfig().includeAllWith(ap, xo); } else if (opts.nextEq("--exclude-metaclass")) { opts.info("METACLASS-LABEL", "exclude classes of this type in graph traversal.\n"+ " default is to follow ALL classes"); OWLClass c = (OWLClass) resolveEntity( opts); g.getConfig().excludeMetaClass = c; } else if (opts.nextEq("--label-abox")) { opts.info("[-e]", "Auto-add labels for individuals, using class label"); boolean isUseEntailed = false; boolean isOverwrite = false; while (opts.hasOpts()) { if (opts.nextEq("-e|--entailed")) { isUseEntailed = true; } if (opts.nextEq("-w|--overwrite")) { isOverwrite = true; } else { break; } } for (OWLNamedIndividual i : g.getSourceOntology().getIndividualsInSignature(true)) { if (g.getLabel(i) != null && !isOverwrite) { continue; } TransformationUtils.addLabel(i, g, reasoner); } } else if (opts.nextEq("--create-abox-subset")) { opts.info("CLASS", "Remove all ClassAssertions where the CE is not a subclass of the specified class"); OWLClass c = this.resolveClass(opts.nextOpt()); LOG.info("SUBSET: "+c); //Set<OWLNamedIndividual> inds = g.getSourceOntology().getIndividualsInSignature(true); Set<OWLClassAssertionAxiom> caas = g.getSourceOntology().getAxioms(AxiomType.CLASS_ASSERTION); Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>(); for (OWLClassAssertionAxiom a : caas) { Set<OWLClass> sups = reasoner.getSuperClasses(a.getClassExpression(), false).getFlattened(); if (!sups.contains(c)) { rmAxioms.add(a); } } LOG.info("Removing: 
"+rmAxioms.size() + " / "+caas.size()); g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms); reasoner.flush(); } else if (opts.nextEq("--load-instances")) { TableToAxiomConverter ttac = new TableToAxiomConverter(g); ttac.config.axiomType = AxiomType.CLASS_ASSERTION; ttac.config.isSwitchSubjectObject = true; while (opts.hasOpts()) { if (opts.nextEq("-p|--property")) { ttac.config.property = ((OWLNamedObject) resolveObjectProperty( opts.nextOpt())).getIRI(); } else { break; } } String f = opts.nextOpt(); System.out.println("tabfile: "+f); ttac.parse(f); } else if (opts.nextEq("--load-labels")) { TableToAxiomConverter ttac = new TableToAxiomConverter(g); ttac.config.setPropertyToLabel(); ttac.config.axiomType = AxiomType.ANNOTATION_ASSERTION; String f = opts.nextOpt(); ttac.parse(f); } else if (opts.nextEq("--add-labels")) { Set<Integer> colsToLabel = new HashSet<Integer>(); while (opts.hasOpts()) { if (opts.nextEq("-c|--column")) { opts.info("COLNUMS", "number of col to label (starting from 1). 
Can be comma-separated list"); String v = opts.nextOpt(); for (String cn : v.split(",")) { colsToLabel.add(Integer.valueOf(cn)-1); } } else { break; } } LOG.info("Labeling: "+colsToLabel); File f = opts.nextFile(); List<String> lines = FileUtils.readLines(f); for (String line : lines) { String[] vals = line.split("\\t"); for (int i=0; i<vals.length; i++) { if (i>0) System.out.print("\t"); System.out.print(vals[i]); if (colsToLabel.contains(i)) { String label = "NULL"; String v = vals[i]; if (v != null && !v.equals("") && !v.contains(" ")) { OWLObject obj = g.getOWLObjectByIdentifier(v); if (obj != null) { label = g.getLabel(obj); } } System.out.print("\t"+label); } } System.out.println(); } } else if (opts.nextEq("--parse-tsv")) { opts.info("[-s] [-l] [--comment] [-m] [-p PROPERTY] [-a AXIOMTYPE] [-t INDIVIDUALSTYPE] FILE", "parses a tabular file to OWL axioms"); TableToAxiomConverter ttac = new TableToAxiomConverter(g); ttac.config.axiomType = AxiomType.CLASS_ASSERTION; while (opts.hasOpts()) { if (opts.nextEq("-s|--switch")) { opts.info("", "switch subject and object"); ttac.config.isSwitchSubjectObject = true; } else if (opts.nextEq("-l|--label")) { ttac.config.setPropertyToLabel(); ttac.config.axiomType = AxiomType.ANNOTATION_ASSERTION; } else if (opts.nextEq("--comment")) { ttac.config.setPropertyToComment(); ttac.config.axiomType = AxiomType.ANNOTATION_ASSERTION; } else if (opts.nextEq("-m|--map-xrefs")) { ttac.buildClassMap(g); } else if (opts.nextEq("-p|--prop")) { ttac.config.property = ((OWLNamedObject) resolveObjectProperty( opts.nextOpt())).getIRI(); //ttac.config.property = g.getOWLObjectProperty().getIRI(); } else if (opts.nextEq("--default1")) { ttac.config.defaultCol1 = opts.nextOpt(); } else if (opts.nextEq("--default2")) { ttac.config.defaultCol2 = opts.nextOpt(); } else if (opts.nextEq("--object-non-literal")) { ttac.config.isObjectLiteral = false; } else if (opts.nextEq("--iri-prefix")) { int col = 0; String x = opts.nextOpt(); if (x.equals("1") 
|| x.startsWith("s")) { col = 1; } else if (x.equals("2") || x.startsWith("o")) { col = 2; } else { // } String pfx = opts.nextOpt(); // note that we do not put the full URI prefix here for now //if (!pfx.startsWith("http:")) // pfx = "http://purl.obolibrary.org/obo/" + pfx + "_"; if (pfx.startsWith("http:")) ttac.config.iriPrefixMap.put(col, pfx); else ttac.config.iriPrefixMap.put(col, pfx+":"); } else if (opts.nextEq("-a|--axiom-type")) { ttac.config.setAxiomType(opts.nextOpt()); } else if (opts.nextEq("-t|--individuals-type")) { System.out.println("setting types"); ttac.config.individualsType = resolveClass( opts.nextOpt()); } else { throw new OptionException(opts.nextOpt()); } } String f = opts.nextOpt(); System.out.println("tabfile: "+f); ttac.parse(f); } else if (opts.nextEq("--parse-stanzas")) { opts.info("[-m KEY PROPERTY]* [-s] FILE", "parses a tabular file to OWL axioms"); StanzaToOWLConverter sc = new StanzaToOWLConverter(g); while (opts.hasOpts()) { if (opts.nextEq("-m|--map")) { String k = opts.nextOpt(); StanzaToOWLConverter.Mapping m = sc.new Mapping(); String p = opts.nextOpt(); m.property = this.resolveObjectProperty(p); // TODO - allow other types sc.config.keyMap.put(k, m); } else if (opts.nextEq("-s|--strict")) { opts.info("", "set if to be run in strict mode"); sc.config.isStrict = true; } else if (opts.nextEq("--prefix")) { sc.config.defaultPrefix = opts.nextOpt(); } else { continue; } } String f = opts.nextOpt(); System.out.println("tabfile: "+f); sc.parse(f); } else if (opts.nextEq("--idmap-extract-pairs")) { opts.info("IDType1 IDType2 PIRMapFile", "extracts pairs from mapping file"); IDMappingPIRParser p = new IDMappingPIRParser(); IDMapPairWriter h = new IDMapPairWriter(); h.setPair(opts.nextOpt(), opts.nextOpt()); p.handler = h; p.parse(new File(opts.nextOpt())); } else if (opts.nextEq("--parser-idmap")) { opts.info("UniProtIDMapFile", "..."); UniProtIDMapParser p = new UniProtIDMapParser(); p.parse(new File(opts.nextOpt())); 
System.out.println("Types:"+p.idMap.size()); // TODO... } else if (opts.nextEq("--extract-ontology-subset")) { opts.info("[-i FILE][-u IRI][-s SUBSET][--fill-gaps]", "performs slimdown using IDs from FILE or from named subset"); IRI subOntIRI = IRI.create("http://purl.obolibrary.org/obo/"+g.getOntologyId()+"-subset"); String fileName = null; String subset = null; boolean isFillGaps = false; boolean isSpanGaps = true; while (opts.hasOpts()) { if (opts.nextEq("-u|--uri|--iri")) { subOntIRI = IRI.create(opts.nextOpt()); } else if (opts.nextEq("-i|--input-file")) { fileName = opts.nextOpt(); } else if (opts.nextEq("-s|--subset")) { subset = opts.nextOpt(); } else if (opts.nextEq("--fill-gaps")) { isFillGaps = true; isSpanGaps = false; } else if (opts.nextEq("--minimal")) { isFillGaps = false; isSpanGaps = false; } else { break; } } Mooncat m = new Mooncat(g); Set<OWLClass> cs = new HashSet<OWLClass>(); if (fileName != null) { LOG.info("Reading IDs from: "+fileName); Set<String> unmatchedIds = new HashSet<String>(); for (String line : FileUtils.readLines(new File(fileName))) { OWLClass c = g.getOWLClassByIdentifierNoAltIds(line); if (c == null) { unmatchedIds.add(line); continue; } cs.add(c); } LOG.info("# IDs = "+cs.size()); if (unmatchedIds.size() > 0) { LOG.error(fileName+" contains "+unmatchedIds.size()+" unmatched IDs"); for (String id : unmatchedIds) { LOG.error("UNMATCHED: "+id); } } } if (subset != null) { LOG.info("Adding IDs from "+subset); cs.addAll(g.getOWLClassesInSubset(subset)); } if (cs.size() == 0) { LOG.warn("EMPTY SUBSET"); } // todo LOG.info("Making subset ontology seeded from "+cs.size()+" classes"); g.setSourceOntology(m.makeMinimalSubsetOntology(cs, subOntIRI, isFillGaps, isSpanGaps)); LOG.info("Made subset ontology; # classes = "+cs.size()); } else if (opts.nextEq("--extract-module")) { opts.info("[-n IRI] [-d] [-s SOURCE-ONTOLOGY] [-c] [-m MODULE-TYPE] SEED-OBJECTS", "Uses the OWLAPI module extractor"); String modIRI = null; ModuleType mtype = 
ModuleType.BOT; boolean isTraverseDown = false; boolean isMerge = false; OWLOntology baseOnt = g.getSourceOntology(); IRI dcSource = null; while (opts.hasOpts()) { if (opts.nextEq("-n")) { modIRI = opts.nextOpt(); } else if (opts.nextEq("-d")) { opts.info("", "Is set, will traverse down class hierarchy to form seed set"); isTraverseDown = true; } else if (opts.nextEq("-c|--merge")) { opts.info("", "Is set, do not use a command-line specified seed object list - use the source ontology as list of seeds"); isMerge = true; } else if (opts.nextEq("-s|--source")) { String srcName = opts.nextOpt(); baseOnt = g.getManager().getOntology(IRI.create(srcName)); if (baseOnt == null) { LOG.error("Could not find specified ontology "+srcName+" for --source"); } } else if (opts.nextEq("-m") || opts.nextEq("--module-type")) { opts.info("MODULE-TYPE", "One of: STAR, TOP, BOT (default)"); mtype = ModuleType.valueOf(opts.nextOpt()); } else { break; } } Set<OWLObject> objs = new HashSet<OWLObject>(); if (isMerge) { // add all relations and classes to seed set // merge support set closure objs.addAll( g.getSourceOntology().getObjectPropertiesInSignature() ); objs.addAll( g.getSourceOntology().getClassesInSignature() ); for (OWLOntology ont : g.getSupportOntologySet()) g.mergeOntology(ont); g.setSupportOntologySet(new HashSet<OWLOntology>()); } else { objs = this.resolveEntityList(opts); } LOG.info("OBJS: "+objs.size()); Set<OWLEntity> seedSig = new HashSet<OWLEntity>(); if (isTraverseDown) { OWLReasoner mr = this.createReasoner(baseOnt, reasonerName, g.getManager()); try { for (OWLObject obj : objs) { if (obj instanceof OWLClassExpression) { seedSig.addAll(mr.getSubClasses((OWLClassExpression) obj, false).getFlattened()); } else if (obj instanceof OWLObjectPropertyExpression) { for (OWLObjectPropertyExpression pe : mr.getSubObjectProperties((OWLObjectPropertyExpression) obj, false).getFlattened()) { if (pe instanceof OWLObjectProperty) { seedSig.add((OWLObjectProperty) pe); } } } } } 
finally { mr.dispose(); } } SyntacticLocalityModuleExtractor sme = new SyntacticLocalityModuleExtractor(g.getManager(), baseOnt, mtype); for (OWLObject obj : objs) { if (obj instanceof OWLEntity) { seedSig.add((OWLEntity) obj); } } Set<OWLAxiom> modAxioms = sme.extract(seedSig); OWLOntology modOnt; if (modIRI == null) { modOnt = g.getManager().createOntology(); } else { modOnt = g.getManager().createOntology(IRI.create(modIRI)); } if (dcSource == null) { OWLOntologyID oid = baseOnt.getOntologyID(); Optional<IRI> versionIRI = oid.getVersionIRI(); if (versionIRI.isPresent()) { dcSource = versionIRI.get(); } else { Optional<IRI> ontologyIRI = oid.getOntologyIRI(); if (ontologyIRI.isPresent()) { dcSource = ontologyIRI.get(); } } } g.getManager().addAxioms(modOnt, modAxioms); g.setSourceOntology(modOnt); if (dcSource != null) { LOG.info("Setting source: "+dcSource); OWLAnnotation ann = g.getDataFactory().getOWLAnnotation(g.getDataFactory().getOWLAnnotationProperty( IRI.create("http://purl.org/dc/elements/1.1/source")), dcSource); AddOntologyAnnotation addAnn = new AddOntologyAnnotation(g.getSourceOntology(), ann); g.getManager().applyChange(addAnn); } } else if (opts.nextEq("--translate-disjoint-to-equivalent|--translate-disjoints-to-equivalents")) { opts.info("", "adds (Xi and Xj = Nothing) for every DisjointClasses(X1...Xn) where i<j<n"); Mooncat m = new Mooncat(g); m.translateDisjointsToEquivalents(); } else if (opts.nextEq("--build-property-view-ontology|--bpvo")) { opts.info("[-p PROPERTY] [-o OUTFILE] [-r REASONER] [--filter-unused] [--prefix STR] [--suffix STR] [--avfile FILE] [--i2c]", "generates a new ontology O' from O using property P such that for each C in O, O' contains C' = P some C"); OWLOntology sourceOntol = g.getSourceOntology(); // TODO - for now assume exactly 0 or 1 support ontology; if 1, the support is the element ontology OWLOntology annotOntol; if (g.getSupportOntologySet().size() == 1) annotOntol = g.getSupportOntologySet().iterator().next(); 
else if (g.getSupportOntologySet().size() == 0) annotOntol = g.getManager().createOntology(); else throw new OptionException("must have zero or one support ontologies"); OWLObjectProperty viewProperty = null; String outFile = null; String suffix = null; String prefix = null; boolean isFilterUnused = false; boolean isReplace = false; boolean noReasoner = false; boolean isCreateReflexiveClasses = false; String avFile = null; String viewIRI = "http://example.org/"; while (opts.hasOpts()) { if (opts.nextEq("-p")) { opts.info("PROPERTY-ID-OR-LABEL", "The ObjectProperty P that is used to build the view"); viewProperty = resolveObjectProperty(opts.nextOpt()); } else if (opts.nextEq("-r")) { opts.info("REASONERNAME", "e.g. elk"); reasonerName = opts.nextOpt(); } else if (opts.nextEq("--no-reasoner|nr")) { opts.info("", "do not build an inferred view ontology"); noReasoner = true; } else if (opts.nextEq("--prefix")) { opts.info("STR", "each class in O(P) will have this prefix in its label"); prefix = opts.nextOpt(); } else if (opts.nextEq("--suffix")) { opts.info("STR", "each class in O(P) will have this suffix in its label"); suffix = opts.nextOpt(); } else if (opts.nextEq("-o")) { opts.info("FILE", "file to save O(P)' [i.e. reasoned view ontology] into"); outFile = opts.nextOpt(); } else if (opts.nextEq("--view-iri")) { opts.info("IRI", "IRI for the view ontology"); viewIRI = opts.nextOpt(); } else if (opts.nextEq("--avfile")) { opts.info("FILE", "file to save O(P) [i.e. 
non-reasoner view ontology] into"); avFile = opts.nextOpt(); } else if (opts.nextEq("--filter-unused")) { opts.info("", "if set, any class or individual that is not subsumed by P some Thing is removed from O(P)"); isFilterUnused = true; } else if (opts.nextEq("--reflexive")) { opts.info("", "Treat property as reflexive"); isCreateReflexiveClasses = true; } else if (opts.nextEq("--replace")) { opts.info("", "if set, the source ontology is replaced with O(P)'"); isReplace = true; } else if (opts.nextEq("" + "")) { annotOntol = g.getSourceOntology(); } else break; } PropertyViewOntologyBuilder pvob = new PropertyViewOntologyBuilder(sourceOntol, annotOntol, viewProperty); pvob.setViewLabelPrefix(prefix); pvob.setViewLabelSuffix(suffix); pvob.buildViewOntology(IRI.create("http://x.org/assertedViewOntology"), IRI.create(viewIRI)); pvob.setFilterUnused(isFilterUnused); pvob.setCreateReflexiveClasses(isCreateReflexiveClasses); OWLOntology avo = pvob.getAssertedViewOntology(); if (avFile != null) pw.saveOWL(avo, avFile); if (noReasoner) { pvob.setInferredViewOntology(pvob.getAssertedViewOntology()); } else { OWLReasoner vr = createReasoner(avo, reasonerName, g.getManager()); pvob.buildInferredViewOntology(vr); vr.dispose(); } // save if (outFile != null) pw.saveOWL(pvob.getInferredViewOntology(), outFile); else if (isReplace) { g.setSourceOntology(pvob.getInferredViewOntology()); } else { g.addSupportOntology(pvob.getInferredViewOntology()); } } else if (opts.nextEq("--materialize-property-inferences|--mpi")) { opts.info("[-p [-r] PROPERTY]... [-m|--merge]", "reasoned property view. 
Alternative to --bpvo"); // TODO - incorporate this into sparql query Set<OWLObjectProperty> vps = new HashSet<OWLObjectProperty>(); Set<OWLObjectProperty> reflexiveVps = new HashSet<OWLObjectProperty>(); boolean isMerge = false; boolean isPrereason = true; while (opts.hasOpts()) { if (opts.nextEq("-p")) { opts.info("[-r] PROPERTY-ID-OR-LABEL", "The ObjectProperty P that is used to build the view. If -r is specified the view is reflexive"); boolean isReflexive = false; if (opts.nextEq("-r|--reflexive")) isReflexive = true; String s = opts.nextOpt(); OWLObjectProperty viewProperty = resolveObjectProperty(s); if (viewProperty == null) { // the method resolveObjectProperty, will log already a error // escalate to an exception throw new IOException("Could not find an OWLObjectProperty for string: "+s); } vps.add(viewProperty); if (isReflexive) reflexiveVps.add(viewProperty); } else if (opts.nextEq("--merge|-m")) { isMerge = true; } else if (opts.nextEq("--no-assert-inferences|-n")) { isPrereason = false; } else { break; } } if (!isPrereason && !isMerge) { LOG.warn("ontology will be empty!"); } OWLOntology baseOntology = g.getSourceOntology(); OWLOntology vOnt = g.getManager().createOntology(); if (!isMerge) { // make the source ontology the new view g.setSourceOntology(vOnt); } Set<OWLClass> allvcs = new HashSet<OWLClass>(); for (OWLObjectProperty vp : vps) { PropertyViewOntologyBuilder pvob = new PropertyViewOntologyBuilder(baseOntology, vp); if (reflexiveVps.contains(vp)) pvob.setCreateReflexiveClasses(true); pvob.buildViewOntology(); OWLOntology avo = pvob.getAssertedViewOntology(); Set<OWLClass> vcs = avo.getClassesInSignature(); LOG.info("view for "+vp+" num view classes: "+vcs.size()); allvcs.addAll(vcs); g.mergeOntology(avo); // todo - more sophisticated } if (isPrereason) { if (reasoner == null) { reasoner = createReasoner(g.getSourceOntology(),reasonerName,g.getManager()); LOG.info("created reasoner: "+reasoner); } for (OWLClass c : 
g.getSourceOntology().getClassesInSignature(Imports.INCLUDED)) { Set<OWLClass> scs = reasoner.getSuperClasses(c, false).getFlattened(); for (OWLClass sc : scs) { OWLSubClassOfAxiom sca = g.getDataFactory().getOWLSubClassOfAxiom(c, sc); g.getManager().addAxiom(vOnt, sca); } // inferred (named classes) plus asserted (include class expressions) Set<OWLClassExpression> ecs = OwlHelper.getEquivalentClasses(c, g.getSourceOntology()); ecs.addAll(reasoner.getEquivalentClasses(c).getEntities()); for (OWLClassExpression ec : ecs) { if (ec.equals(c)) continue; OWLEquivalentClassesAxiom eca = g.getDataFactory().getOWLEquivalentClassesAxiom(c, ec); g.getManager().addAxiom(vOnt, eca); // bidirectional subclass axioms for each equivalent pair OWLSubClassOfAxiom sca1 = g.getDataFactory().getOWLSubClassOfAxiom(c, ec); g.getManager().addAxiom(vOnt, sca1); OWLSubClassOfAxiom sca2 = g.getDataFactory().getOWLSubClassOfAxiom(ec, c); g.getManager().addAxiom(vOnt, sca2); } } } else { } // TODO - turn allvcs into bnodes if (isMerge) { g.mergeOntology(vOnt); } else { g.setSourceOntology(vOnt); } } else if (opts.nextEq("--materialize-existentials")) { opts.info("[-p PROP][-l PROPLIST]", "builds view ontology with existentials named"); Set<OWLObjectSomeValuesFrom> svfs = new HashSet<OWLObjectSomeValuesFrom>(); Set<OWLObjectProperty> props = new HashSet<OWLObjectProperty>(); while (opts.hasOpts()) { if (opts.nextEq("-p")) { props.add(this.resolveObjectProperty(opts.nextOpt())); } else if (opts.nextEq("-l|--list")) { props.addAll(this.resolveObjectPropertyList(opts)); } else { break; } } LOG.info("Materializing: "+props); OWLPrettyPrinter owlpp = new OWLPrettyPrinter(g); for (OWLOntology ont : g.getAllOntologies()) { for (OWLAxiom ax : ont.getAxioms()) { if (ax instanceof OWLSubClassOfAxiom) { OWLClassExpression supc = ((OWLSubClassOfAxiom)ax).getSuperClass(); if (supc instanceof OWLObjectSomeValuesFrom) { svfs.add((OWLObjectSomeValuesFrom) supc); } } else if (ax instanceof 
OWLEquivalentClassesAxiom) { for (OWLClassExpression x : ((OWLEquivalentClassesAxiom)ax).getClassExpressions()) { if (x instanceof OWLObjectIntersectionOf) { for (OWLClassExpression y : ((OWLObjectIntersectionOf)x).getOperands()) { if (y instanceof OWLObjectSomeValuesFrom) { svfs.add((OWLObjectSomeValuesFrom) y); } } } } } } } Set<OWLAxiom> newAxioms = new HashSet<OWLAxiom>(); OWLDataFactory df = g.getDataFactory(); for (OWLObjectSomeValuesFrom svf : svfs) { if (svf.getFiller().isAnonymous()) continue; if (svf.getProperty().isAnonymous()) continue; OWLObjectProperty p = (OWLObjectProperty) svf.getProperty(); if (!props.contains(p)) continue; OWLClass c = (OWLClass) svf.getFiller(); PropertyViewOntologyBuilder pvob = new PropertyViewOntologyBuilder(g.getSourceOntology(), p); IRI xIRI = pvob.makeViewClassIRI(c.getIRI(), p.getIRI(), "-"); String label = "Reflexive "+ g.getLabel(p) + " " + g.getLabel(c); OWLClass xc = df.getOWLClass(xIRI); newAxioms.add(df.getOWLEquivalentClassesAxiom(xc, svf)); newAxioms.add(df.getOWLSubClassOfAxiom(c, xc)); newAxioms.add(df.getOWLAnnotationAssertionAxiom(df.getRDFSLabel(), xIRI, df.getOWLLiteral(label))); } LOG.info("Adding "+newAxioms.size()+ " axioms"); g.getManager().addAxioms(g.getSourceOntology(), newAxioms); } else if (opts.nextEq("--report-profile")) { g.getProfiler().report(); } else if (opts.nextEq("--no-cache")) { g.getConfig().isCacheClosure = false; } else if (opts.nextEq("--repeat")) { List<String> ops = new ArrayList<String>(); while (opts.hasArgs()) { if (opts.nextEq("--end")) { break; } else { String op = opts.nextOpt(); ops.add(op); } } // TODO } else if (opts.nextEq("--start-server")) { int port = 9000; while (opts.hasOpts()) { if (opts.nextEq("-p")) { port = Integer.parseInt(opts.nextOpt()); } else { break; } } Server server = new Server(port); server.setHandler(new OWLServer(g)); try { server.start(); server.join(); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } else if 
(opts.nextEq("--create-ontology")) { opts.info("ONT-IRI", "creates a new OWLOntology and makes it the source ontology"); g = new OWLGraphWrapper(opts.nextOpt()); } else if (opts.nextEq("--parse-obo")) { String f = opts.nextOpt(); OWLOntology ont = pw.parseOBO(f); if (g == null) g = new OWLGraphWrapper(ont); else { System.out.println("adding support ont "+ont); g.addSupportOntology(ont); System.out.println("Added support ont"); } } else if (opts.nextEq("--load-ontologies-as-imports")) { opts.info("[ONT]+", "loads and adds the specified ontologies as imports"); List<String> ontologyList = opts.nextList(); if (ontologyList == null || ontologyList.isEmpty()) { LOG.error("No ontologies specified for the command. At least one ontology is required."); exit(-1); } // create a new empty ontology if there is no previous graph final OWLOntologyManager m; final OWLOntology containerOntology; if (g == null) { m = pw.getManager(); containerOntology = m.createOntology(IRI.generateDocumentIRI()); g = new OWLGraphWrapper(containerOntology); } else { m = g.getManager(); containerOntology = g.getSourceOntology(); } final OWLDataFactory factory = m.getOWLDataFactory(); for(String ont : ontologyList) { // load ontology OWLOntology owlOntology = pw.parse(ont); // check for usable ontology ID and ontology IRI OWLOntologyID ontologyID = owlOntology.getOntologyID(); if (ontologyID == null) { LOG.error("The ontology: "+ont+" does not have a valid ontology ID"); exit(-1); } else { Optional<IRI> documentIRI = ontologyID.getDefaultDocumentIRI(); if (documentIRI.isPresent() == false) { LOG.error("The ontology: "+ont+" does not have a valid document IRI"); exit(-1); }else { // add as import, instead of merge OWLImportsDeclaration importDeclaration = factory.getOWLImportsDeclaration(documentIRI.get()); OWLOntologyChange change = new AddImport(containerOntology, importDeclaration); m.applyChange(change); } } } } else { // check first if there is a matching annotated method // always check, to 
					// support introspection via '-h'
					boolean called = false;
					Method[] methods = getClass().getMethods();
					// Reflective dispatch: try every method annotated with @CLIMethod
					// and invoke the one whose command string matches the next option.
					for (Method method : methods) {
						CLIMethod cliMethod = method.getAnnotation(CLIMethod.class);
						if (cliMethod !=null) {
							if (opts.nextEq(cliMethod.value())) {
								called = true;
								try {
									method.invoke(this, opts);
								} catch (InvocationTargetException e) {
									// the underlying method has throw an exception
									// unwrap and rethrow the real cause so callers see it directly
									Throwable cause = e.getCause();
									if (cause instanceof Exception) {
										throw ((Exception) cause);
									}
									throw e;
								}
							}
						}
					}
					if (called) {
						continue;
					}
					if (opts.hasArgs()) {
						// Default is to treat argument as an ontology
						String f = opts.nextOpt();
						try {
							OWLOntology ont = null;
							if (f.endsWith("obo")) {
								ont = pw.parseOBO(f);
							}
							else {
								ont = pw.parse(f);
							}
							// first ontology becomes the source; later ones are support ontologies
							if (g == null) {
								g = new OWLGraphWrapper(ont);
							}
							else {
								System.out.println("adding support ont "+ont);
								g.addSupportOntology(ont);
							}
						} catch (Exception e) {
							LOG.error("could not parse:"+f, e);
							if (exitOnException) {
								exit(1);
							}
							else {
								throw e;
							}
						}
					}
					else {
						if (opts.isHelpMode()) {
							helpFooter();
							// should only reach here in help mode
						}
					}
				}
			}
	}

	// TODO stub: intended to render an OWLClass for display; currently always returns null.
	private String owlpp(OWLClass c) {
		// TODO Auto-generated method stub
		return null;
	}

	/**
	 * Annotates each axiom with the ontology (or ontologies) in the imports closure
	 * that asserts it, using the annotation property http://trace.module/source-ont.
	 * Axioms found in no ontology are passed through unchanged.
	 *
	 * @param axioms axioms to trace
	 * @param g      graph wrapper supplying the source ontology and its imports closure
	 * @param df     data factory used to build annotations
	 * @return a new set with provenance-annotated copies of the input axioms
	 */
	static Set<OWLAxiom> traceAxioms(Set<OWLAxiom> axioms, OWLGraphWrapper g, OWLDataFactory df) {
		final OWLAnnotationProperty p = df.getOWLAnnotationProperty(IRI.create("http://trace.module/source-ont"));
		final Set<OWLOntology> ontologies = g.getSourceOntology().getImportsClosure();
		final Set<OWLAxiom> traced = new HashSet<OWLAxiom>();
		for (OWLAxiom axiom : axioms) {
			// collect every ontology in the closure that contains this exact axiom
			Set<OWLOntology> hits = new HashSet<OWLOntology>();
			for(OWLOntology ont : ontologies) {
				if (ont.containsAxiom(axiom)) {
					hits.add(ont);
				}
			}
			if (hits.isEmpty()) {
				traced.add(axiom);
			}
			else {
				// keep existing annotations and add one source annotation per hit
				Set<OWLAnnotation> annotations = new HashSet<OWLAnnotation>(axiom.getAnnotations());
				for (OWLOntology hit : hits) {
					Optional<IRI> hitIRI = hit.getOntologyID().getOntologyIRI();
					if(hitIRI.isPresent()) {
						annotations.add(df.getOWLAnnotation(p, hitIRI.get()));
					}
				}
				traced.add(AxiomAnnotationTools.changeAxiomAnnotations(axiom, annotations, df));
			}
		}
		return traced;
	}

	/**
	 * Removes from src all axioms about classes not reachable from seedClasses by
	 * walking asserted superclass and equivalence expressions (signature closure).
	 *
	 * @param src         ontology to prune (modified in place)
	 * @param seedClasses starting classes; everything reachable from them is kept
	 * @return the set of classes that were kept (the reachable closure)
	 */
	private Set<OWLClass> removeUnreachableAxioms(OWLOntology src, Set<OWLClass> seedClasses) {
		// iterative DFS over asserted super/equivalent class expressions
		Stack<OWLClass> stack = new Stack<OWLClass>();
		stack.addAll(seedClasses);
		Set<OWLClass> visited = new HashSet<OWLClass>();
		visited.addAll(stack);
		while (!stack.isEmpty()) {
			OWLClass elt = stack.pop();
			Set<OWLClass> parents = new HashSet<OWLClass>();
			Set<OWLClassExpression> xparents = OwlHelper.getSuperClasses(elt, src);
			xparents.addAll(OwlHelper.getEquivalentClasses(elt, src));
			for (OWLClassExpression x : xparents) {
				// any named class mentioned in a parent expression counts as reachable
				parents.addAll(x.getClassesInSignature());
			}
			//parents.addAll(getReasoner().getSuperClasses(elt, true).getFlattened());
			//parents.addAll(getReasoner().getEquivalentClasses(elt).getEntities());
			parents.removeAll(visited);
			stack.addAll(parents);
			visited.addAll(parents);
		}
		LOG.info("# in closure set to keep: "+visited.size());
		Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>();
		for (OWLClass c : src.getClassesInSignature()) {
			if (!visited.contains(c)) {
				//LOG.info("removing axioms for EL-unreachable class: "+c);
				rmAxioms.addAll(src.getAxioms(c, Imports.EXCLUDED));
				rmAxioms.add(src.getOWLOntologyManager().getOWLDataFactory().getOWLDeclarationAxiom(c));
			}
		}
		src.getOWLOntologyManager().removeAxioms(src, rmAxioms);
		LOG.info("Removed "+rmAxioms.size()+" axioms. Remaining: "+src.getAxiomCount());
		return visited;
	}

	/**
	 * Removes from the given set every axiom whose class signature mentions a
	 * class that is obsolete (deprecated) in the current graph. Mutates the input set.
	 *
	 * @param axioms set to filter in place
	 */
	private void removeAxiomsReferencingDeprecatedClasses(Set<OWLAxiom> axioms) {
		Set<OWLAxiom> rmAxioms = new HashSet<OWLAxiom>();
		for (OWLAxiom axiom : axioms) {
			for (OWLClass c : axiom.getClassesInSignature()) {
				if (g.isObsolete(c)) {
					rmAxioms.add(axiom);
					break;
				}
			}
		}
		axioms.removeAll(rmAxioms);
	}

	/**
	 * Writes external-db-to-GO mapping files (one file per external db, named
	 * "&lt;db&gt;2go") from xref annotations on non-obsolete classes.
	 * Options configure output folder, db list, header files and comment prefix.
	 *
	 * @param opts command line options
	 * @throws Exception on I/O or parse failure
	 */
	@CLIMethod("--external-mappings-files")
	public void createExternalMappings(Opts opts) throws Exception {
		if (g == null) {
			System.err.println("No graph available for gaf-run-check.");
			exit(-1);
			return;
		}
		File headerFilesFolder = null;
		String headerFileSuffix = ".header";
		List<String> externalDbNames = null;
		File outputFolder = new File(".").getCanonicalFile();
		String commentPrefix = "!";
		String labelPrefix = "";
		while (opts.hasOpts()) {
			if (opts.nextEq("-o|--output|--output-folder"))
				outputFolder = opts.nextFile().getCanonicalFile();
			else if (opts.nextEq("--go-external-default")) {
				// preset db list and label prefix for the GO pipeline
				externalDbNames = Arrays.asList("EC","MetaCyc","Reactome","RESID","UM-BBD_enzymeID","UM-BBD_pathwayID","Wikipedia");
				labelPrefix = "GO:";
			}
			else if(opts.nextEq("--label-prefix")) {
				labelPrefix = opts.nextOpt();
			}
			else if(opts.nextEq("--externals")) {
				externalDbNames = opts.nextList();
			}
			else if (opts.nextEq("--load-headers-from")) {
				headerFilesFolder = opts.nextFile().getCanonicalFile();
			}
			else if (opts.nextEq("--load-headers")) {
				headerFilesFolder = new File(".").getCanonicalFile();
			}
			else if (opts.nextEq("--set-header-file-suffix")) {
				headerFileSuffix = opts.nextOpt();
			}
			else if (opts.nextEq("--comment-prefix")) {
				commentPrefix = opts.nextOpt();
			}
			else {
				break;
			}
		}
		if (externalDbNames == null || externalDbNames.isEmpty()) {
			System.err.println("No external db for extraction defined.");
			exit(-1);
			return;
		}
		// setup date string and ontology version strings
		StringBuilder header = new StringBuilder();
		OWLOntology ont = g.getSourceOntology();
		String ontologyId = Owl2Obo.getOntologyId(ont);
		String dataVersion = Owl2Obo.getDataVersion(ont);
		// build the generated-file header: UTC timestamp plus ontology id/version if known
		header.append(commentPrefix);
		header.append(" Generated on ");
		TimeZone tz = TimeZone.getTimeZone("UTC");
		DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm'Z'");
		df.setTimeZone(tz);
		header.append(df.format(new Date()));
		if (ontologyId != null) {
			header.append(" from the ontology '");
			header.append(ontologyId);
			header.append('\'');
			if (dataVersion != null) {
				header.append(" with data version: '");
				header.append(dataVersion);
				header.append('\'');
			}
		}
		header.append('\n');
		header.append(commentPrefix).append('\n');

		// load external mappings per db type
		for(String db : externalDbNames) {
			String prefix = db+":";
			// xref (truncated at first space) -> classes carrying that xref
			Map<String, Set<OWLClass>> externalMappings = new HashMap<String, Set<OWLClass>>();
			Set<OWLClass> allOWLClasses = g.getAllOWLClasses();
			for (OWLClass owlClass : allOWLClasses) {
				boolean obsolete = g.isObsolete(owlClass);
				if (obsolete == false) {
					List<String> xrefs = g.getXref(owlClass);
					if (xrefs != null && !xrefs.isEmpty()) {
						for (String xref : xrefs) {
							if (xref.startsWith(prefix)) {
								String x = xref;
								// strip any trailing text after the first whitespace
								int whitespacePos = xref.indexOf(' ');
								if (whitespacePos > 0) {
									x = xref.substring(0, whitespacePos);
								}
								Set<OWLClass> classSet = externalMappings.get(x);
								if (classSet == null) {
									classSet = new HashSet<OWLClass>();
									externalMappings.put(x, classSet);
								}
								classSet.add(owlClass);
							}
						}
					}
				}
			}
			// sort
			List<String> xrefList = new ArrayList<String>(externalMappings.keySet());
			Collections.sort(xrefList);

			// open writer
			BufferedWriter writer = new BufferedWriter(new FileWriter(new File(outputFolder, db.toLowerCase()+"2go")));

			// check for pre-defined headers
			if (headerFilesFolder != null) {
				File headerFile = new File(headerFilesFolder, db.toLowerCase()+headerFileSuffix);
				if (headerFile.isFile() && headerFile.canRead()) {
					LineIterator lineIterator = FileUtils.lineIterator(headerFile);
					while (lineIterator.hasNext()) {
						String line = lineIterator.next();
						// minor trickery
						// if the header lines do not have the comment prefix, add it
						if (line.startsWith(commentPrefix) == false)
						{
							writer.append(commentPrefix);
							writer.append(' ');
						}
						writer.append(line);
						writer.append('\n');
					}
				}
			}
			// add generated header
			writer.append(header);

			// append sorted xrefs
			for (String xref : xrefList) {
				Set<OWLClass> classes = externalMappings.get(xref);
				List<OWLClass> classesList = new ArrayList<OWLClass>(classes);
				Collections.sort(classesList);
				for (OWLClass cls : classesList) {
					// one "XREF > LABEL ; ID" line per mapped class
					String id = g.getIdentifier(cls);
					String lbl = g.getLabel(cls);
					writer.append(xref);
					writer.append(" > ");
					writer.append(labelPrefix);
					writer.append(lbl);
					writer.append(" ; ");
					writer.append(id);
					writer.append('\n');
				}
			}
			IOUtils.closeQuietly(writer);
		}
	}

	/**
	 * Materializes inferred object property assertions and class assertions for all
	 * named individuals into the source ontology (or a fresh one with -n|--new).
	 * Data property assertions are not handled.
	 *
	 * @param opts command line options
	 * @throws Exception on reasoner or ontology-management failure
	 */
	@CLIMethod("--assert-abox-inferences")
	public void assertAboxInferences(Opts opts) throws Exception {
		opts.info("", "Finds all inferred OPEs and ClassAssertions and asserts them. Does not handle DPEs. Resulting ontology can be used for sparql queries");
		boolean isNew = false;
		while (opts.hasOpts()) {
			if (opts.nextEq("-n|--new")) {
				isNew = true;
			}
			else
				break;
		}
		Set<OWLAxiom> newAxioms = new HashSet<OWLAxiom>();
		OWLOntology ont = g.getSourceOntology();
		// TODO : move this to a utility class
		OWLOntologyManager mgr = ont.getOWLOntologyManager();
		OWLDataFactory df = mgr.getOWLDataFactory();
		LOG.info("Initial axioms:"+ont.getAxioms(true).size());
		for (OWLNamedIndividual ind : ont.getIndividualsInSignature(Imports.INCLUDED)) {
			//LOG.info("Checking: "+ind);
			// every inferred property value becomes an asserted OPE axiom
			for (OWLObjectProperty p : ont.getObjectPropertiesInSignature(Imports.INCLUDED)) {
				NodeSet<OWLNamedIndividual> vs = reasoner.getObjectPropertyValues(ind, p);
				for (OWLNamedIndividual v : vs.getFlattened()) {
					//LOG.info("NEW: "+ind+" -> "+p+" -> "+v);
					newAxioms.add(df.getOWLObjectPropertyAssertionAxiom(p, ind, v));
				}
			}
			// all (not only direct) inferred types become class assertions
			for (OWLClass c : reasoner.getTypes(ind, false).getFlattened()) {
				newAxioms.add(df.getOWLClassAssertionAxiom(c, ind));
				//LOG.info("NEW: "+ind+" :: "+c);
			}
		}
		OWLPrettyPrinter owlpp = new OWLPrettyPrinter(g);
		for (OWLAxiom a : newAxioms) {
			LOG.info("NEW: "+owlpp.render(a));
		}
		LOG.info("# OF NEW AXIOMS: "+newAxioms.size());
		if (isNew) {
			// replace the source with an empty ontology that receives only the inferences
			g.setSourceOntology(mgr.createOntology());
		}
		mgr.addAxioms(g.getSourceOntology(), newAxioms);
	}

	/**
	 * Asserts all entailed direct SubClassOf axioms that are not already asserted,
	 * optionally removing redundant ones; delegates to AssertInferenceTool.
	 *
	 * @param opts command line options
	 * @throws Exception on reasoner or I/O failure
	 */
	@CLIMethod("--assert-inferred-subclass-axioms")
	public void assertInferredSubClassAxioms(Opts opts) throws Exception {
		opts.info("[--removeRedundant] [--keepRedundant] [--always-assert-super-classes] [--markIsInferred] [--useIsInferred] [--ignoreNonInferredForRemove] [--allowEquivalencies] [--reportProfile]",
				"Adds SubClassOf axioms for all entailed direct SubClasses not already asserted");
		boolean removeRedundant = true;
		boolean checkConsistency = true;
		boolean useIsInferred = false;
		boolean ignoreNonInferredForRemove = false;
		boolean checkForNamedClassEquivalencies = true;
		boolean checkForPotentialRedundant = false;
		boolean alwaysAssertSuperClasses = false;
		String reportFile = null;
		while (opts.hasOpts()) {
			if (opts.nextEq("--removeRedundant"))
				removeRedundant = true;
			else if (opts.nextEq("--keepRedundant")) {
				removeRedundant = false;
			}
			else if (opts.nextEq("--markIsInferred")) {
				useIsInferred = true;
			}
			else if (opts.nextEq("--useIsInferred")) {
				// implies ignoring non-inferred axioms during removal
				useIsInferred = true;
				ignoreNonInferredForRemove = true;
			}
			else if (opts.nextEq("--ignoreNonInferredForRemove")) {
				ignoreNonInferredForRemove = true;
			}
			else if (opts.nextEq("--allowEquivalencies")) {
				checkForNamedClassEquivalencies = false;
			}
			else if (opts.nextEq("--reportFile")) {
				reportFile = opts.nextOpt();
			}
			else if (opts.nextEq("--always-assert-super-classes")) {
				opts.info("", "if specified, always assert a superclass, " +
						"even if there exists an equivalence axiom is trivially entails in in solation");
				alwaysAssertSuperClasses = true;
			}
			else {
				break;
			}
		}
		BufferedWriter reportWriter = null;
		if (reportFile != null) {
			reportWriter = new BufferedWriter(new FileWriter(reportFile));
		}
		OWLClassFilter filter = null;
		try {
			AssertInferenceTool.assertInferences(g, removeRedundant, checkConsistency, useIsInferred, ignoreNonInferredForRemove, checkForNamedClassEquivalencies, checkForPotentialRedundant, alwaysAssertSuperClasses, filter, reportWriter);
		}
		finally {
			IOUtils.closeQuietly(reportWriter);
		}
	}

	/**
	 * Removes asserted SubClassOf axioms to named superclasses that are not
	 * direct superclasses according to the reasoner. Fails fast on an
	 * inconsistent ontology or unsatisfiable classes.
	 *
	 * @param opts command line options (unused beyond dispatch)
	 * @throws Exception on reasoner failure
	 */
	@CLIMethod("--remove-redundant-superclass")
	public void removeRedundantSubclasses(Opts opts) throws Exception {
		if (g == null) {
			LOG.error("No source ontology available.");
			exit(-1);
			return;
		}
		if (reasoner == null) {
			LOG.error("No resoner available.");
			exit(-1);
			return;
		}
		if (reasoner.isConsistent() == false) {
			LOG.error("Ontology is inconsistent.");
			exit(-1);
			return;
		}
		Set<OWLClass> unsatisfiableClasses = reasoner.getUnsatisfiableClasses().getEntitiesMinusBottom();
		if (unsatisfiableClasses.isEmpty() == false) {
			LOG.error("Ontology contains unsatisfiable classes, count: "+unsatisfiableClasses.size());
			for (OWLClass cls : unsatisfiableClasses) {
				LOG.error("UNSAT:\t"+g.getIdentifier(cls)+"\t"+g.getLabel(cls));
			}
			exit(-1);
			return;
		}
		final OWLOntology rootOntology = reasoner.getRootOntology();
		final List<RemoveAxiom> changes = new ArrayList<RemoveAxiom>();
		Set<OWLClass> allClasses = rootOntology.getClassesInSignature(Imports.EXCLUDED);
		LOG.info("Check classes for redundant super class axioms, all OWL classes count: "+allClasses.size());
		for(OWLClass cls : allClasses) {
			final Set<OWLClass> directSuperClasses = reasoner.getSuperClasses(cls, true).getFlattened();
			Set<OWLSubClassOfAxiom> subClassAxioms = rootOntology.getSubClassAxiomsForSubClass(cls);
			for (final OWLSubClassOfAxiom subClassAxiom : subClassAxioms) {
				// visitor only fires for named superclasses; anonymous expressions are kept
				subClassAxiom.getSuperClass().accept(new OWLClassExpressionVisitorAdapter(){

					@Override
					public void visit(OWLClass desc) {
						if (directSuperClasses.contains(desc) == false) {
							changes.add(new RemoveAxiom(rootOntology, subClassAxiom));
						}
					}
				});
			}
		}
		LOG.info("Found redundant axioms: "+changes.size());
		rootOntology.getOWLOntologyManager().applyChanges(changes);
		LOG.info("Removed axioms: "+changes.size());
	}

	/**
	 * GeneOntology specific function to create links between molecular
	 * functions and their corresponding processes. This method uses the exact
	 * matching of the equivalence axioms to establish the part_of relations.<br>
	 * All relations created by this method are going to be tagged with an axiom
	 * annotation http://purl.org/dc/terms/source and corresponding GO_REF.
	 *
	 * @param opts
	 * @throws Exception
	 */
	@CLIMethod("--create-part-of")
	public void createPartOfLinks(Opts opts) throws Exception {
		if (g == null) {
			LOG.error("No source ontology available.");
			exit(-1);
			return;
		}
		if (reasoner == null) {
			LOG.error("No resoner available.");
			exit(-1);
			return;
		}
		String goRef = "GO_REF:0000090";
		String annotationIRIString = "http://purl.org/dc/terms/source";
		String targetFileName = null;
		while (opts.hasOpts()) {
			if (opts.nextEq("--go-ref")) {
				goRef = opts.nextOpt();
			}
			else if (opts.nextEq("--annotation-iri")) {
				annotationIRIString = opts.nextOpt();
			}
			else if (opts.nextEq("--target-file")) {
				targetFileName = opts.nextOpt();
			}
			else {
				break;
			}
		}
		if (targetFileName == null) {
			LOG.error("No target-file as output was specified.");
			exit(-1);
			return;
		}
		final File targetFile = new File(targetFileName);
		final IRI targetFileIRI = IRI.create(targetFile);
		final IRI annotationIRI = IRI.create(annotationIRIString);

		// first hard coded test for MF -> BP mappings:
		// transporter activity -part_of-> transporter
		// transmembrane transporter activity -part_of-> transmembrane transport
		final OWLClass ta = g.getOWLClassByIdentifier("GO:0005215"); // transporter activity
		final OWLClass t = g.getOWLClassByIdentifier("GO:0006810"); // transport
		final OWLClass tmta = g.getOWLClassByIdentifier("GO:0022857"); // transmembrane transport activity
		final OWLClass tmt = g.getOWLClassByIdentifier("GO:0055085"); // transmembrane transport
		final OWLObjectProperty partOf = g.getOWLObjectPropertyByIdentifier("part_of");
		final OWLObjectProperty transports = g.getOWLObjectPropertyByIdentifier("transports_or_maintains_localization_of");
		List<LinkPattern> patterns = new ArrayList<LinkPattern>(2);
		patterns.add(new LinkPattern(ta, t,
				transports, partOf));
		patterns.add(new LinkPattern(tmta, tmt, transports, partOf));

		OWLDataFactory factory = g.getDataFactory();
		OWLAnnotationProperty property = factory.getOWLAnnotationProperty(annotationIRI);
		OWLAnnotation sourceAnnotation = factory.getOWLAnnotation(property, factory.getOWLLiteral(goRef));
		LinkMaker maker = new LinkMaker(g, reasoner);
		LinkMakerResult result = maker.makeLinks(patterns, sourceAnnotation, false);
		LOG.info("Predictions size: "+result.getPredictions().size());
		OWLPrettyPrinter pp = getPrettyPrinter();
		for (OWLAxiom ax : result.getPredictions()) {
			LOG.info(pp.render(ax));
		}
		LOG.info("Existing size: "+result.getExisiting().size());
		LOG.info("Modified size: "+result.getModified().size());
		// replace existing axioms with their modified variants, then add the new predictions
		OWLOntologyManager manager = g.getManager();
		manager.removeAxioms(g.getSourceOntology(), result.getExisiting());
		manager.addAxioms(g.getSourceOntology(), result.getModified());
		manager.addAxioms(g.getSourceOntology(), result.getPredictions());
		manager.saveOntology(g.getSourceOntology(), targetFileIRI);
	}

	/**
	 * Removes SubClassOf axioms of the form X SubClassOf R some C when a sibling
	 * axiom X SubClassOf R some D exists with D a strict descendant of C
	 * (checked via the reasoner on asserted sibling axioms).
	 *
	 * @param opts command line options
	 * @throws Exception on reasoner failure
	 */
	@CLIMethod("--remove-redundant-svfs")
	public void removeRedundantSVFs(Opts opts) throws Exception {
		opts.info("", "removes redundant existentials: X R Some C, X R Some D, C SubClassOf* D");
		if (g == null) {
			LOG.error("No current ontology loaded");
			exit(-1);
		}
		if (reasoner == null) {
			LOG.error("No reasoner available for the current ontology");
			exit(-1);
		}
		while (opts.hasOpts()) {
			if (opts.nextEq("--report-file")) {
				//reportFile = opts.nextOpt();
			}
			else {
				break;
			}
		}
		Set<OWLSubClassOfAxiom> axioms = g.getSourceOntology().getAxioms(AxiomType.SUBCLASS_OF);
		Set<OWLSubClassOfAxiom> rmAxioms = new HashSet<OWLSubClassOfAxiom>();
		LOG.info("Candidates: " + axioms.size());
		for (OWLSubClassOfAxiom axiom : axioms) {
			if (axiom.getSubClass().isAnonymous())
				continue;
			OWLClass subClass = (OWLClass)axiom.getSubClass();
			if (axiom.getSuperClass() instanceof OWLObjectSomeValuesFrom) {
				//LOG.info(" TESTING " + axiom);
				OWLObjectSomeValuesFrom svf = ((OWLObjectSomeValuesFrom)axiom.getSuperClass());
				// compare against every other asserted SVF superclass of the same subclass
				for (OWLSubClassOfAxiom msAxiom : g.getSourceOntology().getSubClassAxiomsForSubClass(subClass)) {
					if (msAxiom.getSuperClass() instanceof OWLObjectSomeValuesFrom) {
						OWLObjectSomeValuesFrom mssvf = ((OWLObjectSomeValuesFrom)msAxiom.getSuperClass());
						if (mssvf.getProperty().equals(svf.getProperty())) {
							if (!svf.getFiller().isAnonymous()) {
								// redundant if the other filler is strictly more specific
								if (reasoner.getSuperClasses(mssvf.getFiller(), false).
										containsEntity((OWLClass) svf.getFiller())) {
									LOG.info(axiom+" IS_REDUNDANT: "+mssvf.getFiller() + " more-specific-than "+svf.getFiller());
									rmAxioms.add(axiom);
								}
							}
						}
					}
					else if (!msAxiom.getSuperClass().isAnonymous()) {
						// TODO
					}
				}
			}
		}
		g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms);
	}

	/**
	 * Removes SubClassOf SVF axioms whose filler is not among the direct
	 * superclasses over the property, as computed by an extended reasoner.
	 * Requires the current reasoner to implement OWLExtendedReasoner.
	 *
	 * @param opts command line options
	 * @throws Exception on reasoner failure
	 */
	@CLIMethod("--remove-redundant-inferred-svfs")
	public void removeRedundantInferredSVFs(Opts opts) throws Exception {
		opts.info("", "removes redundant existentials using extended reasoner");
		if (g == null) {
			LOG.error("No current ontology loaded");
			exit(-1);
		}
		if (reasoner == null) {
			LOG.error("No reasoner available for the current ontology");
			exit(-1);
		}
		if (!(reasoner instanceof OWLExtendedReasoner)) {
			LOG.error("Reasoner is not extended");
			exit(-1);
		}
		OWLExtendedReasoner exr = (OWLExtendedReasoner)reasoner;
		while (opts.hasOpts()) {
			if (opts.nextEq("--report-file")) {
				//reportFile = opts.nextOpt();
			}
			else {
				break;
			}
		}
		OWLPrettyPrinter owlpp = new OWLPrettyPrinter(g);
		Set<OWLSubClassOfAxiom> axioms = g.getSourceOntology().getAxioms(AxiomType.SUBCLASS_OF);
		Set<OWLSubClassOfAxiom> rmAxioms = new HashSet<OWLSubClassOfAxiom>();
		LOG.info("Candidates: " + axioms.size());
		int n = 0;
		for (OWLSubClassOfAxiom axiom : axioms) {
			n++;
			if (n % 100 == 0) {
				LOG.info("Testing axiom #" +n);
			}
			if (axiom.getSubClass().isAnonymous())
				continue;
			OWLClass subClass = (OWLClass)axiom.getSubClass();
			if (axiom.getSuperClass() instanceof OWLObjectSomeValuesFrom) {
				OWLObjectSomeValuesFrom svf = ((OWLObjectSomeValuesFrom)axiom.getSuperClass());
				// only named property / named filler combinations are checked
				if (svf.getProperty().isAnonymous())
					continue;
				if (svf.getFiller().isAnonymous())
					continue;
				OWLObjectProperty p = (OWLObjectProperty)svf.getProperty();
				Set<OWLClass> directParents = exr.getSuperClassesOver(subClass, p, true);
				if (!directParents.contains(svf.getFiller())) {
					rmAxioms.add(axiom);
					LOG.info(" IS_REDUNDANT: "+owlpp.render(axiom)+" as filler not in "+directParents);
					for (OWLClass dp : directParents) {
						LOG.info("DIRECT_PARENT_OVER "+owlpp.render(p)+" "+owlpp.render(dp));
					}
				}
			}
		}
		g.getManager().removeAxioms(g.getSourceOntology(), rmAxioms);
	}

	/**
	 * Finds and removes redundant (previously inferred) SubClassOf axioms via
	 * RedundantInferences, optionally writing a tab-separated report of the
	 * removed axioms and the more-specific intermediates that make them redundant.
	 *
	 * @param opts command line options
	 * @throws Exception on reasoner or I/O failure
	 */
	@CLIMethod("--remove-redundant-inferred-super-classes")
	public void removeRedundantInferredSuperClassAxioms(Opts opts) throws Exception {
		String reportFile = null;
		if (g == null) {
			LOG.error("No current ontology loaded");
			exit(-1);
		}
		if (reasoner == null) {
			LOG.error("No reasoner available for the current ontology");
			exit(-1);
		}
		while (opts.hasOpts()) {
			if (opts.nextEq("--report-file")) {
				reportFile = opts.nextOpt();
			}
			else {
				break;
			}
		}
		LOG.info("Start finding and removing redundant and previously inferred super classes");
		Map<OWLClass, Set<RedundantAxiom>> allRedundantAxioms = RedundantInferences.removeRedundantSubClassAxioms(g.getSourceOntology(), reasoner);

		if (reportFile == null) {
			LOG.warn("No report file available, skipping report.");
		}
		else {
			BufferedWriter writer = new BufferedWriter(new FileWriter(reportFile));
			try {
				List<OWLClass> sortedClasses = new ArrayList<OWLClass>(allRedundantAxioms.keySet());
				Collections.sort(sortedClasses);
				for (OWLClass cls : sortedClasses) {
					Set<RedundantAxiom> redundants = allRedundantAxioms.get(cls);
					List<OWLClass> superClasses = new ArrayList<OWLClass>(redundants.size());
					// superclass -> intermediate classes that make the axiom redundant
					Map<OWLClass, Set<OWLClass>> intermediateClasses = new HashMap<OWLClass, Set<OWLClass>>();
					for(RedundantAxiom redundant : redundants) {
						OWLSubClassOfAxiom axiom = redundant.getAxiom();
						OWLClass superClass = axiom.getSuperClass().asOWLClass();
						superClasses.add(superClass);
						intermediateClasses.put(superClass, redundant.getMoreSpecific());
					}
					Collections.sort(superClasses);
					for (OWLClass superClass : superClasses) {
						String subClassId = g.getIdentifier(cls);
						String subClassLabel = g.getLabel(cls);
						String superClassId = g.getIdentifier(superClass);
						String superClassLabel = g.getLabel(superClass);
						writer.append("REMOVE").append('\t').append(subClassId).append('\t');
						if (subClassLabel != null) {
							writer.append('\'').append(subClassLabel).append('\'');
						}
						writer.append('\t').append(superClassId).append('\t');
						if (superClassLabel != null) {
							writer.append('\'').append(superClassLabel).append('\'');
						}
						writer.append('\t').append("MORE SPECIFIC: ");
						for(OWLClass moreSpecific : intermediateClasses.get(superClass)) {
							String moreSpecificId = g.getIdentifier(moreSpecific);
							String moreSpecificLabel = g.getLabel(moreSpecific);
							writer.append('\t').append(moreSpecificId).append('\t');
							if (moreSpecificLabel != null) {
								writer.append('\'').append(moreSpecificLabel).append('\'');
							}
						}
						writer.append('\n');
					}
				}
			}
			finally {
				IOUtils.closeQuietly(writer);
			}
		}
	}

	/**
	 * Removes all classes, individuals and object properties tagged with any of
	 * the given subset values (matched as plain literal or as OBO subset IRI),
	 * together with all axioms referencing them.
	 *
	 * @param opts command line options; remaining args are subset names
	 * @throws Exception on ontology-management failure
	 */
	@CLIMethod("--remove-subset-entities")
	public void removeSubsetEntities(Opts opts) throws Exception {
		opts.info("[SUBSET]+","Removes all classes, individuals and object properties that are in the specific subset(s)");
		List<String> subSets = opts.nextList();
		if (subSets == null || subSets.isEmpty()) {
			System.err.println("At least one subset is required for this function.");
			exit(-1);
		}
		// create annotation values to match
		Set<OWLAnnotationValue> values = new HashSet<OWLAnnotationValue>();
		OWLDataFactory f = g.getDataFactory();
		for(String subSet : subSets) {
			// subset as plain string
			values.add(f.getOWLLiteral(subSet));
			// subset as IRI
			values.add(IRI.create(Obo2OWLConstants.DEFAULT_IRI_PREFIX+"#"+subSet));
		}
		// get annotation property for subset
		OWLAnnotationProperty p = g.getAnnotationProperty(OboFormatTag.TAG_SUBSET.getTag());

		// collect all objects in the given subset
		final Set<OWLObject> entities = Mooncat.findTaggedEntities(p, values, g);
		LOG.info("Found "+entities.size()+" tagged objects.");

		if (entities.isEmpty() == false) {
			final List<RemoveAxiom> changes = Mooncat.findRelatedAxioms(entities, g);
			if (changes.isEmpty() == false) {
				LOG.info("applying changes to ontology, count: "+changes.size());
				g.getManager().applyChanges(changes);
			}
			else {
				LOG.info("No axioms found for removal.");
			}
		}
	}

	/**
	 * Simple helper to create a subset tag for matching entities, allows to specify exceptions
	 *
	 * @param opts
	 * @throws Exception
	 */
	@CLIMethod("--create-subset-tags")
	public void createSubsetTags(Opts opts) throws Exception {
		opts.info("[-s|--source SOURCE] -n|--subset SUBSET_NAME -p PREFIX [-e|--exception EXCEPTION]",
				"Create subset tags for all classes and properties, which match the id prefix (OBO style). Specifiy exceptions to skip entities.");
		String source = null;
		String subset = null;
		String prefix = null;
		final Set<String> matchExceptions = new HashSet<String>();
		while (opts.hasOpts()) {
			if (opts.nextEq("-s|--source")) {
				source = opts.nextOpt();
			}
			else if (opts.nextEq("-n|--subset")) {
				subset = opts.nextOpt();
			}
			else if (opts.nextEq("-p|--prefix")) {
				prefix = opts.nextOpt();
			}
			else if (opts.nextEq("-e|--exception")) {
				matchExceptions.add(opts.nextOpt());
			}
			else {
				break;
			}
		}
		if (subset == null) {
			throw new RuntimeException("A subset is required.");
		}
		if (prefix == null) {
			throw new RuntimeException("A prefix is required.");
		}
		// signature comes either from a separately loaded source ontology
		// or from all ontologies in the current graph
		final Set<OWLEntity> signature;
		if (source != null) {
			ParserWrapper newPw = new ParserWrapper();
			newPw.addIRIMappers(pw.getIRIMappers());
			final OWLOntology sourceOntology = newPw.parse(source);
			signature = sourceOntology.getSignature(Imports.INCLUDED);
		}
		else {
			signature = new HashSet<OWLEntity>();
			for (OWLOntology o : g.getAllOntologies()) {
				signature.addAll(o.getSignature());
			}
		}
		final Set<IRI> upperLevelIRIs = new HashSet<IRI>();
		final String matchPrefix = prefix;
		for (OWLEntity owlEntity : signature) {
			// only classes and object properties are considered for tagging
			owlEntity.accept(new OWLEntityVisitorAdapter(){

				@Override
				public void visit(OWLClass cls) {
					String id = Owl2Obo.getIdentifier(cls.getIRI());
					if (id.startsWith(matchPrefix) && !matchExceptions.contains(id)) {
						upperLevelIRIs.add(cls.getIRI());
					}
				}

				@Override
				public void visit(OWLObjectProperty property) {
					String id = Owl2Obo.getIdentifier(property.getIRI());
					if (id.startsWith(matchPrefix) && !matchExceptions.contains(id)) {
						upperLevelIRIs.add(property.getIRI());
					}
				}
			});
		}
		final OWLOntologyManager m = g.getManager();
		final OWLDataFactory f = g.getDataFactory();
		final OWLAnnotationProperty p = g.getAnnotationProperty(OboFormatTag.TAG_SUBSET.getTag());
		final OWLAnnotation annotation = f.getOWLAnnotation(p, IRI.create(Obo2OWLConstants.DEFAULT_IRI_PREFIX+"#"+subset));
		for (IRI iri : upperLevelIRIs) {
			OWLAnnotationAssertionAxiom ax = f.getOWLAnnotationAssertionAxiom(iri, annotation);
			m.addAxiom(g.getSourceOntology(), ax);
		}
	}

	/**
	 * Compares the current ontology against a previously released version.
	 * NOTE(review): method continues beyond this chunk; body below is the
	 * option-parsing prefix only.
	 *
	 * @param opts command line options
	 * @throws Exception on parse or I/O failure
	 */
	@CLIMethod("--verify-changes")
	public void verifyChanges(Opts opts) throws Exception {
		String previousInput = null;
		String idFilterPrefix = null;
		boolean checkMissingLabels = false;
		String reportFile = null;
		while (opts.hasOpts()) {
			if (opts.nextEq("-p|--previous")) {
				previousInput = opts.nextOpt();
			}
			else if (opts.nextEq("--id-prefix-filter")) {
				idFilterPrefix = opts.nextOpt();
			}
			else if (opts.nextEq("--check-missing-labels")) {
				checkMissingLabels = true;
			}
			else if (opts.nextEq("-o|--report-file")) {
				reportFile = opts.nextOpt();
			}
			else {
				break;
			}
		}
		if (g == null) {
			LOG.error("No current ontology loaded for comparison");
			exit(-1);
		}
		else if (previousInput == null) {
			LOG.error("No previous ontology configured for comparison");
			exit(-1);
		}
		else {
			// create new parser & manager for clean load of previous ontology
			final ParserWrapper pw = new ParserWrapper();
			// use same IRI mappers as main parser
			List<OWLOntologyIRIMapper> mappers = this.pw.getIRIMappers();
			if (mappers != null) {
				for (OWLOntologyIRIMapper mapper : mappers) {
					pw.addIRIMapper(mapper);
				}
			}
			// load previous
			IRI previousIRI = IRI.create(new
File(previousInput).getCanonicalFile()); final OWLGraphWrapper previous = pw.parseToOWLGraph(previousIRI.toString()); LOG.info("Start verifying changes."); // create (filtered) class ids and labels, obsolete, alt_ids // prev final Map<String, String> previousIdLabels = Maps.newHashMap(); final Set<String> previousObsoletes = Sets.newHashSet(); final Set<String> previousAltIds = Sets.newHashSet(); extractClassInfo(previous, previousIdLabels, previousObsoletes, previousAltIds, idFilterPrefix); // current final Map<String, String> currentIdLabels = Maps.newHashMap(); final Set<String> currentObsoletes = Sets.newHashSet(); final Set<String> currentAltIds = Sets.newHashSet(); extractClassInfo(g, currentIdLabels, currentObsoletes, currentAltIds, idFilterPrefix); // check that all ids are also in the current ontology boolean hasErrors = false; // normal ids final List<String> missingIds = Lists.newArrayList(); final Map<String, String> missingLabels = Maps.newHashMap(); for(String previousId : previousIdLabels.keySet()) { if (!(currentIdLabels.containsKey(previousId) || currentAltIds.contains(previousId) || currentObsoletes.contains(previousId))) { missingIds.add(previousId); hasErrors = true; } else if (checkMissingLabels && currentAltIds.contains(previousId)) { // this id has been merged into another class // optional: check that all primary labels of merged terms are still in the merged term final OWLObject currentObject = g.getOWLObjectByAltId(previousId); final String currentLbl = g.getLabel(currentObject); final String previousLbl = previousIdLabels.get(previousId); if (currentLbl != null && previousLbl != null) { if (currentLbl.equals(previousLbl) == false) { // check synonyms List<ISynonym> synonyms = g.getOBOSynonyms(currentObject); boolean found = false; if (synonyms != null) { for (ISynonym synonym : synonyms) { if (previousLbl.equals(synonym.getLabel())) { found = true; break; } } } if (found == false) { hasErrors = true; missingLabels.put(previousId, 
previousLbl); } } } } } if (!missingIds.isEmpty()) { Collections.sort(missingIds); } // alt_ids final List<String> missingAltIds = Lists.newArrayList(Sets.difference(previousAltIds, currentAltIds)); if (!missingAltIds.isEmpty()) { Collections.sort(missingAltIds); hasErrors = true; } // obsolete Set<String> differenceObsolete = Sets.difference(previousObsoletes, currentObsoletes); if (!differenceObsolete.isEmpty()) { // special case: obsolete ids might be resurrected as valid ids differenceObsolete = Sets.difference(differenceObsolete, currentIdLabels.keySet()); } final List<String> missingObsoletes = Lists.newArrayList(differenceObsolete); if (!missingObsoletes.isEmpty()) { Collections.sort(missingObsoletes); hasErrors = true; } LOG.info("Verification finished."); // clean up old file in case of no errors if (!hasErrors && reportFile != null) { FileUtils.deleteQuietly(new File(reportFile)); } if (hasErrors) { LOG.error("The verification failed with the following errors."); PrintWriter writer = null; try { if (reportFile != null) { writer = new PrintWriter(new FileWriter(reportFile)); } for(String missingId : missingIds) { LOG.error("Missing ID: "+missingId); if (writer != null) { writer.append("MISSING-ID").append('\t').append(missingId).println(); } } for (String missingId : missingAltIds) { LOG.error("Missing alternate ID: "+missingId); if (writer != null) { writer.append("MISSING-ALT_ID").append('\t').append(missingId).println(); } } for (String missingId : missingObsoletes) { LOG.error("Missing obsolete ID: "+missingId); if (writer != null) { writer.append("MISSING-OBSOLETE_ID").append('\t').append(missingId).println(); } } for (Entry<String, String> missingEntry : missingLabels.entrySet()) { LOG.error("Missing primary label for merged term: '"+missingEntry.getValue()+"' "+missingEntry.getKey()); if (writer != null) { writer.append("MISSING-LABEL").append('\t').append(missingEntry.getValue()).append('\t').append(missingEntry.getKey()).println(); } } } finally { 
IOUtils.closeQuietly(writer); } exit(-1); } } } /** * @param graph * @param idLabels * @param obsoletes * @param allAltIds * @param idFilterPrefix */ private void extractClassInfo(OWLGraphWrapper graph, Map<String, String> idLabels, Set<String> obsoletes, Set<String> allAltIds, String idFilterPrefix) { for(OWLObject obj : graph.getAllOWLObjects()) { if (obj instanceof OWLClass) { String id = graph.getIdentifier(obj); if (idFilterPrefix != null && !id.startsWith(idFilterPrefix)) { continue; } List<String> altIds = graph.getAltIds(obj); if (altIds != null) { allAltIds.addAll(altIds); } boolean isObsolete = graph.isObsolete(obj); if (isObsolete) { obsoletes.add(id); } else { String lbl = graph.getLabel(obj); idLabels.put(id, lbl); } } } } @CLIMethod("--create-biochebi") public void createBioChebi(Opts opts) throws Exception { final String chebiPURL = "http://purl.obolibrary.org/obo/chebi.owl"; String chebiFile = null; String output = null; String ignoredSubset = "no_conj_equiv"; while (opts.hasOpts()) { if (opts.nextEq("-o|--output")) { output = opts.nextOpt(); } else if (opts.nextEq("-c|--chebi-file")) { chebiFile = opts.nextOpt(); } else if (opts.nextEq("-i|--ignored-subset")) { ignoredSubset = opts.nextOpt(); } else { break; } } if (chebiFile != null) { File inputFile = new File(chebiFile); OWLOntology chebiOWL = pw.parse(IRI.create(inputFile).toString()); // sanity check: // check that the purl is the expected one boolean hasOntologyId = false; OWLOntologyID ontologyID = chebiOWL.getOntologyID(); if (ontologyID != null) { Optional<IRI> ontologyIRI = ontologyID.getOntologyIRI(); if (ontologyIRI.isPresent()) { hasOntologyId = chebiPURL.equals(ontologyIRI.get().toString()); } } if (hasOntologyId == false) { throw new RuntimeException("The loaded ontology file ("+chebiFile+") does not have the expected ChEBI purl: "+chebiPURL); } } if (g == null) { // load default template InputStream stream = loadResource("bio-chebi-input.owl"); if (stream == null) { throw new 
RuntimeException("Could not load default bio chebi input file: 'bio-chebi-input.owl'"); } g = new OWLGraphWrapper(pw.getManager().loadOntologyFromOntologyDocument(stream)); } BioChebiGenerator.createBioChebi(g, ignoredSubset); if (output != null) { OWLOntology ontology = g.getSourceOntology(); File outFile = new File(output); ontology.getOWLOntologyManager().saveOntology(ontology, IRI.create(outFile)); } } @CLIMethod("--run-obo-basic-dag-check") public void runDAGCheck(Opts opts) throws Exception { if (g != null) { List<List<OWLObject>> cycles = OboBasicDagCheck.findCycles(g); if (cycles != null && !cycles.isEmpty()) { OWLPrettyPrinter pp = getPrettyPrinter(); System.err.println("Found cycles in the graph"); for (List<OWLObject> cycle : cycles) { StringBuilder sb = new StringBuilder("Cycle:"); for (OWLObject owlObject : cycle) { sb.append(" "); sb.append(pp.render(owlObject)); } System.err.println(sb); } } } } // @CLIMethod("--rdf-to-json-ld") // public void rdfToJsonLd(Opts opts) throws Exception { // String ofn = null; // while (opts.hasOpts()) { // if (opts.nextEq("-o")) { // ofn = opts.nextOpt(); // LOG.info("SAVING JSON TO: "+ofn); // } // else { // break; // } // } // File inputFile = opts.nextFile(); // LOG.info("input rdf: "+inputFile); // FileInputStream s = new FileInputStream(inputFile); // final Model modelResult = ModelFactory.createDefaultModel().read( // s, "", "RDF/XML"); // final JenaRDFParser parser = new JenaRDFParser(); // Options jsonOpts = new Options(); // // final Object json = JSONLD.fromRDF(modelResult, jsonOpts , parser); // FileOutputStream out = new FileOutputStream(ofn); // String jsonStr = JSONUtils.toPrettyString(json); // IOUtils.write(jsonStr, out); // } // // @CLIMethod("--json-ld-to-rdf") // public void jsonLdToRdf(Opts opts) throws Exception { // String ofn = null; // while (opts.hasOpts()) { // if (opts.nextEq("-o")) { // ofn = opts.nextOpt(); // } // else { // break; // } // } // final JSONLDTripleCallback callback = new 
JenaTripleCallback(); // // FileInputStream s = new FileInputStream(opts.nextFile()); // Object json = JSONUtils.fromInputStream(s); // final Model model = (Model) JSONLD.toRDF(json, callback); // // final StringWriter w = new StringWriter(); // model.write(w, "TURTLE"); // // FileOutputStream out = new FileOutputStream(ofn); // IOUtils.write(w.toString(), out); // } @CLIMethod("--extract-annotation-value") public void extractAnnotationValue(Opts opts) throws Exception { String delimiter = "\t"; String idPrefix = null; boolean addLabel = true; OWLAnnotationProperty valueProperty = null; String output = null; final OWLDataFactory f = g.getDataFactory(); final OWLAnnotationProperty rdfsLabel = f.getRDFSLabel(); while (opts.hasOpts()) { if (opts.nextEq("-p|--property")) { String propString = opts.nextOpt(); valueProperty = f.getOWLAnnotationProperty(IRI.create(propString)); } else if (opts.nextEq("-o|--output")) { output = opts.nextOpt(); } else if (opts.nextEq("-d|--delimiter")) { delimiter = opts.nextOpt(); } else if (opts.nextEq("--id-prefix")) { idPrefix = opts.nextOpt(); } else if (opts.nextEq("--excludeLabel")) { addLabel = false; } else { break; } } if (output == null) { LOG.error("No outfile specified."); exit(-1); } else if (valueProperty == null) { LOG.error("No property specified."); exit(-1); } else { List<String> lines = new ArrayList<String>(); final Set<OWLOntology> allOntologies = g.getAllOntologies(); LOG.info("Extracting values for property: "+valueProperty.getIRI()); for(OWLClass cls : g.getAllOWLClasses()) { final String id = g.getIdentifier(cls); if (idPrefix != null && !id.startsWith(idPrefix)) { continue; } String label = null; String propertyValue = null; Set<OWLAnnotationAssertionAxiom> allAnnotationAxioms = new HashSet<OWLAnnotationAssertionAxiom>(); for(OWLOntology ont : allOntologies) { allAnnotationAxioms.addAll(ont.getAnnotationAssertionAxioms(cls.getIRI())); } for (OWLAnnotationAssertionAxiom axiom : allAnnotationAxioms) { 
OWLAnnotationProperty currentProp = axiom.getProperty(); if (valueProperty.equals(currentProp)) { OWLAnnotationValue av = axiom.getValue(); if (av instanceof OWLLiteral) { propertyValue = ((OWLLiteral)av).getLiteral(); } } else if (addLabel && rdfsLabel.equals(currentProp)) { OWLAnnotationValue av = axiom.getValue(); if (av instanceof OWLLiteral) { label = ((OWLLiteral)av).getLiteral(); } } // stop search once the values are available if (propertyValue != null) { if(addLabel) { if (label != null) { break; } } else { break; } } } // write the information StringBuilder sb = new StringBuilder(); if (addLabel) { if (label != null && propertyValue != null) { sb.append(id); sb.append(delimiter); sb.append(label); sb.append(delimiter); sb.append(propertyValue); } } else { if (label != null && propertyValue != null) { sb.append(id); sb.append(delimiter); sb.append(propertyValue); } } lines.add(sb.toString()); } LOG.info("Finished extraction, sorting output."); Collections.sort(lines); File outputFile = new File(output).getCanonicalFile(); LOG.info("Write extracted properties to file: "+outputFile.getPath()); BufferedWriter writer = null; try { writer = new BufferedWriter(new FileWriter(outputFile)); for (String line : lines) { writer.append(line).append('\n'); } } finally { IOUtils.closeQuietly(writer); } } } /** * Extract all xps ({@link OWLEquivalentClassesAxiom}) from the loaded * ontology. Requires a set of roots classes to restrict the set of * extracted xps. 
* * @param opts * @throws Exception */ @CLIMethod("--extract-extension-file") public void extractExtensionFile(Opts opts) throws Exception { final Set<OWLClass> rootTerms = new HashSet<OWLClass>(); String ontologyIRI = null; String outputFileOwl = null; String outputFileObo = null; String versionIRI = null; while (opts.hasOpts()) { if (opts.nextEq("-id|--ontology-id")) { ontologyIRI = opts.nextOpt(); } else if (opts.nextEq("-owl|--output-owl")) { outputFileOwl = opts.nextOpt(); } else if (opts.nextEq("-obo|--output-obo")) { outputFileObo = opts.nextOpt(); } else if (opts.nextEq("-v|--version")) { versionIRI = opts.nextOpt(); } else if (opts.nextEq("-t|--term")) { String term = opts.nextOpt(); OWLClass owlClass = g.getOWLClassByIdentifierNoAltIds(term); if (owlClass != null) { rootTerms.add(owlClass); } else { throw new RuntimeException("Could not find a class for id: "+term); } } else { break; } } if (rootTerms.isEmpty()) { throw new RuntimeException("At least one term is required for filtering"); } if (ontologyIRI == null) { throw new RuntimeException("An ontology IRI is required."); } final OWLOntologyID newID; final IRI newOntologyIRI = IRI.create(ontologyIRI); if (versionIRI != null) { final IRI newVersionIRI = IRI.create(versionIRI); newID = new OWLOntologyID(Optional.of(newOntologyIRI), Optional.of(newVersionIRI)); } else { newID = new OWLOntologyID(Optional.of(newOntologyIRI), Optional.<IRI>absent()); } final OWLOntologyManager m = g.getManager(); final OWLOntology work = m.createOntology(newID); // filter axioms final Set<OWLObjectProperty> usedProperties = new HashSet<OWLObjectProperty>(); final Set<OWLAxiom> filtered = new HashSet<OWLAxiom>(); final OWLOntology source = g.getSourceOntology(); // get relevant equivalent class axioms for(OWLClass cls : source.getClassesInSignature()) { Set<OWLEquivalentClassesAxiom> eqAxioms = source.getEquivalentClassesAxioms(cls); for (OWLEquivalentClassesAxiom eqAxiom : eqAxioms) { if (hasFilterClass(eqAxiom, rootTerms)) 
{ filtered.add(eqAxiom); usedProperties.addAll(eqAxiom.getObjectPropertiesInSignature()); } } } // add used properties for (OWLObjectProperty p : usedProperties) { filtered.addAll(source.getDeclarationAxioms(p)); filtered.addAll(source.getAxioms(p, Imports.EXCLUDED)); filtered.addAll(source.getAnnotationAssertionAxioms(p.getIRI())); } // add all axioms into the ontology m.addAxioms(work, filtered); // write ontology // owl if (outputFileOwl != null) { OutputStream outputStream = new FileOutputStream(outputFileOwl); try { m.saveOntology(work, new RDFXMLDocumentFormat(), outputStream); } finally { outputStream.close(); } } // obo if (outputFileObo != null) { Owl2Obo owl2Obo = new Owl2Obo(); OBODoc doc = owl2Obo.convert(work); OBOFormatWriter writer = new OBOFormatWriter(); BufferedWriter fileWriter = null; try { fileWriter = new BufferedWriter(new FileWriter(outputFileObo)); NameProvider nameprovider = new OWLGraphWrapperNameProvider(g); writer.write(doc, fileWriter, nameprovider); } finally { IOUtils.closeQuietly(fileWriter); } } } /** * Retain only subclass of axioms and intersection of axioms if they contain * a class in it's signature of a given set of parent terms. 
* * For example, to create the x-chemical.owl do the following steps: * <ol> * <li>Load ChEBI as main ontology graph</li> * <li>(Optional) load go, recommended for OBO write</li> * <li>Setup reasoner: '--elk --init-reasoner'</li> * <li>'--filter-extension-file'</li> * <li>Load extensions file using: '-e' or '--extension-file'</li> * <li>Add required root terms: '-t' or '--term', use multiple paramteres to add multiple terms</li> * <li>Set ontology IRI for filtered file: '-id' or '--ontology-id'</li> * <li> set output files: * <ul> * <li>OWL: '-owl|--output-owl' owl-filename</li> * <li>OBO: '-obo|--output-obo' obo-filename</li> * </ul> * </li> * <li>(Optional) set version: '-v' or '--version'</li> * </ol> * @param opts * @throws Exception */ @CLIMethod("--filter-extension-file") public void filterExtensionFile(Opts opts) throws Exception { String extensionFile = null; final Set<OWLClass> rootTerms = new HashSet<OWLClass>(); String ontologyIRI = null; String outputFileOwl = null; String outputFileObo = null; String versionIRI = null; while (opts.hasOpts()) { if (opts.nextEq("-e|--extension-file")) { extensionFile = opts.nextOpt(); } else if (opts.nextEq("-id|--ontology-id")) { ontologyIRI = opts.nextOpt(); } else if (opts.nextEq("-owl|--output-owl")) { outputFileOwl = opts.nextOpt(); } else if (opts.nextEq("-obo|--output-obo")) { outputFileObo = opts.nextOpt(); } else if (opts.nextEq("-v|--version")) { versionIRI = opts.nextOpt(); } else if (opts.nextEq("-t|--term")) { String term = opts.nextOpt(); OWLClass owlClass = g.getOWLClassByIdentifierNoAltIds(term); if (owlClass != null) { rootTerms.add(owlClass); } else { throw new RuntimeException("Could not find a class for id: "+term); } } else { break; } } if (extensionFile == null) { throw new RuntimeException("No extension file was specified."); } if (rootTerms.isEmpty()) { throw new RuntimeException("At least one term is required for filtering"); } if (ontologyIRI == null) { throw new RuntimeException("An ontology 
IRI is required."); } // create new parser and new OWLOntologyManager ParserWrapper p = new ParserWrapper(); final OWLOntology work = p.parse(extensionFile); // update ontology ID final OWLOntologyID oldId = work.getOntologyID(); final IRI oldVersionIRI; if(oldId != null && oldId.getVersionIRI().isPresent()) { oldVersionIRI = oldId.getVersionIRI().get(); } else { oldVersionIRI = null; } final OWLOntologyID newID; final IRI newOntologyIRI = IRI.create(ontologyIRI); if (versionIRI != null) { final IRI newVersionIRI = IRI.create(versionIRI); newID = new OWLOntologyID(Optional.of(newOntologyIRI), Optional.of(newVersionIRI)); } else if (oldVersionIRI != null) { newID = new OWLOntologyID(Optional.of(newOntologyIRI), Optional.of(oldVersionIRI)); } else { newID = new OWLOntologyID(Optional.of(newOntologyIRI), Optional.<IRI>absent()); } // filter axioms Set<OWLAxiom> allAxioms = work.getAxioms(); for(OWLClass cls : work.getClassesInSignature()) { Set<OWLClassAxiom> current = work.getAxioms(cls, Imports.EXCLUDED); if (hasFilterClass(current, rootTerms) == false) { allAxioms.removeAll(work.getDeclarationAxioms(cls)); allAxioms.removeAll(current); allAxioms.removeAll(work.getAnnotationAssertionAxioms(cls.getIRI())); } } OWLOntologyManager manager = OWLManager.createOWLOntologyManager(); OWLOntology filtered = manager.createOntology(newID); manager.addAxioms(filtered, allAxioms); // write ontology // owl if (outputFileOwl != null) { OutputStream outputStream = new FileOutputStream(outputFileOwl); try { manager.saveOntology(filtered, new RDFXMLDocumentFormat(), outputStream); } finally { outputStream.close(); } } // obo if (outputFileObo != null) { Owl2Obo owl2Obo = new Owl2Obo(); OBODoc doc = owl2Obo.convert(filtered); OBOFormatWriter writer = new OBOFormatWriter(); BufferedWriter fileWriter = null; try { fileWriter = new BufferedWriter(new FileWriter(outputFileObo)); NameProvider nameprovider = new OWLGraphWrapperNameProvider(g); writer.write(doc, fileWriter, nameprovider); } 
finally { IOUtils.closeQuietly(fileWriter); } } } /** * Check that there is an axiom, which use a class (in its signature) that * has a ancestor in the root term set. * * @param axioms set to check * @param rootTerms set root of terms * @return boolean */ private boolean hasFilterClass(Set<OWLClassAxiom> axioms, Set<OWLClass> rootTerms) { if (axioms != null && !axioms.isEmpty()) { for (OWLClassAxiom ax : axioms) { if (ax instanceof OWLEquivalentClassesAxiom) { Set<OWLClass> signature = ax.getClassesInSignature(); for (OWLClass sigCls : signature) { NodeSet<OWLClass> superClasses = reasoner.getSuperClasses(sigCls, false); for(OWLClass root : rootTerms) { if (superClasses.containsEntity(root)) { return true; } } } } } } return false; } /** * Check that there is an axiom, which use a class (in its signature) that * has a ancestor in the root term set. * * @param axioms set to check * @param rootTerms set root of terms * @return boolean */ private boolean hasFilterClass(OWLEquivalentClassesAxiom axiom, Set<OWLClass> rootTerms) { if (axiom != null) { Set<OWLClass> signature = axiom.getClassesInSignature(); for (OWLClass sigCls : signature) { NodeSet<OWLClass> superClasses = reasoner.getSuperClasses(sigCls, false); for(OWLClass root : rootTerms) { if (superClasses.containsEntity(root)) { return true; } } } } return false; } @CLIMethod("--create-slim") public void createSlim(Opts opts) throws Exception { String idResource = null; String outputOwl = null; String outputObo = null; String oldOwl = null; String oldObo = null; IRI ontologyIRI = null; // parse CLI options while (opts.hasOpts()) { if (opts.nextEq("--output-owl")) { outputOwl = opts.nextOpt(); } else if (opts.nextEq("--output-obo")) { outputObo = opts.nextOpt(); } else if (opts.nextEq("-i|--ids")) { idResource = opts.nextOpt(); } else if (opts.nextEq("--old-owl")) { oldOwl = opts.nextOpt(); } else if (opts.nextEq("--old-obo")) { oldObo = opts.nextOpt(); } else if (opts.nextEq("--iri")) { String iriString = 
opts.nextOpt(); ontologyIRI = IRI.create(iriString); } else { break; } } // check required parameters if (idResource == null) { throw new RuntimeException("No identifier resource specified. A list of terms is required to create a slim."); } if (outputOwl == null && outputObo == null) { throw new RuntimeException("No output file specified. At least one output file (obo or owl) is needed."); } if (ontologyIRI == null) { throw new RuntimeException("No IRI found. An ontology IRI is required."); } // set of all OWL classes required in the slim. Set<OWLClass> seeds = new HashSet<OWLClass>(); // create map of alternate identifiers for fast lookup Map<String, OWLObject> objectsByAltId = g.getAllOWLObjectsByAltId(); // load list of identifiers from file LineIterator lineIterator = FileUtils.lineIterator(new File(idResource)); while (lineIterator.hasNext()) { String line = lineIterator.next(); if (line.startsWith("#")) { continue; } addId(line, seeds, objectsByAltId); } // (optional) load previous slim in OWL. // Check that all classes are also available in the new base ontology. if (oldOwl != null) { ParserWrapper pw = new ParserWrapper(); OWLOntologyManager tempManager = pw.getManager(); OWLOntology oldSlim = tempManager.loadOntologyFromOntologyDocument(new File(oldOwl)); OWLGraphWrapper oldSlimGraph = new OWLGraphWrapper(oldSlim); Set<OWLClass> classes = oldSlim.getClassesInSignature(); for (OWLClass owlClass : classes) { boolean found = false; for(OWLOntology o : g.getAllOntologies()) { if (o.getDeclarationAxioms(owlClass).isEmpty() == false) { found = true; seeds.add(owlClass); break; } } if (!found) { LOG.warn("Could not find old class ("+oldSlimGraph.getIdentifier(owlClass)+") in new ontology."); } } oldSlimGraph.close(); } // (optional) load previous slim in OBO format. // Check that all classes are also available in the new base ontology. 
if (oldObo != null) { OBOFormatParser p = new OBOFormatParser(); OBODoc oboDoc = p.parse(new File(oldObo)); Collection<Frame> termFrames = oboDoc.getTermFrames(); if (termFrames != null) { for (Frame frame : termFrames) { String id = frame.getId(); addId(id, seeds, objectsByAltId); } } } // sanity check if (seeds.isEmpty()) { throw new RuntimeException("There are no classes in the seed set for the slim generation. Id problem or empty id resource?"); } // create the slim Mooncat mooncat = new Mooncat(g); OWLOntology slim = mooncat.makeMinimalSubsetOntology(seeds, ontologyIRI, true, false); mooncat = null; // write the output if (outputOwl != null) { File outFile = new File(outputOwl); slim.getOWLOntologyManager().saveOntology(slim, IRI.create(outFile)); } if (outputObo != null) { Owl2Obo owl2Obo = new Owl2Obo(); OBODoc oboDoc = owl2Obo.convert(slim); OBOFormatWriter w = new OBOFormatWriter(); w.write(oboDoc, outputObo); } } private void addId(String id, Set<OWLClass> seeds, Map<String, OWLObject> altIds) { id = StringUtils.trimToNull(id); if (id != null) { // #1 check alt_ids OWLObject owlObject = altIds.get(id); if (owlObject != null && owlObject instanceof OWLClass) { LOG.warn("Retrieving class "+g.getIdentifier(owlObject)+" by alt_id: "+id+"\nPlease consider updating your idenitifers."); seeds.add((OWLClass) owlObject); } // #2 use normal code path OWLClass cls = g.getOWLClassByIdentifier(id); if (cls != null) { seeds.add(cls); } else { LOG.warn("Could not find a class for id: "+id); } } } private InputStream loadResource(String name) { InputStream inputStream = getClass().getResourceAsStream(name); if (inputStream == null) { inputStream = ClassLoader.getSystemResourceAsStream(name); } if (inputStream == null) { File file = new File(name); if (file.isFile() && file.canRead()) { try { return new FileInputStream(file); } catch (FileNotFoundException exception) { // intentionally empty } } } return inputStream; } private OWLReasoner createReasoner(OWLOntology ont, 
String reasonerName, OWLOntologyManager manager) { OWLReasonerFactory reasonerFactory = createReasonerFactory(reasonerName); if (reasonerFactory == null) { System.out.println("no such reasoner: "+reasonerName); } else { reasoner = reasonerFactory.createReasoner(ont); LOG.info("Created reasoner: "+reasoner); } return reasoner; } private OWLReasonerFactory createReasonerFactory(String reasonerName) { OWLReasonerFactory reasonerFactory = null; if (reasonerName.equals("hermit")) { reasonerFactory = new org.semanticweb.HermiT.ReasonerFactory(); } else if (reasonerName.equals("ogr")) { reasonerFactory = new GraphReasonerFactory(); } else if (reasonerName.equals("mexr")) { if (reasonerFactory == null) { // set default to ELK reasonerFactory = new ElkReasonerFactory(); } reasonerFactory = new ExpressionMaterializingReasonerFactory(reasonerFactory); } else if (reasonerName.equals("elk")) { reasonerFactory = new ElkReasonerFactory(); } else if (reasonerName.equals("welk")) { System.out.println("The wrapping elk reasoner is deprecated, using normal elk instead"); reasonerFactory = new ElkReasonerFactory(); } return reasonerFactory; } private void catOntologies(Opts opts) throws OWLOntologyCreationException, IOException { opts.info("[-r|--ref-ont ONT] [-i|--use-imports]", "Catenate ontologies taking only referenced subsets of supporting onts.\n"+ " See Mooncat docs"); Mooncat m = new Mooncat(g); ParserWrapper pw = new ParserWrapper(); String newURI = null; while (opts.hasOpts()) { //String opt = opts.nextOpt(); if (opts.nextEq("-r") || opts.nextEq("--ref-ont")) { LOG.error("DEPRECATED - list all ref ontologies on main command line"); String f = opts.nextOpt(); m.addReferencedOntology(pw.parseOWL(f)); } else if (opts.nextEq("-s") || opts.nextEq("--src-ont")) { m.setOntology(pw.parseOWL(opts.nextOpt())); } else if (opts.nextEq("-p") || opts.nextEq("--prefix")) { m.addSourceOntologyPrefix(opts.nextOpt()); } else if (opts.nextEq("-i") || opts.nextEq("--use-imports")) { 
System.out.println("using everything in imports closure"); g.addSupportOntologiesFromImportsClosure(); } else if (opts.nextEq("-n") || opts.nextEq("--new-uri")) { System.out.println("new URI for merged ontology"); newURI = opts.nextOpt(); } else { break; //opts.fail(); } } //if (m.getReferencedOntologies().size() == 0) { // m.setReferencedOntologies(g.getSupportOntologySet()); //} //g.useImportClosureForQueries(); //for (OWLAxiom ax : m.getClosureAxiomsOfExternalReferencedEntities()) { // System.out.println("M_AX:"+ax); //} m.mergeOntologies(); m.removeDanglingAxioms(); if (newURI != null) { SetOntologyID soi = new SetOntologyID(g.getSourceOntology(), new OWLOntologyID(Optional.of(IRI.create(newURI)), Optional.<IRI>absent())); g.getManager().applyChange(soi); /* HashSet<OWLOntology> cpOnts = new HashSet<OWLOntology>(); LOG.info("srcOnt annots:"+g.getSourceOntology().getAnnotations().size()); cpOnts.add(g.getSourceOntology()); OWLOntology newOnt = g.getManager().createOntology(IRI.create(newURI), cpOnts); LOG.info("newOnt annots:"+newOnt.getAnnotations().size()); //g.getDataFactory().getOWLOn g.setSourceOntology(newOnt); */ } } private void showEdges(Set<OWLGraphEdge> edges) { OWLPrettyPrinter owlpp = new OWLPrettyPrinter(g); for (OWLGraphEdge e : edges) { System.out.println(owlpp.render(e)); } } }
making a log warning option
OWLTools-Runner/src/main/java/owltools/cli/CommandRunner.java
making a log warning option
<ide><path>WLTools-Runner/src/main/java/owltools/cli/CommandRunner.java <ide> else if (opts.nextEq("--log-info")) { <ide> Logger.getRootLogger().setLevel(Level.INFO); <ide> } <add> else if (opts.nextEq("--log-warning")) { <add> Logger.getRootLogger().setLevel(Level.WARN); <add> } <ide> else if (opts.nextEq("--log-debug")) { <ide> Logger.getRootLogger().setLevel(Level.DEBUG); <ide> }
Java
mit
b3b1c65dbd92cf5733b0c85dbf45dc646d75b291
0
DeviceConnect/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,DeviceConnect/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android,TakayukiHoshi1984/DeviceConnect-Android
/* DConnectService.java Copyright (c) 2016 NTT DOCOMO,INC. Released under the MIT license http://opensource.org/licenses/mit-license.php */ package org.deviceconnect.android.service; import android.content.Context; import android.content.Intent; import android.util.Log; import org.deviceconnect.android.message.DevicePluginContext; import org.deviceconnect.android.message.MessageUtils; import org.deviceconnect.android.profile.DConnectProfile; import org.deviceconnect.android.profile.DConnectProfileProvider; import org.deviceconnect.android.profile.ServiceInformationProfile; import org.deviceconnect.profile.ServiceDiscoveryProfileConstants; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Device Connect APIサービス. * * @author NTT DOCOMO, INC. */ public class DConnectService implements DConnectProfileProvider, ServiceDiscoveryProfileConstants { /** * サービスID. */ private final String mId; /** * サポートするプロファイル一覧. */ private final Map<String, DConnectProfile> mProfiles = new HashMap<>(); /** * サービス名. */ private String mName; /** * サービスタイプ. */ private String mType; /** * オンラインフラグ. */ private boolean mIsOnline; /** * サービスのコンフィグ. */ private String mConfig; /** * コンテキスト. */ private Context mContext; /** * プラグインのコンテキスト. */ private DevicePluginContext mPluginContext; /** * ステータス更新通知リスナー. */ private OnStatusChangeListener mStatusListener; /** * コンストラクタ. * @param id サービスID * @throws NullPointerException idに<code>null</code>が指定された場合 */ public DConnectService(final String id) { if (id == null) { throw new NullPointerException("id is null."); } mId = id; addProfile(new ServiceInformationProfile()); } /** * サービスIDを取得する. * @return サービスID */ public String getId() { return mId; } /** * サービス名を設定する. * * @param name サービス名 */ public void setName(final String name) { mName = name; } /** * サービス名を取得する. * * @return サービス名. */ public String getName() { return mName; } /** * サービスのネットワークタイプを設定する. 
* * @param type ネットワークタイプ */ public void setNetworkType(final NetworkType type) { mType = type.getValue(); } /** * サービスのネットワークタイプを設定する. * <p> * {@link org.deviceconnect.profile.ServiceDiscoveryProfileConstants.NetworkType NetworkType} * に定義されていないタイプの場合には、このメソッドを使用して独自のネットワークタイプを設定することができる。 * </p> * @param type ネットワークタイプ */ public void setNetworkType(final String type) { mType = type; } /** * サービスのネットワークタイプを取得する. * @return ネットワークタイプ */ public String getNetworkType() { return mType; } /** * ネットワークの状態を設定する. * * @param isOnline オンラインの場合はtrue、オフラインの場合はfalse */ public void setOnline(final boolean isOnline) { mIsOnline = isOnline; if (mStatusListener != null) { mStatusListener.onStatusChange(this); } } /** * ネットワークの状態を取得する. * @return オンラインの場合はtrue、オフラインの場合はfalse */ public boolean isOnline() { return mIsOnline; } /** * サービスのコンフィグを取得する. * <p> * コンフィグ情報が存在しない場合には{@code null}を返却する。 * </p> * @return サービスのコンフィグ */ public String getConfig() { return mConfig; } /** * サービスのコンフィグを設定する. * <p> * コンフィグ情報が存在しない場合には、{@code null}を設定する。<br> * デフォルトは、{@code null}が設定されている。 * </p> * @param config コンフィグ情報 */ public void setConfig(final String config) { mConfig = config; } /** * コンテキストを設定する. * <p> * {@link DConnectServiceManager}に追加されるときにコンテキストが設定される。 * </p> * @param context コンテキスト */ void setContext(final Context context) { mContext = context; } /** * コンテキストを取得する. * @return コンテキスト */ public Context getContext() { return mContext; } /** * プラグインコンテキストを設定します. * * @param pluginContext プラグインコンテキスト */ void setPluginContext(DevicePluginContext pluginContext) { mPluginContext = pluginContext; } /** * プラグインコンテキストを取得します. 
* * @return プラグインコンテキスト */ public DevicePluginContext getPluginContext() { return mPluginContext; } @Override public List<DConnectProfile> getProfileList() { return new ArrayList<>(mProfiles.values()); } @Override public DConnectProfile getProfile(final String name) { if (name == null) { return null; } return mProfiles.get(name.toLowerCase()); } @Override public void addProfile(final DConnectProfile profile) { if (profile == null) { return; } profile.setService(this); profile.setContext(mContext); profile.setPluginContext(mPluginContext); profile.setResponder(mPluginContext); mProfiles.put(profile.getProfileName().toLowerCase(), profile); } @Override public void removeProfile(final DConnectProfile profile) { if (profile == null) { return; } mProfiles.remove(profile.getProfileName().toLowerCase()); } /** * サービスに命令が通知されたときに呼び出されるメソッド. * <p> * このメソッドの中でサービスに登録されている各プロファイルに命令を振り分ける。<br> * 各プロファイルでは、requestに対するレスポンスをresponseに格納する。 * </p> * <p> * レスポンスにtrueが返却した場合には、Plugin SDKは、responseをDevice Connect Managerに返却する。<br> * falseの場合には、Plugin SDKは、responseをDevice Connect Managerに返却しません。プラグイン側で、 * {@link org.deviceconnect.android.message.DConnectMessageService#sendResponse(Intent)}を用いて * レスポンスを返却する必要があります。 * </p> * @param request リクエスト * @param response レスポンス * @return 同期的にレスポンスを返却する場合にはtrue、それ以外はfalse */ public boolean onRequest(final Intent request, final Intent response) { DConnectProfile profile = getProfile(DConnectProfile.getProfile(request)); if (profile == null) { MessageUtils.setNotSupportProfileError(response); return true; } return profile.onRequest(request, response); } /** * ステータス更新通知リスナーを設定する. * * @param listener リスナー */ void setOnStatusChangeListener(final OnStatusChangeListener listener) { mStatusListener = listener; } /** * ステータス更新通知リスナー. * * @author NTT DOCOMO, INC. */ interface OnStatusChangeListener { /** * ステータスが変更されたサービスを通知する. * * @param service ステータスが変更されたサービス */ void onStatusChange(DConnectService service); } }
dConnectDevicePlugin/dConnectDevicePluginSDK/dconnect-device-plugin-sdk/src/main/java/org/deviceconnect/android/service/DConnectService.java
/* DConnectService.java Copyright (c) 2016 NTT DOCOMO,INC. Released under the MIT license http://opensource.org/licenses/mit-license.php */ package org.deviceconnect.android.service; import android.content.Context; import android.content.Intent; import android.util.Log; import org.deviceconnect.android.message.DevicePluginContext; import org.deviceconnect.android.message.MessageUtils; import org.deviceconnect.android.profile.DConnectProfile; import org.deviceconnect.android.profile.DConnectProfileProvider; import org.deviceconnect.android.profile.ServiceInformationProfile; import org.deviceconnect.profile.ServiceDiscoveryProfileConstants; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Device Connect APIサービス. * * @author NTT DOCOMO, INC. */ public class DConnectService implements DConnectProfileProvider, ServiceDiscoveryProfileConstants { /** * サービスID. */ private final String mId; /** * サポートするプロファイル一覧. */ private final Map<String, DConnectProfile> mProfiles = new HashMap<>(); /** * サービス名. */ private String mName; /** * サービスタイプ. */ private String mType; /** * オンラインフラグ. */ private boolean mIsOnline; /** * サービスのコンフィグ. */ private String mConfig; /** * コンテキスト. */ private Context mContext; /** * プラグインのコンテキスト. */ private DevicePluginContext mPluginContext; /** * ステータス更新通知リスナー. */ private OnStatusChangeListener mStatusListener; /** * コンストラクタ. * @param id サービスID * @throws NullPointerException idに<code>null</code>が指定された場合 */ public DConnectService(final String id) { if (id == null) { throw new NullPointerException("id is null."); } mId = id; addProfile(new ServiceInformationProfile()); } /** * サービスIDを取得する. * @return サービスID */ public String getId() { return mId; } /** * サービス名を設定する. * * @param name サービス名 */ public void setName(final String name) { mName = name; } /** * サービス名を取得する. * * @return サービス名. */ public String getName() { return mName; } /** * サービスのネットワークタイプを設定する. 
* * @param type ネットワークタイプ */ public void setNetworkType(final NetworkType type) { mType = type.getValue(); } /** * サービスのネットワークタイプを設定する. * <p> * {@link org.deviceconnect.profile.ServiceDiscoveryProfileConstants.NetworkType NetworkType} * に定義されていないタイプの場合には、このメソッドを使用して独自のネットワークタイプを設定することができる。 * </p> * @param type ネットワークタイプ */ public void setNetworkType(final String type) { mType = type; } /** * サービスのネットワークタイプを取得する. * @return ネットワークタイプ */ public String getNetworkType() { return mType; } /** * ネットワークの状態を設定する. * * @param isOnline オンラインの場合はtrue、オフラインの場合はfalse */ public void setOnline(final boolean isOnline) { mIsOnline = isOnline; if (mStatusListener != null) { mStatusListener.onStatusChange(this); } } /** * ネットワークの状態を取得する. * @return オンラインの場合はtrue、オフラインの場合はfalse */ public boolean isOnline() { return mIsOnline; } /** * サービスのコンフィグを取得する. * <p> * コンフィグ情報が存在しない場合には{@code null}を返却する。 * </p> * @return サービスのコンフィグ */ public String getConfig() { return mConfig; } /** * サービスのコンフィグを設定する. * <p> * コンフィグ情報が存在しない場合には、{@code null}を設定する。<br> * デフォルトは、{@code null}が設定されている。 * </p> * @param config コンフィグ情報 */ public void setConfig(final String config) { mConfig = config; } /** * コンテキストを設定する. * <p> * {@link DConnectServiceManager}に追加されるときにコンテキストが設定される。 * </p> * @param context コンテキスト */ void setContext(final Context context) { mContext = context; } /** * コンテキストを取得する. * @return コンテキスト */ public Context getContext() { return mContext; } /** * プラグインコンテキストを設定します. * * @param pluginContext プラグインコンテキスト */ void setPluginContext(DevicePluginContext pluginContext) { mPluginContext = pluginContext; } /** * プラグインコンテキストを取得します. 
* * @return プラグインコンテキスト */ public DevicePluginContext getPluginContext() { return mPluginContext; } @Override public List<DConnectProfile> getProfileList() { return new ArrayList<>(mProfiles.values()); } @Override public DConnectProfile getProfile(final String name) { if (name == null) { return null; } return mProfiles.get(name.toLowerCase()); } @Override public void addProfile(final DConnectProfile profile) { if (profile == null) { return; } profile.setService(this); profile.setContext(mContext); profile.setPluginContext(mPluginContext); profile.setResponder(mPluginContext); mProfiles.put(profile.getProfileName().toLowerCase(), profile); } @Override public void removeProfile(final DConnectProfile profile) { if (profile == null) { return; } mProfiles.remove(profile.getProfileName().toLowerCase()); } /** * サービスに命令が通知されたときに呼び出されるメソッド. * <p> * このメソッドの中でサービスに登録されている各プロファイルに命令を振り分ける。<br> * 各プロファイルでは、requestに対するレスポンスをresponseに格納する。 * </p> * <p> * レスポンスにtrueが返却した場合には、Plugin SDKは、responseをDevice Connect Managerに返却する。<br> * falseの場合には、Plugin SDKは、responseをDevice Connect Managerに返却しません。プラグイン側で、 * {@link org.deviceconnect.android.message.DConnectMessageService#sendResponse(Intent)}を用いて * レスポンスを返却する必要があります。 * </p> * @param request リクエスト * @param response レスポンス * @return 同期的にレスポンスを返却する場合にはtrue、それ以外はfalse */ public boolean onRequest(final Intent request, final Intent response) { DConnectProfile profile = getProfile(DConnectProfile.getProfile(request)); if (profile == null) { Log.d("ABC", "/????222"); Log.d("ABC", "request: " + request); Log.d("ABC", "request extras: " + request.getExtras()); MessageUtils.setNotSupportProfileError(response); return true; } return profile.onRequest(request, response); } /** * ステータス更新通知リスナーを設定する. * * @param listener リスナー */ void setOnStatusChangeListener(final OnStatusChangeListener listener) { mStatusListener = listener; } /** * ステータス更新通知リスナー. * * @author NTT DOCOMO, INC. */ interface OnStatusChangeListener { /** * ステータスが変更されたサービスを通知する. 
* * @param service ステータスが変更されたサービス */ void onStatusChange(DConnectService service); } }
ログの削除.
dConnectDevicePlugin/dConnectDevicePluginSDK/dconnect-device-plugin-sdk/src/main/java/org/deviceconnect/android/service/DConnectService.java
ログの削除.
<ide><path>ConnectDevicePlugin/dConnectDevicePluginSDK/dconnect-device-plugin-sdk/src/main/java/org/deviceconnect/android/service/DConnectService.java <ide> public boolean onRequest(final Intent request, final Intent response) { <ide> DConnectProfile profile = getProfile(DConnectProfile.getProfile(request)); <ide> if (profile == null) { <del> Log.d("ABC", "/????222"); <del> Log.d("ABC", "request: " + request); <del> Log.d("ABC", "request extras: " + request.getExtras()); <del> <ide> MessageUtils.setNotSupportProfileError(response); <ide> return true; <ide> }
Java
mit
ec5fe4234f7bc3eecfe3e9628afa2508ea4cbcd4
0
InseeFr/Eno,InseeFr/Eno
package fr.insee.eno.preprocessing; import java.io.File; import java.io.FilenameFilter; import java.io.InputStream; import java.io.OutputStream; import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import fr.insee.eno.Constants; import fr.insee.eno.exception.EnoGenerationException; import fr.insee.eno.parameters.PreProcessing; import fr.insee.eno.transform.xsl.XslTransformation; /** * A DDI specific preprocessor. */ public class DDIDereferencingPreprocessor implements Preprocessor { private static final Logger logger = LoggerFactory.getLogger(DDIDereferencingPreprocessor.class); private XslTransformation saxonService = new XslTransformation(); @Override public File process(File inputFile, byte[] parametersFile, String survey, String in2out) throws Exception { logger.info("DDIPreprocessing Target : START"); String sUB_TEMP_FOLDER = Constants.sUB_TEMP_FOLDER(survey); // ----- Dereferencing logger.debug("Dereferencing : -Input : " + inputFile + " -Output : " + Constants.tEMP_NULL_TMP(sUB_TEMP_FOLDER) + " -Stylesheet : " + Constants.UTIL_DDI_DEREFERENCING_XSL + " -Parameters : " + sUB_TEMP_FOLDER); InputStream isDDI_DEREFERENCING_XSL = Constants.getInputStreamFromPath(Constants.DDI_DEREFERENCING_XSL); InputStream isInputFile = FileUtils.openInputStream(inputFile); OutputStream osTEMP_NULL_TMP = FileUtils.openOutputStream(Constants.tEMP_NULL_TMP(sUB_TEMP_FOLDER)); try { saxonService.transformDereferencing(isInputFile, isDDI_DEREFERENCING_XSL, osTEMP_NULL_TMP, Constants.sUB_TEMP_FOLDER_FILE(survey)); }catch(Exception e) { throw new EnoGenerationException("An error was occured during the " + toString() + " transformation. 
"+e.getMessage()); } isInputFile.close(); isDDI_DEREFERENCING_XSL.close(); osTEMP_NULL_TMP.close(); // ----- Cleaning logger.debug("Cleaning target"); File f = Constants.sUB_TEMP_FOLDER_FILE(survey); File[] matchCleaningInput = f.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return !name.startsWith("null") && name.endsWith(".tmp"); } }); String cleaningInput = null; logger.debug("Searching matching files in : " + sUB_TEMP_FOLDER); for (File file : matchCleaningInput) { if(!file.isDirectory()) { cleaningInput = file.getAbsolutePath(); logger.debug("Found : " + cleaningInput); } } if(cleaningInput==null) { throw new EnoGenerationException("DDIDereferencing produced no file."); } logger.debug("DDIPreprocessing Dereferencing : END"); return new File(cleaningInput); } public String toString() { return PreProcessing.DDI_DEREFERENCING.name(); } }
src/main/java/fr/insee/eno/preprocessing/DDIDereferencingPreprocessor.java
package fr.insee.eno.preprocessing; import java.io.File; import java.io.FilenameFilter; import java.io.InputStream; import java.io.OutputStream; import org.apache.commons.io.FileUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import fr.insee.eno.Constants; import fr.insee.eno.exception.EnoGenerationException; import fr.insee.eno.parameters.PreProcessing; import fr.insee.eno.transform.xsl.XslTransformation; /** * A DDI specific preprocessor. */ public class DDIDereferencingPreprocessor implements Preprocessor { private static final Logger logger = LoggerFactory.getLogger(DDIDereferencingPreprocessor.class); private XslTransformation saxonService = new XslTransformation(); @Override public File process(File inputFile, byte[] parametersFile, String survey, String in2out) throws Exception { logger.info("DDIPreprocessing Target : START"); String sUB_TEMP_FOLDER = Constants.sUB_TEMP_FOLDER(survey); // ----- Dereferencing logger.debug("Dereferencing : -Input : " + inputFile + " -Output : " + Constants.tEMP_NULL_TMP(sUB_TEMP_FOLDER) + " -Stylesheet : " + Constants.UTIL_DDI_DEREFERENCING_XSL + " -Parameters : " + sUB_TEMP_FOLDER); InputStream isDDI_DEREFERENCING_XSL = Constants.getInputStreamFromPath(Constants.DDI_DEREFERENCING_XSL); InputStream isInputFile = FileUtils.openInputStream(inputFile); OutputStream osTEMP_NULL_TMP = FileUtils.openOutputStream(Constants.tEMP_NULL_TMP(sUB_TEMP_FOLDER)); try { saxonService.transformDereferencing(isInputFile, isDDI_DEREFERENCING_XSL, osTEMP_NULL_TMP, Constants.sUB_TEMP_FOLDER_FILE(survey)); }catch(Exception e) { throw new EnoGenerationException("An error was occured during the " + toString() + " transformation. 
"+e.getMessage()); } isInputFile.close(); isDDI_DEREFERENCING_XSL.close(); osTEMP_NULL_TMP.close(); // ----- Cleaning logger.debug("Cleaning target"); File f = Constants.sUB_TEMP_FOLDER_FILE(survey); File[] matchCleaningInput = f.listFiles(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return !name.startsWith("null"); } }); String cleaningInput = null; logger.debug("Searching matching files in : " + sUB_TEMP_FOLDER); for (File file : matchCleaningInput) { if(!file.isDirectory()) { cleaningInput = file.getAbsolutePath(); logger.debug("Found : " + cleaningInput); } } if(cleaningInput==null) { throw new EnoGenerationException("DDIDereferencing produced no file."); } logger.debug("DDIPreprocessing Dereferencing : END"); return new File(cleaningInput); } public String toString() { return PreProcessing.DDI_DEREFERENCING.name(); } }
Dereferencing : add condition on generated file
src/main/java/fr/insee/eno/preprocessing/DDIDereferencingPreprocessor.java
Dereferencing : add condition on generated file
<ide><path>rc/main/java/fr/insee/eno/preprocessing/DDIDereferencingPreprocessor.java <ide> <ide> @Override <ide> public boolean accept(File dir, String name) { <del> return !name.startsWith("null"); <add> return !name.startsWith("null") && name.endsWith(".tmp"); <ide> } <ide> }); <ide>
JavaScript
mit
185cceecd7c32074a26ecd09e74c7bbfef079d28
0
ahmadassaf/Bosco,ahmadassaf/Bosco,tes/bosco,tes/bosco
var _ = require('lodash'); var async = require('async'); var fs = require('fs'); var http = require('http'); var watch = require('watch'); module.exports = { name:'cdn', description:'Aggregates all the static assets across all microservices and serves them via a pseudo local CDN url', example:'bosco cdn <minify>', cmd:cmd, options: [{ option: 'tag', syntax: ['-t, --tag [tag]', 'Filter by a tag defined within bosco-service.json'] }, { option: 'watch', syntax: ['-w, --watch [regex]', 'Filter by a regex of services to watch (similar to -r in run)'] }] } function cmd(bosco, args) { var minify = _.contains(args,'minify'); var port = bosco.config.get('cdn:port') || 7334; var repoPattern = bosco.options.repo; var repoRegex = new RegExp(repoPattern); var watchPattern = bosco.options.watch || '$a'; var watchRegex = new RegExp(watchPattern); var repoTag = bosco.options.tag; bosco.log('Starting pseudo CDN on port: ' + (port+'').blue); var repos = bosco.getRepos(); if(!repos) return bosco.error('You are repo-less :( You need to initialise bosco first, try \'bosco clone\'.'); var startServer = function(staticAssets, staticRepos, serverPort) { var server = http.createServer(function(request, response) { var url = request.url.replace('/',''); var asset = getAsset(staticAssets, url); if(asset) { response.writeHead(200, { 'Content-Type': asset.mimeType, 'Cache-Control': 'no-cache, must-revalidate', 'Pragma': 'no-cache', 'Expires': 'Sat, 21 May 1952 00:00:00 GMT' }); getContent(asset, function(err, content) { if(err) { response.writeHead(500, {'Content-Type': 'text/html'}); response.end('<h2>There was an error: ' + err.message + '</h2>'); } else { response.end(content); } }); } else { if (request.url == '/repos') { response.writeHead(200, {'Content-Type': 'text/html'}); return response.end(staticRepos.formattedRepos); } response.writeHead(404, {'Content-Type': 'text/html'}); response.end(staticAssets.formattedAssets); } }); server.listen(serverPort); bosco.log('Server is listening 
on ' + serverPort); } var getAsset = function(staticAssets, url) { return _.find(staticAssets, 'assetKey', url); } var startMonitor = function(staticAssets) { var watchSet = {}, reloading = {}; _.forEach(staticAssets, function(asset) { var key = asset.assetKey; if(asset.repo && !asset.repo.match(watchRegex)) { return; } if(!minify) { if(asset.path) { watchSet[asset.path] = key; } return; } if(asset.extname == '.manifest') { var manifestFiles = asset.files; manifestFiles.forEach(function(file) { if(file) { watchSet[file.path] = asset.tag; } }); } }); var filterFn = function(f, stat) { return f.match(watchRegex) && stat.isDirectory() || watchSet[f]; } var getIndexForKey = function(assetList, fileKey) { var foundKey; _.forEach(assetList, function(asset, key) { if(asset.assetKey === fileKey) { foundKey = key; } }); return foundKey; } var reloadFile = function(fileKey) { if(!minify) { if(fileKey) { var assetIndex = getIndexForKey(staticAssets, fileKey); if(!assetIndex) { bosco.error('Unable to locate asset with key: ' + fileKey); return; } fs.readFile(staticAssets[assetIndex].path, function (err, data) { if (err) { bosco.log('Error reloading '+fileKey); bosco.log(err.toString()); return; } staticAssets[assetIndex].data = data; staticAssets[assetIndex].content = data.toString(); bosco.log('Reloaded ' + fileKey); reloading[fileKey] = false; }); } } else { if(fileKey) { bosco.log('Recompiling tag ' + fileKey.blue); var options = { repos: repos, minify: minify, buildNumber: 'local', tagFilter: fileKey, watchBuilds: false, reloadOnly: true } bosco.staticUtils.getStaticAssets(options, function(err, updatedAssets) { _.forEach(updatedAssets, function(value) { var index = getIndexForKey(staticAssets, value.assetKey); staticAssets[index] = value; }); bosco.log('Reloaded minified assets for tag ' + fileKey.blue); reloading[fileKey] = false; }); } } } watch.createMonitor(bosco.getOrgPath(), {filter: filterFn, ignoreDotFiles: true, ignoreUnreadableDir: true, ignoreDirectoryPattern: 
/node_modules|\.git|coverage/, interval: 1000}, function (monitor) { bosco.log('Watching '+ _.keys(monitor.files).length + ' files ...'); monitor.on('changed', function (f) { var fileKey = watchSet[f]; if(reloading[fileKey]) return; reloading[fileKey] = true; reloadFile(fileKey); }); }); } var getContent = function(asset, next) { next(null, asset.data || asset.content); } if(minify) bosco.log('Minifying front end assets, this can take some time ...'); var options = { repos: repos, buildNumber: 'local', minify: minify, tagFilter: null, watchBuilds: true, reloadOnly: false, watchRegex: watchRegex, repoRegex: repoRegex, repoTag: repoTag } var executeAsync = { staticAssets: bosco.staticUtils.getStaticAssets.bind(null, options), staticRepos: bosco.staticUtils.getStaticRepos.bind(null, options) } async.parallel(executeAsync, function(err, results){ startServer(results.staticAssets, results.staticRepos, port); startMonitor(results.staticAssets); }); }
commands/cdn.js
var _ = require('lodash'); var async = require('async'); var fs = require('fs'); var http = require('http'); var watch = require('watch'); module.exports = { name:'cdn', description:'Aggregates all the static assets across all microservices and serves them via a pseudo local CDN url', example:'bosco cdn <minify>', cmd:cmd, options: [{ option: 'tag', syntax: ['-t, --tag [tag]', 'Filter by a tag defined within bosco-service.json'] }, { option: 'watch', syntax: ['-w, --watch [regex]', 'Filter by a regex of services to watch (similar to -r in run)'] }] } function cmd(bosco, args) { var minify = _.contains(args,'minify'); var port = bosco.config.get('cdn:port') || 7334; var repoPattern = bosco.options.repo; var repoRegex = new RegExp(repoPattern); var watchPattern = bosco.options.watch || '$a'; var watchRegex = new RegExp(watchPattern); var repoTag = bosco.options.tag; bosco.log('Starting pseudo CDN on port: ' + (port+'').blue); var repos = bosco.getRepos(); if(!repos) return bosco.error('You are repo-less :( You need to initialise bosco first, try \'bosco clone\'.'); var startServer = function(staticAssets, staticRepos, serverPort) { var server = http.createServer(function(request, response) { var url = request.url.replace('/',''); var asset = getAsset(staticAssets, url); if(asset) { response.writeHead(200, { 'Content-Type': asset.mimeType, 'Cache-Control': 'no-cache, must-revalidate', 'Pragma': 'no-cache', 'Expires': 'Sat, 21 May 1952 00:00:00 GMT' }); getContent(asset, function(err, content) { if(err) { response.writeHead(500, {'Content-Type': 'text/html'}); response.end('<h2>There was an error: ' + err.message + '</h2>'); } else { response.end(content); } }); } else { if (request.url == '/repos') { response.writeHead(200, {'Content-Type': 'text/html'}); return response.end(staticRepos.formattedRepos); } response.writeHead(404, {'Content-Type': 'text/html'}); response.end(staticAssets.formattedAssets); } }); server.listen(serverPort); bosco.log('Server is listening 
on ' + serverPort); } var getAsset = function(staticAssets, url) { return _.filter(staticAssets, function(item) { if(item.assetKey === url) { return item; } })[0]; } var startMonitor = function(staticAssets) { var watchSet = {}, reloading = {}; _.forEach(staticAssets, function(asset) { var key = asset.assetKey; if(asset.repo && !asset.repo.match(watchRegex)) { return; } if(!minify) { if(asset.path) { watchSet[asset.path] = key; } return; } if(asset.extname == '.manifest') { var manifestFiles = asset.files; manifestFiles.forEach(function(file) { if(file) { watchSet[file.path] = asset.tag; } }); } }); var filterFn = function(f, stat) { return f.match(watchRegex) && stat.isDirectory() || watchSet[f]; } var getIndexForKey = function(assetList, fileKey) { var foundKey; _.forEach(assetList, function(asset, key) { if(asset.assetKey === fileKey) { foundKey = key; } }); return foundKey; } var reloadFile = function(fileKey) { if(!minify) { if(fileKey) { var assetIndex = getIndexForKey(staticAssets, fileKey); if(!assetIndex) { bosco.error('Unable to locate asset with key: ' + fileKey); return; } fs.readFile(staticAssets[assetIndex].path, function (err, data) { if (err) { bosco.log('Error reloading '+fileKey); bosco.log(err.toString()); return; } staticAssets[assetIndex].data = data; staticAssets[assetIndex].content = data.toString(); bosco.log('Reloaded ' + fileKey); reloading[fileKey] = false; }); } } else { if(fileKey) { bosco.log('Recompiling tag ' + fileKey.blue); var options = { repos: repos, minify: minify, buildNumber: 'local', tagFilter: fileKey, watchBuilds: false, reloadOnly: true } bosco.staticUtils.getStaticAssets(options, function(err, updatedAssets) { _.forEach(updatedAssets, function(value) { var index = getIndexForKey(staticAssets, value.assetKey); staticAssets[index] = value; }); bosco.log('Reloaded minified assets for tag ' + fileKey.blue); reloading[fileKey] = false; }); } } } watch.createMonitor(bosco.getOrgPath(), {filter: filterFn, ignoreDotFiles: true, 
ignoreUnreadableDir: true, ignoreDirectoryPattern: /node_modules|\.git|coverage/, interval: 1000}, function (monitor) { bosco.log('Watching '+ _.keys(monitor.files).length + ' files ...'); monitor.on('changed', function (f) { var fileKey = watchSet[f]; if(reloading[fileKey]) return; reloading[fileKey] = true; reloadFile(fileKey); }); }); } var getContent = function(asset, next) { next(null, asset.data || asset.content); } if(minify) bosco.log('Minifying front end assets, this can take some time ...'); var options = { repos: repos, buildNumber: 'local', minify: minify, tagFilter: null, watchBuilds: true, reloadOnly: false, watchRegex: watchRegex, repoRegex: repoRegex, repoTag: repoTag } var executeAsync = { staticAssets: bosco.staticUtils.getStaticAssets.bind(null, options), staticRepos: bosco.staticUtils.getStaticRepos.bind(null, options) } async.parallel(executeAsync, function(err, results){ startServer(results.staticAssets, results.staticRepos, port); startMonitor(results.staticAssets); }); }
Use find instead of insane filter
commands/cdn.js
Use find instead of insane filter
<ide><path>ommands/cdn.js <ide> } <ide> <ide> var getAsset = function(staticAssets, url) { <del> return _.filter(staticAssets, function(item) { <del> if(item.assetKey === url) { <del> return item; <del> } <del> })[0]; <add> return _.find(staticAssets, 'assetKey', url); <ide> } <ide> <ide> var startMonitor = function(staticAssets) { <ide> <ide> if(reloading[fileKey]) return; <ide> reloading[fileKey] = true; <del> <ide> reloadFile(fileKey); <ide> <ide> });
Java
apache-2.0
037a1c4a8d6543a288e5d921bd290458f8435555
0
yinhe402/commons-io,mohanaraosv/commons-io,jankill/commons-io,krosenvold/commons-io,mohanaraosv/commons-io,krosenvold/commons-io,girirajsharma/commons-io,girirajsharma/commons-io,jankill/commons-io,yinhe402/commons-io,girirajsharma/commons-io,MuShiiii/commons-io,mohanaraosv/commons-io,MuShiiii/commons-io,MuShiiii/commons-io,yinhe402/commons-io,jankill/commons-io,krosenvold/commons-io
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.io; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.util.Collection; import org.apache.commons.io.filefilter.FileFilterUtils; import org.apache.commons.io.filefilter.IOFileFilter; import org.apache.commons.io.filefilter.TrueFileFilter; /** * Abstract class that walks through a directory hierarchy and provides * subclasses with convenient hooks to add specific behaviour. * <p> * This class operates with a {@link FileFilter} and maximum depth to * limit the files and direcories visited. * Commons IO supplies many common filter implementations in the * <a href="filefilter/package-summary.html"> filefilter</a> package. * <p> * The following sections describe: * <ul> * <li><a href="#example">1. Example Implementation</a> - example * <code>FileCleaner</code> implementation.</li> * <li><a href="#filter">2. Filter Example</a> - using * {@link FileFilter}(s) with <code>DirectoryWalker</code>.</li> * <li><a href="#cancel">3. Cancellation</a> - how to implement cancellation * behaviour.</li> * </ul> * * <a name="example"></a> * <h3>1. 
Example Implementation</h3> * * There are many possible extensions, for example, to delete all * files and '.svn' directories, and return a list of deleted files: * <pre> * public class FileCleaner extends DirectoryWalker { * * public FileCleaner() { * super(); * } * * public List clean(File startDirectory) { * List results = new ArrayList(); * walk(startDirectory, results); * return results; * } * * protected boolean handleDirectory(File directory, int depth, Collection results) { * // delete svn directories and then skip * if (".svn".equals(directory.getName())) { * directory.delete(); * return false; * } else { * return true; * } * * } * * protected void handleFile(File file, int depth, Collection results) { * // delete file and add to list of deleted * file.delete(); * results.add(file); * } * } * </pre> * * <a name="filter"></a> * <h3>2. Filter Example</h3> * * Choosing which directories and files to process can be a key aspect * of using this class. This information can be setup in three ways, * via three different constructors. * <p> * The first option is to visit all directories and files. * This is achieved via the no-args constructor. * <p> * The second constructor option is to supply a single {@link FileFilter} * that describes the files and directories to visit. Care must be taken * with this option as the same filter is used for both directories * and files. 
* <p> * For example, if you wanted all directories which are not hidden * and files which end in ".txt": * <pre> * public class FooDirectoryWalker extends DirectoryWalker { * public FooDirectoryWalker(FileFilter filter) { * super(filter, -1); * } * } * * // Build up the filters and create the walker * // Create a filter for Non-hidden directories * IOFileFilter fooDirFilter = * FileFilterUtils.andFileFilter(FileFilterUtils.directoryFileFilter, * HiddenFileFilter.VISIBLE); * * // Create a filter for Files ending in ".txt" * IOFileFilter fooFileFilter = * FileFilterUtils.andFileFilter(FileFilterUtils.fileFileFilter, * FileFilterUtils.suffixFileFilter(".txt")); * * // Combine the directory and file filters using an OR condition * java.io.FileFilter fooFilter = * FileFilterUtils.orFileFilter(fooDirFilter, fooFileFilter); * * // Use the filter to construct a DirectoryWalker implementation * FooDirectoryWalker walker = new FooDirectoryWalker(fooFilter); * </pre> * <p> * The third constructor option is to specify separate filters, one for * directories and one for files. These are combined internally to form * the correct <code>FileFilter</code>, something which is very easy to * get wrong when attempted manually, particularly when trying to * express constructs like 'any file in directories named docs'. * <p> * For example, if you wanted all directories which are not hidden * and files which end in ".txt": * <pre> * public class FooDirectoryWalker extends DirectoryWalker { * public FooDirectoryWalker(IOFileFilter dirFilter, IOFileFilter fileFilter) { * super(dirFilter, fileFilter, -1); * } * } * * // Use the filters to construct the walker * FooDirectoryWalker walker = new FooDirectoryWalker( * HiddenFileFilter.VISIBLE, * FileFilterUtils.suffixFileFilter(".txt"), * ); * </pre> * This is much simpler than the previous example, and is why it is the preferred * option for filtering. * * <a name="cancel"></a> * <h3>3. 
Cancellation</h3> * * The DirectoryWalker contains some of the logic required for cancel processing. * Subclasses must complete the implementation. * <p> * What <code>DirectoryWalker</code> does provide for cancellation is: * <ul> * <li>{@link CancelException} which can be thrown in any of the * <i>lifecycle</i> methods to stop processing.</li> * <li>The <code>walk()</code> method traps thrown {@link CancelException} * and calls the <code>handleCancelled()</code> method, providing * a place for custom cancel processing.</li> * </ul> * <p> * Implementations need to provide: * <ul> * <li>The decision logic on whether to cancel processing or not.</li> * <li>Constructing and throwing a {@link CancelException}.</li> * <li>Custom cancel processing in the <code>handleCancelled()</code> method. * </ul> * <p> * Two possible scenarios are envisaged for cancellation: * <ul> * <li><a href="#external">3.1 External / Mult-threaded</a> - cancellation being * decided/initiated by an external process.</li> * <li><a href="#internal">3.2 Internal</a> - cancellation being decided/initiated * from within a DirectoryWalker implementation.</li> * </ul> * <p> * The following sections provide example implementations for these two different * scenarios. * * <a name="external"></a> * <h4>3.1 External / Multi-threaded</h4> * * This example provides a public <code>cancel()</code> method that can be * called by another thread to stop the processing. A typical example use-case * would be a cancel button on a GUI. Calling this method sets a * <a href="http://java.sun.com/docs/books/jls/second_edition/html/classes.doc.html#36930"> * volatile</a> flag to ensure it will work properly in a multi-threaded environment. * The flag is returned by the <code>handleIsCancelled()</code> method, which * will cause the walk to stop immediately. The <code>handleCancelled()</code> * method will be the next, and last, callback method received once cancellation * has occurred. 
* * <pre> * public class FooDirectoryWalker extends DirectoryWalker { * * private volatile boolean cancelled = false; * * public void cancel() { * cancelled = true; * } * * private void handleIsCancelled(File file, int depth, Collection results) { * return cancelled; * } * * protected void handleCancelled(File startDirectory, Collection results, CancelException cancel) { * // implement processing required when a cancellation occurs * } * } * </pre> * * <a name="internal"></a> * <h4>3.2 Internal</h4> * * This shows an example of how internal cancellation processing could be implemented. * <b>Note</b> the decision logic and throwing a {@link CancelException} could be implemented * in any of the <i>lifecycle</i> methods. * * <pre> * public class BarDirectoryWalker extends DirectoryWalker { * * protected boolean handleDirectory(File directory, int depth, Collection results) throws IOException { * // cancel if hidden directory * if (directory.isHidden()) { * throw new CancelException(file, depth); * } * return true; * } * * protected void handleFile(File file, int depth, Collection results) throws IOException { * // cancel if read-only file * if (!file.canWrite()) { * throw new CancelException(file, depth); * } * results.add(file); * } * * protected void handleCancelled(File startDirectory, Collection results, CancelException cancel) { * // implement processing required when a cancellation occurs * } * } * </pre> * * @since Commons IO 1.3 * @version $Revision: 424748 $ */ public abstract class DirectoryWalker { /** * The file filter to use to filter files and directories. */ private final FileFilter filter; /** * The limit on the directory depth to walk. */ private final int depthLimit; /** * Construct an instance with no filtering and unlimited <i>depth</i>. */ protected DirectoryWalker() { this(null, -1); } /** * Construct an instance with a filter and limit the <i>depth</i> navigated to. 
* <p> * The filter controls which files and directories will be navigated to as * part of the walk. The {@link FileFilterUtils} class is useful for combining * various filters together. A <code>null</code> filter means that no * filtering should occur and all files and directories will be visited. * * @param filter the filter to apply, null means visit all files * @param depthLimit controls how <i>deep</i> the hierarchy is * navigated to (less than 0 means unlimited) */ protected DirectoryWalker(FileFilter filter, int depthLimit) { this.filter = filter; this.depthLimit = depthLimit; } /** * Construct an instance with a directory and a file filter and an optional * limit on the <i>depth</i> navigated to. * <p> * The filters control which files and directories will be navigated to as part * of the walk. This constructor uses {@link FileFilterUtils#makeDirectoryOnly(IOFileFilter)} * and {@link FileFilterUtils#makeFileOnly(IOFileFilter)} internally to combine the filters. * A <code>null</code> filter means that no filtering should occur. * * @param directoryFilter the filter to apply to directories, null means visit all directories * @param fileFilter the filter to apply to files, null means visit all files * @param depthLimit controls how <i>deep</i> the hierarchy is * navigated to (less than 0 means unlimited) */ protected DirectoryWalker(IOFileFilter directoryFilter, IOFileFilter fileFilter, int depthLimit) { if (directoryFilter == null && fileFilter == null) { this.filter = null; } else { directoryFilter = (directoryFilter != null ? directoryFilter : TrueFileFilter.TRUE); fileFilter = (fileFilter != null ? 
fileFilter : TrueFileFilter.TRUE); directoryFilter = FileFilterUtils.makeDirectoryOnly(directoryFilter); fileFilter = FileFilterUtils.makeFileOnly(fileFilter); this.filter = FileFilterUtils.orFileFilter(directoryFilter, fileFilter); } this.depthLimit = depthLimit; } //----------------------------------------------------------------------- /** * Internal method that walks the directory hierarchy in a depth-first manner. * <p> * Users of this class do not need to call this method. This method will * be called automatically by another (public) method on the specific subclass. * <p> * Writers of subclasses should call this method to start the directory walk. * Once called, this method will emit events as it walks the hierarchy. * The event methods have the prefix <code>handle</code>. * * @param startDirectory the directory to start from, not null * @param results the collection of result objects, may be updated * @throws NullPointerException if the start directory is null * @throws IOException if an I/O Error occurs */ protected final void walk(File startDirectory, Collection<?> results) throws IOException { if (startDirectory == null) { throw new NullPointerException("Start Directory is null"); } try { handleStart(startDirectory, results); walk(startDirectory, 0, results); handleEnd(results); } catch(CancelException cancel) { handleCancelled(startDirectory, results, cancel); } } /** * Main recursive method to examine the directory hierarchy. 
* * @param directory the directory to examine, not null * @param depth the directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ private void walk(File directory, int depth, Collection<?> results) throws IOException { checkIfCancelled(directory, depth, results); if (handleDirectory(directory, depth, results)) { handleDirectoryStart(directory, depth, results); int childDepth = depth + 1; if (depthLimit < 0 || childDepth <= depthLimit) { checkIfCancelled(directory, depth, results); File[] childFiles = (filter == null ? directory.listFiles() : directory.listFiles(filter)); if (childFiles == null) { handleRestricted(directory, childDepth, results); } else { for (int i = 0; i < childFiles.length; i++) { File childFile = childFiles[i]; if (childFile.isDirectory()) { walk(childFile, childDepth, results); } else { checkIfCancelled(childFile, childDepth, results); handleFile(childFile, childDepth, results); checkIfCancelled(childFile, childDepth, results); } } } } handleDirectoryEnd(directory, depth, results); } checkIfCancelled(directory, depth, results); } //----------------------------------------------------------------------- /** * Checks whether the walk has been cancelled by calling {@link #handleIsCancelled}, * throwing a <code>CancelException</code> if it has. * <p> * Writers of subclasses should not normally call this method as it is called * automatically by the walk of the tree. However, sometimes a single method, * typically {@link #handleFile}, may take a long time to run. In that case, * you may wish to check for cancellation by calling this method. 
* * @param file the current file being processed * @param depth the current file level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected final void checkIfCancelled(File file, int depth, Collection<?> results) throws IOException { if (handleIsCancelled(file, depth, results)) { throw new CancelException(file, depth); } } /** * Overridable callback method invoked to determine if the entire walk * operation should be immediately cancelled. * <p> * This method should be implemented by those subclasses that want to * provide a public <code>cancel()</code> method available from another * thread. The design pattern for the subclass should be as follows: * <pre> * public class FooDirectoryWalker extends DirectoryWalker { * private volatile boolean cancelled = false; * * public void cancel() { * cancelled = true; * } * private void handleIsCancelled(File file, int depth, Collection results) { * return cancelled; * } * protected void handleCancelled(File startDirectory, * Collection results, CancelException cancel) { * // implement processing required when a cancellation occurs * } * } * </pre> * <p> * If this method returns true, then the directory walk is immediately * cancelled. The next callback method will be {@link #handleCancelled}. * <p> * This implementation returns false. * * @param file the file or directory being processed * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @return true if the walk has been cancelled * @throws IOException if an I/O Error occurs */ protected boolean handleIsCancelled( File file, int depth, Collection<?> results) throws IOException { // do nothing - overridable by subclass return false; // not cancelled } /** * Overridable callback method invoked when the operation is cancelled. 
* The file being processed when the cancellation occurred can be * obtained from the exception. * <p> * This implementation just re-throws the {@link CancelException}. * * @param startDirectory the directory that the walk started from * @param results the collection of result objects, may be updated * @param cancel the exception throw to cancel further processing * containing details at the point of cancellation. * @throws IOException if an I/O Error occurs */ protected void handleCancelled(File startDirectory, Collection<?> results, CancelException cancel) throws IOException { // re-throw exception - overridable by subclass throw cancel; } //----------------------------------------------------------------------- /** * Overridable callback method invoked at the start of processing. * <p> * This implementation does nothing. * * @param startDirectory the directory to start from * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleStart(File startDirectory, Collection<?> results) throws IOException { // do nothing - overridable by subclass } /** * Overridable callback method invoked to determine if a directory should be processed. * <p> * This method returns a boolean to indicate if the directory should be examined or not. * If you return false, the entire directory and any subdirectories will be skipped. * Note that this functionality is in addition to the filtering by file filter. * <p> * This implementation does nothing and returns true. 
* * @param directory the current directory being processed * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @return true to process this directory, false to skip this directory * @throws IOException if an I/O Error occurs */ protected boolean handleDirectory(File directory, int depth, Collection<?> results) throws IOException { // do nothing - overridable by subclass return true; // process directory } /** * Overridable callback method invoked at the start of processing each directory. * <p> * This implementation does nothing. * * @param directory the current directory being processed * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleDirectoryStart(File directory, int depth, Collection<?> results) throws IOException { // do nothing - overridable by subclass } /** * Overridable callback method invoked for each (non-directory) file. * <p> * This implementation does nothing. * * @param file the current file being processed * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleFile(File file, int depth, Collection<?> results) throws IOException { // do nothing - overridable by subclass } /** * Overridable callback method invoked for each restricted directory. * <p> * This implementation does nothing. 
* * @param directory the restricted directory * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleRestricted(File directory, int depth, Collection<?> results) throws IOException { // do nothing - overridable by subclass } /** * Overridable callback method invoked at the end of processing each directory. * <p> * This implementation does nothing. * * @param directory the directory being processed * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleDirectoryEnd(File directory, int depth, Collection<?> results) throws IOException { // do nothing - overridable by subclass } /** * Overridable callback method invoked at the end of processing. * <p> * This implementation does nothing. * * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleEnd(Collection<?> results) throws IOException { // do nothing - overridable by subclass } //----------------------------------------------------------------------- /** * CancelException is thrown in DirectoryWalker to cancel the current * processing. */ public static class CancelException extends IOException { /** Serialization id. */ private static final long serialVersionUID = 1347339620135041008L; /** The file being processed when the exception was thrown. */ private File file; /** The file depth when the exception was thrown. */ private int depth = -1; /** * Constructs a <code>CancelException</code> with * the file and depth when cancellation occurred. 
* * @param file the file when the operation was cancelled, may be null * @param depth the depth when the operation was cancelled, may be null */ public CancelException(File file, int depth) { this("Operation Cancelled", file, depth); } /** * Constructs a <code>CancelException</code> with * an appropriate message and the file and depth when * cancellation occurred. * * @param message the detail message * @param file the file when the operation was cancelled * @param depth the depth when the operation was cancelled */ public CancelException(String message, File file, int depth) { super(message); this.file = file; this.depth = depth; } /** * Return the file when the operation was cancelled. * * @return the file when the operation was cancelled */ public File getFile() { return file; } /** * Return the depth when the operation was cancelled. * * @return the depth when the operation was cancelled */ public int getDepth() { return depth; } } }
src/java/org/apache/commons/io/DirectoryWalker.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.io; import java.io.File; import java.io.FileFilter; import java.io.IOException; import java.util.Collection; import org.apache.commons.io.filefilter.FileFilterUtils; import org.apache.commons.io.filefilter.IOFileFilter; import org.apache.commons.io.filefilter.TrueFileFilter; /** * Abstract class that walks through a directory hierarchy and provides * subclasses with convenient hooks to add specific behaviour. * <p> * This class operates with a {@link FileFilter} and maximum depth to * limit the files and direcories visited. * Commons IO supplies many common filter implementations in the * <a href="filefilter/package-summary.html"> filefilter</a> package. * <p> * The following sections describe: * <ul> * <li><a href="#example">1. Example Implementation</a> - example * <code>FileCleaner</code> implementation.</li> * <li><a href="#filter">2. Filter Example</a> - using * {@link FileFilter}(s) with <code>DirectoryWalker</code>.</li> * <li><a href="#cancel">3. Cancellation</a> - how to implement cancellation * behaviour.</li> * </ul> * * <a name="example"></a> * <h3>1. 
Example Implementation</h3> * * There are many possible extensions, for example, to delete all * files and '.svn' directories, and return a list of deleted files: * <pre> * public class FileCleaner extends DirectoryWalker { * * public FileCleaner() { * super(); * } * * public List clean(File startDirectory) { * List results = new ArrayList(); * walk(startDirectory, results); * return results; * } * * protected boolean handleDirectory(File directory, int depth, Collection results) { * // delete svn directories and then skip * if (".svn".equals(directory.getName())) { * directory.delete(); * return false; * } else { * return true; * } * * } * * protected void handleFile(File file, int depth, Collection results) { * // delete file and add to list of deleted * file.delete(); * results.add(file); * } * } * </pre> * * <a name="filter"></a> * <h3>2. Filter Example</h3> * * Choosing which directories and files to process can be a key aspect * of using this class. This information can be setup in three ways, * via three different constructors. * <p> * The first option is to visit all directories and files. * This is achieved via the no-args constructor. * <p> * The second constructor option is to supply a single {@link FileFilter} * that describes the files and directories to visit. Care must be taken * with this option as the same filter is used for both directories * and files. 
* <p> * For example, if you wanted all directories which are not hidden * and files which end in ".txt": * <pre> * public class FooDirectoryWalker extends DirectoryWalker { * public FooDirectoryWalker(FileFilter filter) { * super(filter, -1); * } * } * * // Build up the filters and create the walker * // Create a filter for Non-hidden directories * IOFileFilter fooDirFilter = * FileFilterUtils.andFileFilter(FileFilterUtils.directoryFileFilter, * HiddenFileFilter.VISIBLE); * * // Create a filter for Files ending in ".txt" * IOFileFilter fooFileFilter = * FileFilterUtils.andFileFilter(FileFilterUtils.fileFileFilter, * FileFilterUtils.suffixFileFilter(".txt")); * * // Combine the directory and file filters using an OR condition * java.io.FileFilter fooFilter = * FileFilterUtils.orFileFilter(fooDirFilter, fooFileFilter); * * // Use the filter to construct a DirectoryWalker implementation * FooDirectoryWalker walker = new FooDirectoryWalker(fooFilter); * </pre> * <p> * The third constructor option is to specify separate filters, one for * directories and one for files. These are combined internally to form * the correct <code>FileFilter</code>, something which is very easy to * get wrong when attempted manually, particularly when trying to * express constructs like 'any file in directories named docs'. * <p> * For example, if you wanted all directories which are not hidden * and files which end in ".txt": * <pre> * public class FooDirectoryWalker extends DirectoryWalker { * public FooDirectoryWalker(IOFileFilter dirFilter, IOFileFilter fileFilter) { * super(dirFilter, fileFilter, -1); * } * } * * // Use the filters to construct the walker * FooDirectoryWalker walker = new FooDirectoryWalker( * HiddenFileFilter.VISIBLE, * FileFilterUtils.suffixFileFilter(".txt"), * ); * </pre> * This is much simpler than the previous example, and is why it is the preferred * option for filtering. * * <a name="cancel"></a> * <h3>3. 
Cancellation</h3> * * The DirectoryWalker contains some of the logic required for cancel processing. * Subclasses must complete the implementation. * <p> * What <code>DirectoryWalker</code> does provide for cancellation is: * <ul> * <li>{@link CancelException} which can be thrown in any of the * <i>lifecycle</i> methods to stop processing.</li> * <li>The <code>walk()</code> method traps thrown {@link CancelException} * and calls the <code>handleCancelled()</code> method, providing * a place for custom cancel processing.</li> * </ul> * <p> * Implementations need to provide: * <ul> * <li>The decision logic on whether to cancel processing or not.</li> * <li>Constructing and throwing a {@link CancelException}.</li> * <li>Custom cancel processing in the <code>handleCancelled()</code> method. * </ul> * <p> * Two possible scenarios are envisaged for cancellation: * <ul> * <li><a href="#external">3.1 External / Mult-threaded</a> - cancellation being * decided/initiated by an external process.</li> * <li><a href="#internal">3.2 Internal</a> - cancellation being decided/initiated * from within a DirectoryWalker implementation.</li> * </ul> * <p> * The following sections provide example implementations for these two different * scenarios. * * <a name="external"></a> * <h4>3.1 External / Multi-threaded</h4> * * This example provides a public <code>cancel()</code> method that can be * called by another thread to stop the processing. A typical example use-case * would be a cancel button on a GUI. Calling this method sets a * <a href="http://java.sun.com/docs/books/jls/second_edition/html/classes.doc.html#36930"> * volatile</a> flag to ensure it will work properly in a multi-threaded environment. * The flag is returned by the <code>handleIsCancelled()</code> method, which * will cause the walk to stop immediately. The <code>handleCancelled()</code> * method will be the next, and last, callback method received once cancellation * has occurred. 
* * <pre> * public class FooDirectoryWalker extends DirectoryWalker { * * private volatile boolean cancelled = false; * * public void cancel() { * cancelled = true; * } * * private void handleIsCancelled(File file, int depth, Collection results) { * return cancelled; * } * * protected void handleCancelled(File startDirectory, Collection results, CancelException cancel) { * // implement processing required when a cancellation occurs * } * } * </pre> * * <a name="internal"></a> * <h4>3.2 Internal</h4> * * This shows an example of how internal cancellation processing could be implemented. * <b>Note</b> the decision logic and throwing a {@link CancelException} could be implemented * in any of the <i>lifecycle</i> methods. * * <pre> * public class BarDirectoryWalker extends DirectoryWalker { * * protected boolean handleDirectory(File directory, int depth, Collection results) throws IOException { * // cancel if hidden directory * if (directory.isHidden()) { * throw new CancelException(file, depth); * } * return true; * } * * protected void handleFile(File file, int depth, Collection results) throws IOException { * // cancel if read-only file * if (!file.canWrite()) { * throw new CancelException(file, depth); * } * results.add(file); * } * * protected void handleCancelled(File startDirectory, Collection results, CancelException cancel) { * // implement processing required when a cancellation occurs * } * } * </pre> * * @since Commons IO 1.3 * @version $Revision: 424748 $ */ public abstract class DirectoryWalker { /** * The file filter to use to filter files and directories. */ private final FileFilter filter; /** * The limit on the directory depth to walk. */ private final int depthLimit; /** * Construct an instance with no filtering and unlimited <i>depth</i>. */ protected DirectoryWalker() { this(null, -1); } /** * Construct an instance with a filter and limit the <i>depth</i> navigated to. 
* <p> * The filter controls which files and directories will be navigated to as * part of the walk. The {@link FileFilterUtils} class is useful for combining * various filters together. A <code>null</code> filter means that no * filtering should occur and all files and directories will be visited. * * @param filter the filter to apply, null means visit all files * @param depthLimit controls how <i>deep</i> the hierarchy is * navigated to (less than 0 means unlimited) */ protected DirectoryWalker(FileFilter filter, int depthLimit) { this.filter = filter; this.depthLimit = depthLimit; } /** * Construct an instance with a directory and a file filter and an optional * limit on the <i>depth</i> navigated to. * <p> * The filters control which files and directories will be navigated to as part * of the walk. This constructor uses {@link FileFilterUtils#makeDirectoryOnly(IOFileFilter)} * and {@link FileFilterUtils#makeFileOnly(IOFileFilter)} internally to combine the filters. * A <code>null</code> filter means that no filtering should occur. * * @param directoryFilter the filter to apply to directories, null means visit all directories * @param fileFilter the filter to apply to files, null means visit all files * @param depthLimit controls how <i>deep</i> the hierarchy is * navigated to (less than 0 means unlimited) */ protected DirectoryWalker(IOFileFilter directoryFilter, IOFileFilter fileFilter, int depthLimit) { if (directoryFilter == null && fileFilter == null) { this.filter = null; } else { directoryFilter = (directoryFilter != null ? directoryFilter : TrueFileFilter.TRUE); fileFilter = (fileFilter != null ? 
fileFilter : TrueFileFilter.TRUE); directoryFilter = FileFilterUtils.makeDirectoryOnly(directoryFilter); fileFilter = FileFilterUtils.makeFileOnly(fileFilter); this.filter = FileFilterUtils.orFileFilter(directoryFilter, fileFilter); } this.depthLimit = depthLimit; } //----------------------------------------------------------------------- /** * Internal method that walks the directory hierarchy in a depth-first manner. * <p> * Users of this class do not need to call this method. This method will * be called automatically by another (public) method on the specific subclass. * <p> * Writers of subclasses should call this method to start the directory walk. * Once called, this method will emit events as it walks the hierarchy. * The event methods have the prefix <code>handle</code>. * * @param startDirectory the directory to start from, not null * @param results the collection of result objects, may be updated * @throws NullPointerException if the start directory is null * @throws IOException if an I/O Error occurs */ protected final void walk(File startDirectory, Collection results) throws IOException { if (startDirectory == null) { throw new NullPointerException("Start Directory is null"); } try { handleStart(startDirectory, results); walk(startDirectory, 0, results); handleEnd(results); } catch(CancelException cancel) { handleCancelled(startDirectory, results, cancel); } } /** * Main recursive method to examine the directory hierarchy. 
* * @param directory the directory to examine, not null * @param depth the directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ private void walk(File directory, int depth, Collection results) throws IOException { checkIfCancelled(directory, depth, results); if (handleDirectory(directory, depth, results)) { handleDirectoryStart(directory, depth, results); int childDepth = depth + 1; if (depthLimit < 0 || childDepth <= depthLimit) { checkIfCancelled(directory, depth, results); File[] childFiles = (filter == null ? directory.listFiles() : directory.listFiles(filter)); if (childFiles == null) { handleRestricted(directory, childDepth, results); } else { for (int i = 0; i < childFiles.length; i++) { File childFile = childFiles[i]; if (childFile.isDirectory()) { walk(childFile, childDepth, results); } else { checkIfCancelled(childFile, childDepth, results); handleFile(childFile, childDepth, results); checkIfCancelled(childFile, childDepth, results); } } } } handleDirectoryEnd(directory, depth, results); } checkIfCancelled(directory, depth, results); } //----------------------------------------------------------------------- /** * Checks whether the walk has been cancelled by calling {@link #handleIsCancelled}, * throwing a <code>CancelException</code> if it has. * <p> * Writers of subclasses should not normally call this method as it is called * automatically by the walk of the tree. However, sometimes a single method, * typically {@link #handleFile}, may take a long time to run. In that case, * you may wish to check for cancellation by calling this method. 
* * @param file the current file being processed * @param depth the current file level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected final void checkIfCancelled(File file, int depth, Collection results) throws IOException { if (handleIsCancelled(file, depth, results)) { throw new CancelException(file, depth); } } /** * Overridable callback method invoked to determine if the entire walk * operation should be immediately cancelled. * <p> * This method should be implemented by those subclasses that want to * provide a public <code>cancel()</code> method available from another * thread. The design pattern for the subclass should be as follows: * <pre> * public class FooDirectoryWalker extends DirectoryWalker { * private volatile boolean cancelled = false; * * public void cancel() { * cancelled = true; * } * private void handleIsCancelled(File file, int depth, Collection results) { * return cancelled; * } * protected void handleCancelled(File startDirectory, * Collection results, CancelException cancel) { * // implement processing required when a cancellation occurs * } * } * </pre> * <p> * If this method returns true, then the directory walk is immediately * cancelled. The next callback method will be {@link #handleCancelled}. * <p> * This implementation returns false. * * @param file the file or directory being processed * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @return true if the walk has been cancelled * @throws IOException if an I/O Error occurs */ protected boolean handleIsCancelled( File file, int depth, Collection results) throws IOException { // do nothing - overridable by subclass return false; // not cancelled } /** * Overridable callback method invoked when the operation is cancelled. 
* The file being processed when the cancellation occurred can be * obtained from the exception. * <p> * This implementation just re-throws the {@link CancelException}. * * @param startDirectory the directory that the walk started from * @param results the collection of result objects, may be updated * @param cancel the exception throw to cancel further processing * containing details at the point of cancellation. * @throws IOException if an I/O Error occurs */ protected void handleCancelled(File startDirectory, Collection results, CancelException cancel) throws IOException { // re-throw exception - overridable by subclass throw cancel; } //----------------------------------------------------------------------- /** * Overridable callback method invoked at the start of processing. * <p> * This implementation does nothing. * * @param startDirectory the directory to start from * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleStart(File startDirectory, Collection results) throws IOException { // do nothing - overridable by subclass } /** * Overridable callback method invoked to determine if a directory should be processed. * <p> * This method returns a boolean to indicate if the directory should be examined or not. * If you return false, the entire directory and any subdirectories will be skipped. * Note that this functionality is in addition to the filtering by file filter. * <p> * This implementation does nothing and returns true. 
* * @param directory the current directory being processed * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @return true to process this directory, false to skip this directory * @throws IOException if an I/O Error occurs */ protected boolean handleDirectory(File directory, int depth, Collection results) throws IOException { // do nothing - overridable by subclass return true; // process directory } /** * Overridable callback method invoked at the start of processing each directory. * <p> * This implementation does nothing. * * @param directory the current directory being processed * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleDirectoryStart(File directory, int depth, Collection results) throws IOException { // do nothing - overridable by subclass } /** * Overridable callback method invoked for each (non-directory) file. * <p> * This implementation does nothing. * * @param file the current file being processed * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleFile(File file, int depth, Collection results) throws IOException { // do nothing - overridable by subclass } /** * Overridable callback method invoked for each restricted directory. * <p> * This implementation does nothing. 
* * @param directory the restricted directory * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleRestricted(File directory, int depth, Collection results) throws IOException { // do nothing - overridable by subclass } /** * Overridable callback method invoked at the end of processing each directory. * <p> * This implementation does nothing. * * @param directory the directory being processed * @param depth the current directory level (starting directory = 0) * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleDirectoryEnd(File directory, int depth, Collection results) throws IOException { // do nothing - overridable by subclass } /** * Overridable callback method invoked at the end of processing. * <p> * This implementation does nothing. * * @param results the collection of result objects, may be updated * @throws IOException if an I/O Error occurs */ protected void handleEnd(Collection results) throws IOException { // do nothing - overridable by subclass } //----------------------------------------------------------------------- /** * CancelException is thrown in DirectoryWalker to cancel the current * processing. */ public static class CancelException extends IOException { /** Serialization id. */ private static final long serialVersionUID = 1347339620135041008L; /** The file being processed when the exception was thrown. */ private File file; /** The file depth when the exception was thrown. */ private int depth = -1; /** * Constructs a <code>CancelException</code> with * the file and depth when cancellation occurred. 
* * @param file the file when the operation was cancelled, may be null * @param depth the depth when the operation was cancelled, may be null */ public CancelException(File file, int depth) { this("Operation Cancelled", file, depth); } /** * Constructs a <code>CancelException</code> with * an appropriate message and the file and depth when * cancellation occurred. * * @param message the detail message * @param file the file when the operation was cancelled * @param depth the depth when the operation was cancelled */ public CancelException(String message, File file, int depth) { super(message); this.file = file; this.depth = depth; } /** * Return the file when the operation was cancelled. * * @return the file when the operation was cancelled */ public File getFile() { return file; } /** * Return the depth when the operation was cancelled. * * @return the depth when the operation was cancelled */ public int getDepth() { return depth; } } }
Use generics. git-svn-id: cb61607abf5ac23ab48a85ccf5b9d9390a6837ee@620379 13f79535-47bb-0310-9956-ffa450edef68
src/java/org/apache/commons/io/DirectoryWalker.java
Use generics.
<ide><path>rc/java/org/apache/commons/io/DirectoryWalker.java <ide> * @throws NullPointerException if the start directory is null <ide> * @throws IOException if an I/O Error occurs <ide> */ <del> protected final void walk(File startDirectory, Collection results) throws IOException { <add> protected final void walk(File startDirectory, Collection<?> results) throws IOException { <ide> if (startDirectory == null) { <ide> throw new NullPointerException("Start Directory is null"); <ide> } <ide> * @param results the collection of result objects, may be updated <ide> * @throws IOException if an I/O Error occurs <ide> */ <del> private void walk(File directory, int depth, Collection results) throws IOException { <add> private void walk(File directory, int depth, Collection<?> results) throws IOException { <ide> checkIfCancelled(directory, depth, results); <ide> if (handleDirectory(directory, depth, results)) { <ide> handleDirectoryStart(directory, depth, results); <ide> * @param results the collection of result objects, may be updated <ide> * @throws IOException if an I/O Error occurs <ide> */ <del> protected final void checkIfCancelled(File file, int depth, Collection results) throws IOException { <add> protected final void checkIfCancelled(File file, int depth, Collection<?> results) throws IOException { <ide> if (handleIsCancelled(file, depth, results)) { <ide> throw new CancelException(file, depth); <ide> } <ide> * @throws IOException if an I/O Error occurs <ide> */ <ide> protected boolean handleIsCancelled( <del> File file, int depth, Collection results) throws IOException { <add> File file, int depth, Collection<?> results) throws IOException { <ide> // do nothing - overridable by subclass <ide> return false; // not cancelled <ide> } <ide> * containing details at the point of cancellation. 
<ide> * @throws IOException if an I/O Error occurs <ide> */ <del> protected void handleCancelled(File startDirectory, Collection results, <add> protected void handleCancelled(File startDirectory, Collection<?> results, <ide> CancelException cancel) throws IOException { <ide> // re-throw exception - overridable by subclass <ide> throw cancel; <ide> * @param results the collection of result objects, may be updated <ide> * @throws IOException if an I/O Error occurs <ide> */ <del> protected void handleStart(File startDirectory, Collection results) throws IOException { <add> protected void handleStart(File startDirectory, Collection<?> results) throws IOException { <ide> // do nothing - overridable by subclass <ide> } <ide> <ide> * @return true to process this directory, false to skip this directory <ide> * @throws IOException if an I/O Error occurs <ide> */ <del> protected boolean handleDirectory(File directory, int depth, Collection results) throws IOException { <add> protected boolean handleDirectory(File directory, int depth, Collection<?> results) throws IOException { <ide> // do nothing - overridable by subclass <ide> return true; // process directory <ide> } <ide> * @param results the collection of result objects, may be updated <ide> * @throws IOException if an I/O Error occurs <ide> */ <del> protected void handleDirectoryStart(File directory, int depth, Collection results) throws IOException { <add> protected void handleDirectoryStart(File directory, int depth, Collection<?> results) throws IOException { <ide> // do nothing - overridable by subclass <ide> } <ide> <ide> * @param results the collection of result objects, may be updated <ide> * @throws IOException if an I/O Error occurs <ide> */ <del> protected void handleFile(File file, int depth, Collection results) throws IOException { <add> protected void handleFile(File file, int depth, Collection<?> results) throws IOException { <ide> // do nothing - overridable by subclass <ide> } <ide> <ide> * @param 
results the collection of result objects, may be updated <ide> * @throws IOException if an I/O Error occurs <ide> */ <del> protected void handleRestricted(File directory, int depth, Collection results) throws IOException { <add> protected void handleRestricted(File directory, int depth, Collection<?> results) throws IOException { <ide> // do nothing - overridable by subclass <ide> } <ide> <ide> * @param results the collection of result objects, may be updated <ide> * @throws IOException if an I/O Error occurs <ide> */ <del> protected void handleDirectoryEnd(File directory, int depth, Collection results) throws IOException { <add> protected void handleDirectoryEnd(File directory, int depth, Collection<?> results) throws IOException { <ide> // do nothing - overridable by subclass <ide> } <ide> <ide> * @param results the collection of result objects, may be updated <ide> * @throws IOException if an I/O Error occurs <ide> */ <del> protected void handleEnd(Collection results) throws IOException { <add> protected void handleEnd(Collection<?> results) throws IOException { <ide> // do nothing - overridable by subclass <ide> } <ide>
Java
mit
46354d8a931944cab7e5a674f864363d90965d04
0
Implosions/BullyBot
package pugbot.core.commands; import net.dv8tion.jda.core.entities.Member; import net.dv8tion.jda.core.entities.Message; import pugbot.Utils; import pugbot.core.entities.Game; import pugbot.core.entities.QueueManager; import pugbot.core.entities.Game.GameStatus; import pugbot.core.exceptions.BadArgumentsException; import pugbot.core.exceptions.InvalidUseException; public class CmdSwapPlayers extends Command { @Override public Message execCommand(Member caller, String[] args) { if(args.length != 2){ throw new BadArgumentsException(); } QueueManager qm = server.getQueueManager(); Member p1 = server.getMember(args[0]); Member p2 = server.getMember(args[1]); Game game = qm.getPlayersGame(p1); if(game == null){ throw new InvalidUseException(String.format("%s is not in-game", p1.getEffectiveName())); } if(!(game.isCaptain(caller) || server.isAdmin(caller))){ throw new InvalidUseException("You must be a captain or admin to use this command"); } if(game.getStatus() != GameStatus.PLAYING){ throw new InvalidUseException("Game must not be currently picking or finished"); } if(!game.containsPlayer(p2)){ throw new InvalidUseException("Players are not in the same game"); } if(game.isCaptain(p1) || game.isCaptain(p2)){ throw new InvalidUseException("Captains cannot be swapped"); } if(game.getTeam(p1) == game.getTeam(p2)){ throw new InvalidUseException("Players must be on different teams"); } game.swapPlayers(p1, p2); return Utils.createMessage( String.format("`%s has been swapped with %s`", p1.getEffectiveName(), p2.getEffectiveName())); } @Override public boolean isAdminRequired() { return false; } @Override public boolean isGlobalCommand() { return false; } @Override public String getName() { return "SwapPlayers"; } @Override public String getDescription() { return "Swaps a player on each team in a game"; } @Override public String getHelp() { return getBaseCommand() + " <player1> <player2>"; } }
src/main/java/pugbot/core/commands/CmdSwapPlayers.java
package pugbot.core.commands; import net.dv8tion.jda.core.entities.Member; import net.dv8tion.jda.core.entities.Message; import pugbot.Utils; import pugbot.core.entities.Game; import pugbot.core.entities.QueueManager; import pugbot.core.entities.Game.GameStatus; import pugbot.core.exceptions.BadArgumentsException; import pugbot.core.exceptions.InvalidUseException; public class CmdSwapPlayers extends Command { @Override public Message execCommand(Member caller, String[] args) { if(args.length != 2){ throw new BadArgumentsException(); } QueueManager qm = server.getQueueManager(); Member p1 = server.getMember(args[0]); Member p2 = server.getMember(args[1]); Game game = qm.getPlayersGame(p1); if(game == null){ throw new InvalidUseException(String.format("%s is not in-game", p1.getEffectiveName())); } if(!(game.isCaptain(caller) || server.isAdmin(caller))){ throw new InvalidUseException("You must be a captain or admin to use this command"); } if(game.getStatus() != GameStatus.PLAYING){ throw new InvalidUseException("Game must not be currently picking or finished"); } if(!game.containsPlayer(p2)){ throw new InvalidUseException("Players are not in the same game"); } if(game.isCaptain(p1) || game.isCaptain(p2)){ throw new InvalidUseException("Captains cannot be swapped"); } if(game.getTeam(p1) == game.getTeam(p2)){ throw new InvalidUseException("Players must be on different teaams"); } game.swapPlayers(p1, p2); return Utils.createMessage( String.format("`%s has been swapped with %s`", p1.getEffectiveName(), p2.getEffectiveName())); } @Override public boolean isAdminRequired() { return false; } @Override public boolean isGlobalCommand() { return false; } @Override public String getName() { return "SwapPlayers"; } @Override public String getDescription() { return "Swaps a player on each team in a game"; } @Override public String getHelp() { return getBaseCommand() + " <player1> <player2>"; } }
Fix typo
src/main/java/pugbot/core/commands/CmdSwapPlayers.java
Fix typo
<ide><path>rc/main/java/pugbot/core/commands/CmdSwapPlayers.java <ide> } <ide> <ide> if(game.getTeam(p1) == game.getTeam(p2)){ <del> throw new InvalidUseException("Players must be on different teaams"); <add> throw new InvalidUseException("Players must be on different teams"); <ide> } <ide> <ide> game.swapPlayers(p1, p2);
JavaScript
mit
2db79fa9c26493e650c241aa3c23bc82cf881a9b
0
steverydz/build-url
describe('buildUrl', function () { var buildUrl = require('../dist/build-url'); it('should be defined', function () { expect(buildUrl).toBeDefined(); }); it('should return undefined if called with no arguments', function () { expect(buildUrl()).toBe(undefined); }); it('should return a string if called with an argument', function () { expect(typeof(buildUrl('something'))).toEqual('string'); }); it('should append a path when passed as an option', function () { expect(buildUrl('http://example.com', { path: 'about/me' })).toEqual('http://example.com/about/me'); }); it('should append a path when passed an option with a leading "/"', function () { expect(buildUrl('http://example.com', { path: '/about/me' })).toEqual('http://example.com/about/me'); }); it('should append a query string when passed as an option', function () { expect(buildUrl('http://example.com', { queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('http://example.com?foo=bar&bar=baz'); }); it('should transform an array to a comma separated list if part of queryParams', function () { expect(buildUrl('http://example.com', { queryParams: { foo: 'bar', bar: ['one', 'two', 'three'] } })).toEqual('http://example.com?foo=bar&bar=one%2Ctwo%2Cthree'); }); it('should append a fragment identifier when passed as an option', function () { expect(buildUrl('http://example.com', { hash: 'contact' })).toEqual('http://example.com#contact'); }); it('should append a path and a query string when passed as options', function () { expect(buildUrl('http://example.com', { path: 'about/me', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('http://example.com/about/me?foo=bar&bar=baz'); }); it('should append a path and a fragment identifier when passed as options', function () { expect(buildUrl('http://example.com', { path: 'about/me', hash: 'contact' })).toEqual('http://example.com/about/me#contact'); }); it('should append a path, query string and a fragment identifier when passed as options', function () { 
expect(buildUrl('http://example.com', { path: 'about/me', hash: 'contact', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('http://example.com/about/me?foo=bar&bar=baz#contact'); }); it('should append a query string and a fragment identifier when passed as options', function () { expect(buildUrl('http://example.com', { hash: 'contact', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('http://example.com?foo=bar&bar=baz#contact'); }); it('should return only the query string when URL parameter is an empty string', function () { expect(buildUrl('', { queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('?foo=bar&bar=baz'); }); it('should return only the query string when URL parameter is null', function () { expect(buildUrl(null, { queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('?foo=bar&bar=baz'); }); it('should return only the query string when URL parameter is not present', function () { expect(buildUrl({ queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('?foo=bar&bar=baz'); }); it('should return only the hash when URL parameter is an empty string', function () { expect(buildUrl('', { hash: 'about' })).toEqual('#about'); }); it('should return only the hash when URL parameter is null', function () { expect(buildUrl(null, { hash: 'about' })).toEqual('#about'); }); it('should return only the has when URL parameter is not present', function () { expect(buildUrl({ hash: 'about' })).toEqual('#about'); }); it('should return only the path when URL parameter is an empty string', function () { expect(buildUrl('', { path: 'contact' })).toEqual('/contact'); }); it('should return only the path when URL parameter is null', function () { expect(buildUrl(null, { path: 'contact' })).toEqual('/contact'); }); it('should return only the path when URL parameter is not present', function () { expect(buildUrl({ path: 'contact' })).toEqual('/contact'); }); it('should return only formatted options when URL parameter is an empty string', function () { expect(buildUrl('', { 
path: 'contact', hash: 'about', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('/contact?foo=bar&bar=baz#about'); }); it('should return only formatted options when URL parameter is null', function () { expect(buildUrl(null, { path: 'contact', hash: 'about', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('/contact?foo=bar&bar=baz#about'); }); it('should return only formatted options when URL parameter is not present', function () { expect(buildUrl({ path: 'contact', hash: 'about', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('/contact?foo=bar&bar=baz#about'); }); it('should not append a queryParam if it\'s undefined', function () { expect(buildUrl('http://example.com', { queryParams: { foo: 'bar', bar: void 0 } })).toEqual('http://example.com?foo=bar'); }); it('should not show a double slash with domain', function () { expect(buildUrl('http://example.com/', { path: '/contact' })).toEqual('http://example.com/contact'); }); it('should encode query parameters', () => { const queryParams = { param0: 'Sanford & Sons', param1: 'O\'Reilly', param2: 'Hawai`i', param3: '"Bull" Connor', param4: 'Lech Wałęsa', param5: 'Herr Müller', }; const url = buildUrl('https://example.com', { queryParams }); const queryParamString = Object .values(queryParams) .map((param, i) => `param${i}=${encodeURIComponent(param)}`) .join('&'); expect(url).toEqual(`https://example.com?${queryParamString}`); }); });
spec/build-url-spec.js
describe('buildUrl', function () { var buildUrl = require('../dist/build-url'); it('should be defined', function () { expect(buildUrl).toBeDefined(); }); it('should return undefined if called with no arguments', function () { expect(buildUrl()).toBe(undefined); }); it('should return a string if called with an argument', function () { expect(typeof(buildUrl('something'))).toEqual('string'); }); it('should append a path when passed as an option', function () { expect(buildUrl('http://example.com', { path: 'about/me' })).toEqual('http://example.com/about/me'); }); it('should append a path when passed an option with a leading "/"', function () { expect(buildUrl('http://example.com', { path: '/about/me' })).toEqual('http://example.com/about/me'); }); it('should append a query string when passed as an option', function () { expect(buildUrl('http://example.com', { queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('http://example.com?foo=bar&bar=baz'); }); it('should transform an array to a comma separated list if part of queryParams', function () { expect(buildUrl('http://example.com', { queryParams: { foo: 'bar', bar: ['one', 'two', 'three'] } })).toEqual('http://example.com?foo=bar&bar=one%2Ctwo%2Cthree'); }); it('should append a fragment identifier when passed as an option', function () { expect(buildUrl('http://example.com', { hash: 'contact' })).toEqual('http://example.com#contact'); }); it('should append a path and a query string when passed as options', function () { expect(buildUrl('http://example.com', { path: 'about/me', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('http://example.com/about/me?foo=bar&bar=baz'); }); it('should append a path and a fragment identifier when passed as options', function () { expect(buildUrl('http://example.com', { path: 'about/me', hash: 'contact' })).toEqual('http://example.com/about/me#contact'); }); it('should append a path, query string and a fragment identifier when passed as options', function () { 
expect(buildUrl('http://example.com', { path: 'about/me', hash: 'contact', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('http://example.com/about/me?foo=bar&bar=baz#contact'); }); it('should append a query string and a fragment identifier when passed as options', function () { expect(buildUrl('http://example.com', { hash: 'contact', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('http://example.com?foo=bar&bar=baz#contact'); }); it('should return only the query string when URL parameter is an empty string', function () { expect(buildUrl('', { queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('?foo=bar&bar=baz'); }); it('should return only the query string when URL parameter is null', function () { expect(buildUrl(null, { queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('?foo=bar&bar=baz'); }); it('should return only the query string when URL parameter is not present', function () { expect(buildUrl({ queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('?foo=bar&bar=baz'); }); it('should return only the hash when URL parameter is an empty string', function () { expect(buildUrl('', { hash: 'about' })).toEqual('#about'); }); it('should return only the hash when URL parameter is null', function () { expect(buildUrl(null, { hash: 'about' })).toEqual('#about'); }); it('should return only the has when URL parameter is not present', function () { expect(buildUrl({ hash: 'about' })).toEqual('#about'); }); it('should return only the path when URL parameter is an empty string', function () { expect(buildUrl('', { path: 'contact' })).toEqual('/contact'); }); it('should return only the path when URL parameter is null', function () { expect(buildUrl(null, { path: 'contact' })).toEqual('/contact'); }); it('should return only the path when URL parameter is not present', function () { expect(buildUrl({ path: 'contact' })).toEqual('/contact'); }); it('should return only formatted options when URL parameter is an empty string', function () { expect(buildUrl('', { 
path: 'contact', hash: 'about', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('/contact?foo=bar&bar=baz#about'); }); it('should return only formatted options when URL parameter is null', function () { expect(buildUrl(null, { path: 'contact', hash: 'about', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('/contact?foo=bar&bar=baz#about'); }); it('should return only formatted options when URL parameter is not present', function () { expect(buildUrl({ path: 'contact', hash: 'about', queryParams: { foo: 'bar', bar: 'baz' } })).toEqual('/contact?foo=bar&bar=baz#about'); }); it('should not append a queryParam if it\'s undefined', function () { expect(buildUrl('http://example.com', { queryParams: { foo: 'bar', bar: void 0 } })).toEqual('http://example.com?foo=bar'); }); it('should not show a double slash with domain', function () { expect(buildUrl('http://example.com/', { path: '/contact' })).toEqual('http://example.com/contact'); }); it('should encode query parameters', () => { const queryParams = { param0: 'Sanford & Sons', param1: 'O\'Reilly', param2: 'Hawai`i', param3: '"Bull" Connor', param4: 'Lech Wałęsa', param5: 'Herr Müller', }; const url = buildUrl('https://example.com', { queryParams }); const queryParamString = Object .values(queryParams) .map((param, i) => `param${i}=${encodeURIComponent(param)}`) .join('&') expect(url).toEqual(`https://example.com?${queryParamString}`); }) });
Add missing semicolons
spec/build-url-spec.js
Add missing semicolons
<ide><path>pec/build-url-spec.js <ide> Object <ide> .values(queryParams) <ide> .map((param, i) => `param${i}=${encodeURIComponent(param)}`) <del> .join('&') <add> .join('&'); <ide> <ide> expect(url).toEqual(`https://example.com?${queryParamString}`); <del> }) <add> }); <ide> });
Java
apache-2.0
fdca03aefa1fc72ebf1717593f05e5a5d3e1541b
0
xzel23/meja,xzel23/meja
package com.dua3.meja.text; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Locale; import java.util.function.BiFunction; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import com.dua3.meja.model.Cell; import com.dua3.meja.model.RefOption; import com.dua3.meja.model.Workbook; import com.dua3.meja.util.MejaHelper; import com.dua3.utility.io.FileSystemView; public class FormatTest { private Workbook workbook; @Before public void initialize() throws Exception { Class<? extends FormatTest> clazz = getClass(); String fileName = clazz.getSimpleName()+".xlsx"; // the FileSystemView is needed in case the test is run from a jar file try (FileSystemView fsv = FileSystemView.create(clazz)) { Path wbPath = fsv.resolve(fileName); workbook = MejaHelper.openWorkbook(wbPath); } catch (IOException e) { // WORKAROUND - If anyone knows a less hackish solution for this, please send a pull request! // When tests are run from within gradle, resources are placed in another location (outside of classpath). // In that case, we try to guess the correct location of the resource files to be able to perform the tests. System.err.println("Resource not found! "+e.getMessage()); String pathStr = clazz.getResource(".").getPath(); // When started from within Bash on windows, a slash is prepended to the path returned by getResource. // We have to remove it again. if (pathStr.matches("^/[a-zA-Z]:/")) { pathStr = pathStr.replaceFirst("^/", ""); } // Change the path so that it points to the probable resource dir. String s = pathStr.replaceAll("/build/classes/java/test/", "/build/resources/test/"); // Then try to load the workbook from there. 
Path path = Paths.get(s); try (FileSystemView fsv = FileSystemView.create(path)) { Path wbPath = fsv.resolve(fileName); workbook = MejaHelper.openWorkbook(wbPath); } } } @After public void cleanup() throws IOException { workbook.close(); workbook = null; } /** * Test formatting applied when calling Cell.toString(). * <p> * The workboook 'FormatTest.xlsx' is read from the classpath. Each sheet contains for columns used for testing: * <ul> * <li> description of what is being tested in the current row * <li> value with an applied format to be tested * <li> the expected result as a {@code String} * <li> an optional remark - if it contains the text {@literal #IGNORE#}, the row is skipped * </ul> * </p> */ @Test public void testFormat_getAsText() { testHelper((cell,locale) -> cell.getAsText(locale).toString()); } @Test public void testFormat_toString() { testHelper((cell,locale) -> cell.toString(locale)); } /** * Test formatting. * <p> * The workboook 'FormatTest.xlsx' is read from the classpath. 
Each sheet contains for columns used for testing: * <ul> * <li> A Flag to indicate ignored test cases * <li> description of what is being tested in the current row * <li> the locale to use * <li> value with an applied format to be tested * <li> the expected result as a {@code String} * <li> an optional remark * </ul> * </p> * @param extract a lambda expression maps (Cell, Locale) -> (formatted cell content) */ public void testHelper(BiFunction<Cell,Locale,String> extract) { workbook.sheets() .peek(s -> System.out.format("Processing sheet '%s'%n", s.getSheetName())) .forEach(s -> { s.rows() .skip(1) .forEach(r -> { boolean ignored = r.getCell(0).toString().equalsIgnoreCase("x"); if (ignored) { System.out.format("line %d ignored%n", r.getRowNumber()+1); } else { String description = r.getCell(1).toString(); Cell languageCell = r.getCell(2); String languageTag = languageCell.toString(); Locale locale = Locale.forLanguageTag(languageTag); if (!languageTag.equals(locale.toLanguageTag())) { throw new IllegalStateException("Check language tag in cell "+languageCell.getCellRef(RefOption.WITH_SHEET)); } String actual = extract.apply(r.getCell(3), locale); String expected = r.getCell(4).toString(); Assert.assertEquals(String.format("in line %d: %s - expected '%s', actual '%s'", r.getRowNumber()+1, description, expected, actual), expected, actual); } }); }); } }
meja/src/test/java/com/dua3/meja/text/FormatTest.java
package com.dua3.meja.text; import java.io.IOException; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Locale; import java.util.function.BiFunction; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import com.dua3.meja.model.Cell; import com.dua3.meja.model.RefOption; import com.dua3.meja.model.Workbook; import com.dua3.meja.util.MejaHelper; import com.dua3.utility.io.FileSystemView; public class FormatTest { private Workbook workbook; @Before public void initialize() throws Exception { Class<? extends FormatTest> clazz = getClass(); String fileName = clazz.getSimpleName()+".xlsx"; // the FileSystemView is needed in case the test is run from a jar file try (FileSystemView fsv = FileSystemView.create(clazz)) { Path wbPath = fsv.resolve(fileName); workbook = MejaHelper.openWorkbook(wbPath); } catch (IOException e) { // XXX when run from within gradle, resources are placed in another location System.err.println("Resource not found! "+e.getMessage()); String s = clazz.getResource(".").getPath().replaceFirst("^/", "").replaceAll("/build/classes/java/test/", "/build/resources/test/"); Path path = Paths.get(s); try (FileSystemView fsv = FileSystemView.create(path)) { Path wbPath = fsv.resolve(fileName); workbook = MejaHelper.openWorkbook(wbPath); } } } @After public void cleanup() throws IOException { workbook.close(); workbook = null; } /** * Test formatting applied when calling Cell.toString(). * <p> * The workboook 'FormatTest.xlsx' is read from the classpath. 
Each sheet contains for columns used for testing: * <ul> * <li> description of what is being tested in the current row * <li> value with an applied format to be tested * <li> the expected result as a {@code String} * <li> an optional remark - if it contains the text {@literal #IGNORE#}, the row is skipped * </ul> * </p> */ @Test public void testFormat_getAsText() { testHelper((cell,locale) -> cell.getAsText(locale).toString()); } @Test public void testFormat_toString() { testHelper((cell,locale) -> cell.toString(locale)); } /** * Test formatting. * <p> * The workboook 'FormatTest.xlsx' is read from the classpath. Each sheet contains for columns used for testing: * <ul> * <li> A Flag to indicate ignored test cases * <li> description of what is being tested in the current row * <li> the locale to use * <li> value with an applied format to be tested * <li> the expected result as a {@code String} * <li> an optional remark * </ul> * </p> * @param extract a lambda expression maps (Cell, Locale) -> (formatted cell content) */ public void testHelper(BiFunction<Cell,Locale,String> extract) { workbook.sheets() .peek(s -> System.out.format("Processing sheet '%s'%n", s.getSheetName())) .forEach(s -> { s.rows() .skip(1) .forEach(r -> { boolean ignored = r.getCell(0).toString().equalsIgnoreCase("x"); if (ignored) { System.out.format("line %d ignored%n", r.getRowNumber()+1); } else { String description = r.getCell(1).toString(); Cell languageCell = r.getCell(2); String languageTag = languageCell.toString(); Locale locale = Locale.forLanguageTag(languageTag); if (!languageTag.equals(locale.toLanguageTag())) { throw new IllegalStateException("Check language tag in cell "+languageCell.getCellRef(RefOption.WITH_SHEET)); } String actual = extract.apply(r.getCell(3), locale); String expected = r.getCell(4).toString(); Assert.assertEquals(String.format("in line %d: %s - expected '%s', actual '%s'", r.getRowNumber()+1, description, expected, actual), expected, actual); } }); }); } }
workaround for the workaround to load test data when started from gradle
meja/src/test/java/com/dua3/meja/text/FormatTest.java
workaround for the workaround to load test data when started from gradle
<ide><path>eja/src/test/java/com/dua3/meja/text/FormatTest.java <ide> Path wbPath = fsv.resolve(fileName); <ide> workbook = MejaHelper.openWorkbook(wbPath); <ide> } catch (IOException e) { <del> // XXX when run from within gradle, resources are placed in another location <add> // WORKAROUND - If anyone knows a less hackish solution for this, please send a pull request! <add> <add> // When tests are run from within gradle, resources are placed in another location (outside of classpath). <add> // In that case, we try to guess the correct location of the resource files to be able to perform the tests. <ide> System.err.println("Resource not found! "+e.getMessage()); <del> String s = clazz.getResource(".").getPath().replaceFirst("^/", "").replaceAll("/build/classes/java/test/", "/build/resources/test/"); <add> String pathStr = clazz.getResource(".").getPath(); <add> <add> // When started from within Bash on windows, a slash is prepended to the path returned by getResource. <add> // We have to remove it again. <add> if (pathStr.matches("^/[a-zA-Z]:/")) { <add> pathStr = pathStr.replaceFirst("^/", ""); <add> } <add> <add> // Change the path so that it points to the probable resource dir. <add> String s = pathStr.replaceAll("/build/classes/java/test/", "/build/resources/test/"); <add> <add> // Then try to load the workbook from there. <ide> Path path = Paths.get(s); <ide> try (FileSystemView fsv = FileSystemView.create(path)) { <ide> Path wbPath = fsv.resolve(fileName);
Java
apache-2.0
6ea09922139e2284b6cac92e1c95b31699ef5248
0
strapdata/elassandra,strapdata/elassandra,strapdata/elassandra,strapdata/elassandra,vroyer/elassandra,vroyer/elassandra,strapdata/elassandra,vroyer/elassandra
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.indexlifecycle; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.indexlifecycle.DeleteAction; import org.elasticsearch.xpack.core.indexlifecycle.ForceMergeAction; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleAction; import org.elasticsearch.xpack.core.indexlifecycle.LifecyclePolicy; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleSettings; import org.elasticsearch.xpack.core.indexlifecycle.Phase; import org.elasticsearch.xpack.core.indexlifecycle.TimeseriesLifecycleType; import org.elasticsearch.xpack.core.indexlifecycle.action.PutLifecycleAction; import org.junit.Before; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; import static org.elasticsearch.client.Requests.clusterHealthRequest; import static org.elasticsearch.client.Requests.createIndexRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static 
org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.core.IsNull.nullValue; @ESIntegTestCase.ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class IndexLifecycleInitialisationIT extends ESIntegTestCase { private Settings settings; private LifecyclePolicy lifecyclePolicy; @Override protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal)); settings.put(XPackSettings.INDEX_LIFECYCLE_ENABLED.getKey(), true); settings.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false); settings.put(XPackSettings.SECURITY_ENABLED.getKey(), false); settings.put(XPackSettings.WATCHER_ENABLED.getKey(), false); settings.put(XPackSettings.MONITORING_ENABLED.getKey(), false); settings.put(XPackSettings.GRAPH_ENABLED.getKey(), false); settings.put(XPackSettings.LOGSTASH_ENABLED.getKey(), false); settings.put(LifecycleSettings.LIFECYCLE_POLL_INTERVAL, "1s"); return settings.build(); } @Override protected boolean ignoreExternalCluster() { return true; } @Override protected Settings transportClientSettings() { Settings.Builder settings = Settings.builder().put(super.transportClientSettings()); settings.put(XPackSettings.INDEX_LIFECYCLE_ENABLED.getKey(), true); settings.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false); settings.put(XPackSettings.SECURITY_ENABLED.getKey(), false); settings.put(XPackSettings.WATCHER_ENABLED.getKey(), false); settings.put(XPackSettings.MONITORING_ENABLED.getKey(), false); settings.put(XPackSettings.GRAPH_ENABLED.getKey(), false); settings.put(XPackSettings.LOGSTASH_ENABLED.getKey(), false); return settings.build(); } @Override protected Collection<Class<? 
extends Plugin>> nodePlugins() { return Arrays.asList(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class); } @Override protected Collection<Class<? extends Plugin>> transportClientPlugins() { return nodePlugins(); } @Before public void init() { settings = Settings.builder().put(indexSettings()).put(SETTING_NUMBER_OF_SHARDS, 1) .put(SETTING_NUMBER_OF_REPLICAS, 0).put(LifecycleSettings.LIFECYCLE_NAME, "test").build(); Map<String, Phase> phases = new HashMap<>(); Map<String, LifecycleAction> warmPhaseActions = Collections.singletonMap(ForceMergeAction.NAME, new ForceMergeAction(10000, false)); phases.put("warm", new Phase("warm", TimeValue.timeValueSeconds(2), warmPhaseActions)); Map<String, LifecycleAction> deletePhaseActions = Collections.singletonMap(DeleteAction.NAME, new DeleteAction()); phases.put("delete", new Phase("delete", TimeValue.timeValueSeconds(3), deletePhaseActions)); lifecyclePolicy = new LifecyclePolicy(TimeseriesLifecycleType.INSTANCE, "test", phases); } public void testSingleNodeCluster() throws Exception { // start master node logger.info("Starting server1"); final String server_1 = internalCluster().startNode(); final String node1 = getLocalNodeId(server_1); logger.info("Creating lifecycle [test_lifecycle]"); PutLifecycleAction.Request putLifecycleRequest = new PutLifecycleAction.Request(lifecyclePolicy); PutLifecycleAction.Response putLifecycleResponse = client().execute(PutLifecycleAction.INSTANCE, putLifecycleRequest).get(); assertAcked(putLifecycleResponse); logger.info("Creating index [test]"); CreateIndexResponse createIndexResponse = client().admin().indices().create(createIndexRequest("test").settings(settings)) .actionGet(); assertAcked(createIndexResponse); ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); RoutingNode routingNodeEntry1 = clusterState.getRoutingNodes().node(node1); assertThat(routingNodeEntry1.numberOfShardsWithState(STARTED), equalTo(1)); assertBusy(() -> { 
assertEquals(false, client().admin().indices().prepareExists("test").get().isExists()); }); } public void testMasterDedicatedDataDedicated() throws Exception { // start master node logger.info("Starting sever1"); internalCluster().startMasterOnlyNode(); // start data node logger.info("Starting sever1"); final String server_2 = internalCluster().startDataOnlyNode(); final String node2 = getLocalNodeId(server_2); logger.info("Creating lifecycle [test_lifecycle]"); PutLifecycleAction.Request putLifecycleRequest = new PutLifecycleAction.Request(lifecyclePolicy); PutLifecycleAction.Response putLifecycleResponse = client().execute(PutLifecycleAction.INSTANCE, putLifecycleRequest).get(); assertAcked(putLifecycleResponse); logger.info("Creating index [test]"); CreateIndexResponse createIndexResponse = client().admin().indices().create(createIndexRequest("test").settings(settings)) .actionGet(); assertAcked(createIndexResponse); ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); RoutingNode routingNodeEntry1 = clusterState.getRoutingNodes().node(node2); assertThat(routingNodeEntry1.numberOfShardsWithState(STARTED), equalTo(1)); assertBusy(() -> { assertEquals(false, client().admin().indices().prepareExists("test").get().isExists()); }); } // NORELEASE re-enable when force merge action bug is fixed @AwaitsFix(bugUrl = "Fails because force merge action expect shards to be assigned") public void testMasterFailover() throws Exception { // start one server logger.info("Starting sever1"); final String server_1 = internalCluster().startNode(); final String node1 = getLocalNodeId(server_1); logger.info("Creating lifecycle [test_lifecycle]"); PutLifecycleAction.Request putLifecycleRequest = new PutLifecycleAction.Request(lifecyclePolicy); PutLifecycleAction.Response putLifecycleResponse = client().execute(PutLifecycleAction.INSTANCE, putLifecycleRequest).get(); assertAcked(putLifecycleResponse); logger.info("Creating index [test]"); 
CreateIndexResponse createIndexResponse = client().admin().indices().create(createIndexRequest("test").settings(settings)) .actionGet(); assertAcked(createIndexResponse); ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); RoutingNode routingNodeEntry1 = clusterState.getRoutingNodes().node(node1); assertThat(routingNodeEntry1.numberOfShardsWithState(STARTED), equalTo(1)); logger.info("Starting server2"); // start another server internalCluster().startNode(); // first wait for 2 nodes in the cluster logger.info("Waiting for replicas to be assigned"); ClusterHealthResponse clusterHealth = client().admin().cluster() .health(clusterHealthRequest().waitForGreenStatus().waitForNodes("2")).actionGet(); logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); logger.info("Closing server1"); // kill the first server internalCluster().stopCurrentMasterNode(); assertBusy(() -> { assertEquals(false, client().admin().indices().prepareExists("test").get().isExists()); }); } private String getLocalNodeId(String name) { TransportService transportService = internalCluster().getInstance(TransportService.class, name); String nodeId = transportService.getLocalNode().getId(); assertThat(nodeId, not(nullValue())); return nodeId; } }
x-pack/plugin/index-lifecycle/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleInitialisationIT.java
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License; * you may not use this file except in compliance with the Elastic License. */ package org.elasticsearch.xpack.indexlifecycle; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.analysis.common.CommonAnalysisPlugin; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.routing.RoutingNode; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.indexlifecycle.DeleteAction; import org.elasticsearch.xpack.core.indexlifecycle.ForceMergeAction; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleAction; import org.elasticsearch.xpack.core.indexlifecycle.LifecyclePolicy; import org.elasticsearch.xpack.core.indexlifecycle.LifecycleSettings; import org.elasticsearch.xpack.core.indexlifecycle.Phase; import org.elasticsearch.xpack.core.indexlifecycle.TimeseriesLifecycleType; import org.elasticsearch.xpack.core.indexlifecycle.action.PutLifecycleAction; import org.junit.Before; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.Map; import static org.elasticsearch.client.Requests.clusterHealthRequest; import static 
org.elasticsearch.client.Requests.createIndexRequest; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.core.IsNull.nullValue; @ESIntegTestCase.ClusterScope(scope = Scope.TEST, numDataNodes = 0) public class IndexLifecycleInitialisationIT extends ESIntegTestCase { private Settings settings; private LifecyclePolicy lifecyclePolicy; @Override protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder settings = Settings.builder().put(super.nodeSettings(nodeOrdinal)); settings.put(XPackSettings.INDEX_LIFECYCLE_ENABLED.getKey(), true); settings.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false); settings.put(XPackSettings.SECURITY_ENABLED.getKey(), false); settings.put(XPackSettings.WATCHER_ENABLED.getKey(), false); settings.put(XPackSettings.MONITORING_ENABLED.getKey(), false); settings.put(XPackSettings.GRAPH_ENABLED.getKey(), false); settings.put(XPackSettings.LOGSTASH_ENABLED.getKey(), false); settings.put(LifecycleSettings.LIFECYCLE_POLL_INTERVAL, "1s"); return settings.build(); } @Override protected boolean ignoreExternalCluster() { return true; } @Override protected Settings transportClientSettings() { Settings.Builder settings = Settings.builder().put(super.transportClientSettings()); settings.put(XPackSettings.INDEX_LIFECYCLE_ENABLED.getKey(), true); settings.put(XPackSettings.MACHINE_LEARNING_ENABLED.getKey(), false); settings.put(XPackSettings.SECURITY_ENABLED.getKey(), false); settings.put(XPackSettings.WATCHER_ENABLED.getKey(), false); settings.put(XPackSettings.MONITORING_ENABLED.getKey(), false); 
settings.put(XPackSettings.GRAPH_ENABLED.getKey(), false); settings.put(XPackSettings.LOGSTASH_ENABLED.getKey(), false); return settings.build(); } @Override protected Collection<Class<? extends Plugin>> nodePlugins() { return Arrays.asList(LocalStateCompositeXPackPlugin.class, IndexLifecycle.class); } @Override protected Collection<Class<? extends Plugin>> transportClientPlugins() { return nodePlugins(); } @Before public void init() { settings = Settings.builder().put(indexSettings()).put(SETTING_NUMBER_OF_SHARDS, 1) .put(SETTING_NUMBER_OF_REPLICAS, 0).put(LifecycleSettings.LIFECYCLE_NAME, "test").build(); Map<String, Phase> phases = new HashMap<>(); Map<String, LifecycleAction> warmPhaseActions = Collections.singletonMap(ForceMergeAction.NAME, new ForceMergeAction(10000)); phases.put("warm", new Phase("warm", TimeValue.timeValueSeconds(2), warmPhaseActions)); Map<String, LifecycleAction> deletePhaseActions = Collections.singletonMap(DeleteAction.NAME, new DeleteAction()); phases.put("delete", new Phase("delete", TimeValue.timeValueSeconds(3), deletePhaseActions)); lifecyclePolicy = new LifecyclePolicy(TimeseriesLifecycleType.INSTANCE, "test", phases); } public void testSingleNodeCluster() throws Exception { // start master node logger.info("Starting server1"); final String server_1 = internalCluster().startNode(); final String node1 = getLocalNodeId(server_1); logger.info("Creating lifecycle [test_lifecycle]"); PutLifecycleAction.Request putLifecycleRequest = new PutLifecycleAction.Request(lifecyclePolicy); PutLifecycleAction.Response putLifecycleResponse = client().execute(PutLifecycleAction.INSTANCE, putLifecycleRequest).get(); assertAcked(putLifecycleResponse); logger.info("Creating index [test]"); CreateIndexResponse createIndexResponse = client().admin().indices().create(createIndexRequest("test").settings(settings)) .actionGet(); assertAcked(createIndexResponse); ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); 
RoutingNode routingNodeEntry1 = clusterState.getRoutingNodes().node(node1); assertThat(routingNodeEntry1.numberOfShardsWithState(STARTED), equalTo(1)); assertBusy(() -> { assertEquals(false, client().admin().indices().prepareExists("test").get().isExists()); }); } public void testMasterDedicatedDataDedicated() throws Exception { // start master node logger.info("Starting sever1"); internalCluster().startMasterOnlyNode(); // start data node logger.info("Starting sever1"); final String server_2 = internalCluster().startDataOnlyNode(); final String node2 = getLocalNodeId(server_2); logger.info("Creating lifecycle [test_lifecycle]"); PutLifecycleAction.Request putLifecycleRequest = new PutLifecycleAction.Request(lifecyclePolicy); PutLifecycleAction.Response putLifecycleResponse = client().execute(PutLifecycleAction.INSTANCE, putLifecycleRequest).get(); assertAcked(putLifecycleResponse); logger.info("Creating index [test]"); CreateIndexResponse createIndexResponse = client().admin().indices().create(createIndexRequest("test").settings(settings)) .actionGet(); assertAcked(createIndexResponse); ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); RoutingNode routingNodeEntry1 = clusterState.getRoutingNodes().node(node2); assertThat(routingNodeEntry1.numberOfShardsWithState(STARTED), equalTo(1)); assertBusy(() -> { assertEquals(false, client().admin().indices().prepareExists("test").get().isExists()); }); } // NORELEASE re-enable when force merge action bug is fixed @AwaitsFix(bugUrl = "Fails because force merge action expect shards to be assigned") public void testMasterFailover() throws Exception { // start one server logger.info("Starting sever1"); final String server_1 = internalCluster().startNode(); final String node1 = getLocalNodeId(server_1); logger.info("Creating lifecycle [test_lifecycle]"); PutLifecycleAction.Request putLifecycleRequest = new PutLifecycleAction.Request(lifecyclePolicy); PutLifecycleAction.Response 
putLifecycleResponse = client().execute(PutLifecycleAction.INSTANCE, putLifecycleRequest).get(); assertAcked(putLifecycleResponse); logger.info("Creating index [test]"); CreateIndexResponse createIndexResponse = client().admin().indices().create(createIndexRequest("test").settings(settings)) .actionGet(); assertAcked(createIndexResponse); ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); RoutingNode routingNodeEntry1 = clusterState.getRoutingNodes().node(node1); assertThat(routingNodeEntry1.numberOfShardsWithState(STARTED), equalTo(1)); logger.info("Starting server2"); // start another server internalCluster().startNode(); // first wait for 2 nodes in the cluster logger.info("Waiting for replicas to be assigned"); ClusterHealthResponse clusterHealth = client().admin().cluster() .health(clusterHealthRequest().waitForGreenStatus().waitForNodes("2")).actionGet(); logger.info("Done Cluster Health, status {}", clusterHealth.getStatus()); assertThat(clusterHealth.isTimedOut(), equalTo(false)); assertThat(clusterHealth.getStatus(), equalTo(ClusterHealthStatus.GREEN)); logger.info("Closing server1"); // kill the first server internalCluster().stopCurrentMasterNode(); assertBusy(() -> { assertEquals(false, client().admin().indices().prepareExists("test").get().isExists()); }); } private String getLocalNodeId(String name) { TransportService transportService = internalCluster().getInstance(TransportService.class, name); String nodeId = transportService.getLocalNode().getId(); assertThat(nodeId, not(nullValue())); return nodeId; } }
Fixes compilation issue in test
x-pack/plugin/index-lifecycle/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleInitialisationIT.java
Fixes compilation issue in test
<ide><path>-pack/plugin/index-lifecycle/src/test/java/org/elasticsearch/xpack/indexlifecycle/IndexLifecycleInitialisationIT.java <ide> <ide> import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; <ide> import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; <del>import org.elasticsearch.analysis.common.CommonAnalysisPlugin; <ide> import org.elasticsearch.cluster.ClusterState; <ide> import org.elasticsearch.cluster.health.ClusterHealthStatus; <ide> import org.elasticsearch.cluster.routing.RoutingNode; <ide> import org.elasticsearch.test.ESIntegTestCase.Scope; <ide> import org.elasticsearch.transport.TransportService; <ide> import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; <del>import org.elasticsearch.xpack.core.XPackClientPlugin; <del>import org.elasticsearch.xpack.core.XPackPlugin; <ide> import org.elasticsearch.xpack.core.XPackSettings; <ide> import org.elasticsearch.xpack.core.indexlifecycle.DeleteAction; <ide> import org.elasticsearch.xpack.core.indexlifecycle.ForceMergeAction; <ide> .put(SETTING_NUMBER_OF_REPLICAS, 0).put(LifecycleSettings.LIFECYCLE_NAME, "test").build(); <ide> Map<String, Phase> phases = new HashMap<>(); <ide> <del> Map<String, LifecycleAction> warmPhaseActions = Collections.singletonMap(ForceMergeAction.NAME, new ForceMergeAction(10000)); <add> Map<String, LifecycleAction> warmPhaseActions = Collections.singletonMap(ForceMergeAction.NAME, new ForceMergeAction(10000, false)); <ide> phases.put("warm", new Phase("warm", TimeValue.timeValueSeconds(2), warmPhaseActions)); <ide> <ide> Map<String, LifecycleAction> deletePhaseActions = Collections.singletonMap(DeleteAction.NAME, new DeleteAction());
Java
bsd-3-clause
d7186625ae45ee07d6f23d2dbc0f9b08cd085e7b
0
joansmith/basex,JensErat/basex,drmacro/basex,vincentml/basex,BaseXdb/basex,vincentml/basex,JensErat/basex,ksclarke/basex,drmacro/basex,joansmith/basex,drmacro/basex,JensErat/basex,ksclarke/basex,deshmnnit04/basex,joansmith/basex,vincentml/basex,ksclarke/basex,BaseXdb/basex,JensErat/basex,ksclarke/basex,drmacro/basex,ksclarke/basex,ksclarke/basex,joansmith/basex,JensErat/basex,ksclarke/basex,joansmith/basex,vincentml/basex,vincentml/basex,dimitarp/basex,BaseXdb/basex,dimitarp/basex,dimitarp/basex,JensErat/basex,dimitarp/basex,deshmnnit04/basex,drmacro/basex,BaseXdb/basex,deshmnnit04/basex,JensErat/basex,deshmnnit04/basex,ksclarke/basex,deshmnnit04/basex,joansmith/basex,dimitarp/basex,ksclarke/basex,deshmnnit04/basex,dimitarp/basex,vincentml/basex,drmacro/basex,vincentml/basex,vincentml/basex,joansmith/basex,dimitarp/basex,drmacro/basex,deshmnnit04/basex,deshmnnit04/basex,joansmith/basex,deshmnnit04/basex,drmacro/basex,deshmnnit04/basex,vincentml/basex,JensErat/basex,drmacro/basex,dimitarp/basex,drmacro/basex,deshmnnit04/basex,joansmith/basex,JensErat/basex,vincentml/basex,ksclarke/basex,BaseXdb/basex,dimitarp/basex,vincentml/basex,vincentml/basex,dimitarp/basex,dimitarp/basex,dimitarp/basex,BaseXdb/basex,BaseXdb/basex,ksclarke/basex,JensErat/basex,joansmith/basex,deshmnnit04/basex,BaseXdb/basex,BaseXdb/basex,drmacro/basex,drmacro/basex,JensErat/basex,joansmith/basex,BaseXdb/basex,BaseXdb/basex,joansmith/basex,JensErat/basex,ksclarke/basex,BaseXdb/basex
package org.basex.core.cmd; import static org.basex.core.Text.*; import java.util.Locale; import org.basex.core.*; import org.basex.util.Util; /** * Evaluates the 'set' command and modifies database properties. * * @author BaseX Team 2005-12, BSD License * @author Christian Gruen */ public final class Set extends AGet { /** * Default constructor. * @param key property * @param value value to set (optional, depending on the property) */ public Set(final Object key, final Object value) { super(User.READ, (key instanceof Object[] ? ((Object[]) key)[0] : key).toString(), value == null ? "" : value.toString()); } @Override protected boolean run() { final String key = args[0].toUpperCase(Locale.ENGLISH); final String val = args[1]; try { String v = prop.set(key, val); if(v == null && !context.client()) { // disallow modification of database path if any database is opened if(key.equals(MainProp.DBPATH[0]) && context.datas.size() > 0) { return error(INVALID_VALUE_X_X, key, val); } v = mprop.set(key, val); } return v == null ? error(prop.unknown(key)) : info(key + COLS + v); } catch(final Exception ex) { Util.debug(ex); return error(INVALID_VALUE_X_X, key, val); } } }
src/main/java/org/basex/core/cmd/Set.java
package org.basex.core.cmd; import static org.basex.core.Text.*; import java.util.Locale; import org.basex.core.*; import org.basex.util.Util; /** * Evaluates the 'set' command and modifies database properties. * * @author BaseX Team 2005-12, BSD License * @author Christian Gruen */ public final class Set extends AGet { /** * Default constructor. * @param key property * @param value value to set (optional, depending on the property) */ public Set(final Object key, final Object value) { super(User.READ, (key instanceof Object[] ? ((Object[]) key)[0] : key).toString(), value == null ? "" : value.toString()); } @Override protected boolean run() { final String key = args[0].toUpperCase(Locale.ENGLISH); final String val = args[1]; try { String v = prop.set(key, val); if(v == null && !context.client()) { // disallow modification of database path if any database is opened if(key.equals(MainProp.DBPATH[0]) && context.datas.size() > 0) { return error(INVALID_VALUE_X_X, key, val); } v = mprop.set(key, val); } return v == null ? error(prop.unknown(key)) : info(key + COLS + v); } catch(final Exception ex) { Util.debug(ex); return error(INVALID_VALUE_X_X, key, val); } } @Override public boolean updating(final Context ctx) { // command may set options that influence other commands return true; } }
[MOD] Commands, GH-458: mark SET command as non-updating
src/main/java/org/basex/core/cmd/Set.java
[MOD] Commands, GH-458: mark SET command as non-updating
<ide><path>rc/main/java/org/basex/core/cmd/Set.java <ide> return error(INVALID_VALUE_X_X, key, val); <ide> } <ide> } <del> <del> @Override <del> public boolean updating(final Context ctx) { <del> // command may set options that influence other commands <del> return true; <del> } <ide> }
Java
apache-2.0
36b6715fadedd4641abd362fba99a4f2a34b69a4
0
nicolas-raoul/apps-android-commons,psh/apps-android-commons,commons-app/apps-android-commons,maskaravivek/apps-android-commons,akaita/apps-android-commons,sandarumk/apps-android-commons,neslihanturan/apps-android-commons,misaochan/apps-android-commons,psh/apps-android-commons,domdomegg/apps-android-commons,sandarumk/apps-android-commons,RSBat/apps-android-commons,nicolas-raoul/apps-android-commons,neslihanturan/apps-android-commons,dbrant/apps-android-commons,akaita/apps-android-commons,domdomegg/apps-android-commons,akaita/apps-android-commons,domdomegg/apps-android-commons,commons-app/apps-android-commons,commons-app/apps-android-commons,whym/apps-android-commons,RSBat/apps-android-commons,misaochan/apps-android-commons,psh/apps-android-commons,dbrant/apps-android-commons,misaochan/apps-android-commons,maskaravivek/apps-android-commons,psh/apps-android-commons,misaochan/apps-android-commons,tobias47n9e/apps-android-commons,whym/apps-android-commons,psh/apps-android-commons,RSBat/apps-android-commons,neslihanturan/apps-android-commons,dbrant/apps-android-commons,neslihanturan/apps-android-commons,domdomegg/apps-android-commons,tobias47n9e/apps-android-commons,commons-app/apps-android-commons,maskaravivek/apps-android-commons,nicolas-raoul/apps-android-commons,misaochan/apps-android-commons,whym/apps-android-commons,maskaravivek/apps-android-commons,tobias47n9e/apps-android-commons,whym/apps-android-commons,commons-app/apps-android-commons,domdomegg/apps-android-commons,dbrant/apps-android-commons,sandarumk/apps-android-commons,neslihanturan/apps-android-commons,maskaravivek/apps-android-commons,nicolas-raoul/apps-android-commons,misaochan/apps-android-commons
package fr.free.nrw.commons.upload; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.net.Uri; import android.os.Bundle; import android.preference.ListPreference; import android.preference.PreferenceManager; import android.support.v4.app.Fragment; import android.text.Editable; import android.text.TextWatcher; import android.util.Log; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.inputmethod.InputMethodManager; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.EditText; import android.widget.Spinner; import android.widget.TextView; import java.util.ArrayList; import java.util.List; import fr.free.nrw.commons.Prefs; import fr.free.nrw.commons.R; import fr.free.nrw.commons.Utils; public class SingleUploadFragment extends Fragment { public interface OnUploadActionInitiated { void uploadActionInitiated(String title, String description); } private EditText titleEdit; private EditText descEdit; private TextView licenseSummaryView; private Spinner licenseSpinner; private OnUploadActionInitiated uploadActionInitiatedHandler; private static final String TAG = SingleUploadFragment.class.getName(); @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { inflater.inflate(R.menu.activity_share, menu); if(titleEdit != null) { menu.findItem(R.id.menu_upload_single).setEnabled(titleEdit.getText().length() != 0); } } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { //What happens when the 'submit' icon is tapped case R.id.menu_upload_single: String title = titleEdit.getText().toString(); String desc = descEdit.getText().toString(); //Save the title/desc in 
short-lived cache so next time this fragment is loaded, we can access these SharedPreferences titleDesc = PreferenceManager.getDefaultSharedPreferences(getActivity()); SharedPreferences.Editor editor = titleDesc.edit(); editor.putString("Title", title); editor.putString("Desc", desc); editor.apply(); uploadActionInitiatedHandler.uploadActionInitiated(title, desc); return true; } return super.onOptionsItemSelected(item); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.fragment_single_upload, null); titleEdit = (EditText)rootView.findViewById(R.id.titleEdit); descEdit = (EditText)rootView.findViewById(R.id.descEdit); Button titleDescButton = (Button) rootView.findViewById(R.id.titleDescButton); licenseSpinner = (Spinner) rootView.findViewById(R.id.licenseSpinner); licenseSummaryView = (TextView)rootView.findViewById(R.id.share_license_summary); ArrayList<String> licenseItems = new ArrayList<>(); licenseItems.add(getString(R.string.license_name_cc0)); licenseItems.add(getString(R.string.license_name_cc_by)); licenseItems.add(getString(R.string.license_name_cc_by_sa)); licenseItems.add(getString(R.string.license_name_cc_by_four)); licenseItems.add(getString(R.string.license_name_cc_by_sa_four)); final SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getActivity()); final String license = prefs.getString(Prefs.DEFAULT_LICENSE, Prefs.Licenses.CC_BY_SA_3); Log.d("Single Upload fragment", license); ArrayAdapter<String> adapter = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_dropdown_item, licenseItems); licenseSpinner.setAdapter(adapter); int position = licenseItems.indexOf(getString(Utils.licenseNameFor(license))); Log.d("Single Upload fragment", "Position:"+position+" "+getString(Utils.licenseNameFor(license))); licenseSpinner.setSelection(position); licenseSpinner.setOnItemSelectedListener(new 
AdapterView.OnItemSelectedListener() { @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { String licenseName = parent.getItemAtPosition(position).toString(); String license = Prefs.Licenses.CC_BY_SA_3; // default value if(getString(R.string.license_name_cc0).equals(licenseName)) { license = Prefs.Licenses.CC0; } else if(getString(R.string.license_name_cc_by).equals(licenseName)) { license = Prefs.Licenses.CC_BY_3; } else if(getString(R.string.license_name_cc_by_sa).equals(licenseName)) { license = Prefs.Licenses.CC_BY_SA_3; } else if(getString(R.string.license_name_cc_by_four).equals(licenseName)) { license = Prefs.Licenses.CC_BY_4; } else if(getString(R.string.license_name_cc_by_sa_four).equals(licenseName)) { license = Prefs.Licenses.CC_BY_SA_4; } setLicenseSummary(license); SharedPreferences.Editor editor = prefs.edit(); editor.putString(Prefs.DEFAULT_LICENSE, license); editor.apply(); } @Override public void onNothingSelected(AdapterView<?> parent) { } }); titleDescButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { //Retrieve last title and desc entered SharedPreferences titleDesc = PreferenceManager.getDefaultSharedPreferences(getActivity()); String title = titleDesc.getString("Title", ""); String desc = titleDesc.getString("Desc", ""); Log.d(TAG, "Title: " + title + ", Desc: " + desc); titleEdit.setText(title); descEdit.setText(desc); } }); TextWatcher uploadEnabler = new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) { } @Override public void onTextChanged(CharSequence charSequence, int i, int i2, int i3) {} @Override public void afterTextChanged(Editable editable) { if(getActivity() != null) { getActivity().invalidateOptionsMenu(); } } }; titleEdit.addTextChangedListener(uploadEnabler); setLicenseSummary(license); // Open license page on touch licenseSummaryView.setOnTouchListener(new View.OnTouchListener() { 
@Override public boolean onTouch(View view, MotionEvent motionEvent) { if (motionEvent.getActionMasked() == MotionEvent.ACTION_DOWN) { Intent intent = new Intent(); intent.setAction(Intent.ACTION_VIEW); intent.setData(Uri.parse(Utils.licenseUrlFor(license))); startActivity(intent); return true; } else { return false; } } }); return rootView; } private void setLicenseSummary(String license) { licenseSummaryView.setText(getString(R.string.share_license_summary, getString(Utils.licenseNameFor(license)))); } @Override public void onAttach(Activity activity) { super.onAttach(activity); uploadActionInitiatedHandler = (OnUploadActionInitiated) activity; } @Override public void onStop() { super.onStop(); // FIXME: Stops the keyboard from being shown 'stale' while moving out of this fragment into the next View target = getView().findFocus(); if (target != null) { InputMethodManager imm = (InputMethodManager) target.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(target.getWindowToken(), 0); } } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setHasOptionsMenu(true); } }
app/src/main/java/fr/free/nrw/commons/upload/SingleUploadFragment.java
package fr.free.nrw.commons.upload; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.net.Uri; import android.os.Bundle; import android.preference.ListPreference; import android.preference.PreferenceManager; import android.support.v4.app.Fragment; import android.text.Editable; import android.text.TextWatcher; import android.util.Log; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; import android.view.inputmethod.InputMethodManager; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.Button; import android.widget.EditText; import android.widget.Spinner; import android.widget.TextView; import java.util.ArrayList; import java.util.List; import fr.free.nrw.commons.Prefs; import fr.free.nrw.commons.R; import fr.free.nrw.commons.Utils; public class SingleUploadFragment extends Fragment { public interface OnUploadActionInitiated { void uploadActionInitiated(String title, String description); } private EditText titleEdit; private EditText descEdit; private TextView licenseSummaryView; private Spinner licenseSpinner; private OnUploadActionInitiated uploadActionInitiatedHandler; private static final String TAG = SingleUploadFragment.class.getName(); @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { inflater.inflate(R.menu.activity_share, menu); if(titleEdit != null) { menu.findItem(R.id.menu_upload_single).setEnabled(titleEdit.getText().length() != 0); } } @Override public boolean onOptionsItemSelected(MenuItem item) { switch (item.getItemId()) { //What happens when the 'submit' icon is tapped case R.id.menu_upload_single: String title = titleEdit.getText().toString(); String desc = descEdit.getText().toString(); //Save the title/desc in 
short-lived cache so next time this fragment is loaded, we can access these SharedPreferences titleDesc = PreferenceManager.getDefaultSharedPreferences(getActivity()); SharedPreferences.Editor editor = titleDesc.edit(); editor.putString("Title", title); editor.putString("Desc", desc); editor.apply(); uploadActionInitiatedHandler.uploadActionInitiated(title, desc); return true; } return super.onOptionsItemSelected(item); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { View rootView = inflater.inflate(R.layout.fragment_single_upload, null); titleEdit = (EditText)rootView.findViewById(R.id.titleEdit); descEdit = (EditText)rootView.findViewById(R.id.descEdit); Button titleDescButton = (Button) rootView.findViewById(R.id.titleDescButton); licenseSpinner = (Spinner) rootView.findViewById(R.id.licenseSpinner); licenseSummaryView = (TextView)rootView.findViewById(R.id.share_license_summary); ArrayList<String> licenseItems = new ArrayList<>(); licenseItems.add(getString(R.string.license_name_cc0)); licenseItems.add(getString(R.string.license_name_cc_by)); licenseItems.add(getString(R.string.license_name_cc_by_sa)); licenseItems.add(getString(R.string.license_name_cc_by_four)); licenseItems.add(getString(R.string.license_name_cc_by_sa_four)); final SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(getActivity()); final String license = prefs.getString(Prefs.DEFAULT_LICENSE, Prefs.Licenses.CC_BY_SA_3); Log.d("Single Upload fragment", license); ArrayAdapter<String> adapter = new ArrayAdapter<>(getActivity(), android.R.layout.simple_spinner_dropdown_item, licenseItems); licenseSpinner.setAdapter(adapter); int position = licenseItems.indexOf(getString(Utils.licenseNameFor(license))); Log.d("Single Upload fragment", "Position:"+position+" "+getString(Utils.licenseNameFor(license))); licenseSpinner.setSelection(position); licenseSpinner.setOnItemSelectedListener(new 
AdapterView.OnItemSelectedListener() { @Override public void onItemSelected(AdapterView<?> parent, View view, int position, long id) { String licenseName = parent.getItemAtPosition(position).toString(); String license = Prefs.Licenses.CC_BY_SA_3; // default value if(getString(R.string.license_name_cc0).equals(licenseName)) { license = Prefs.Licenses.CC0; } else if(getString(R.string.license_name_cc_by).equals(licenseName)) { license = Prefs.Licenses.CC_BY_3; } else if(getString(R.string.license_name_cc_by_sa).equals(licenseName)) { license = Prefs.Licenses.CC_BY_SA_3; } else if(getString(R.string.license_name_cc_by_four).equals(licenseName)) { license = Prefs.Licenses.CC_BY_4; } else if(getString(R.string.license_name_cc_by_sa_four).equals(licenseName)) { license = Prefs.Licenses.CC_BY_SA_4; } setLicenseSummary(license); SharedPreferences.Editor editor = prefs.edit(); editor.putString(Prefs.DEFAULT_LICENSE, license); editor.commit(); } @Override public void onNothingSelected(AdapterView<?> parent) { } }); titleDescButton.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { //Retrieve last title and desc entered SharedPreferences titleDesc = PreferenceManager.getDefaultSharedPreferences(getActivity()); String title = titleDesc.getString("Title", ""); String desc = titleDesc.getString("Desc", ""); Log.d(TAG, "Title: " + title + ", Desc: " + desc); titleEdit.setText(title); descEdit.setText(desc); } }); TextWatcher uploadEnabler = new TextWatcher() { @Override public void beforeTextChanged(CharSequence charSequence, int i, int i2, int i3) { } @Override public void onTextChanged(CharSequence charSequence, int i, int i2, int i3) {} @Override public void afterTextChanged(Editable editable) { if(getActivity() != null) { getActivity().invalidateOptionsMenu(); } } }; titleEdit.addTextChangedListener(uploadEnabler); setLicenseSummary(license); // Open license page on touch licenseSummaryView.setOnTouchListener(new View.OnTouchListener() { 
@Override public boolean onTouch(View view, MotionEvent motionEvent) { if (motionEvent.getActionMasked() == MotionEvent.ACTION_DOWN) { Intent intent = new Intent(); intent.setAction(Intent.ACTION_VIEW); intent.setData(Uri.parse(Utils.licenseUrlFor(license))); startActivity(intent); return true; } else { return false; } } }); return rootView; } private void setLicenseSummary(String license) { licenseSummaryView.setText(getString(R.string.share_license_summary, getString(Utils.licenseNameFor(license)))); } @Override public void onAttach(Activity activity) { super.onAttach(activity); uploadActionInitiatedHandler = (OnUploadActionInitiated) activity; } @Override public void onStop() { super.onStop(); // FIXME: Stops the keyboard from being shown 'stale' while moving out of this fragment into the next View target = getView().findFocus(); if (target != null) { InputMethodManager imm = (InputMethodManager) target.getContext().getSystemService(Context.INPUT_METHOD_SERVICE); imm.hideSoftInputFromWindow(target.getWindowToken(), 0); } } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setHasOptionsMenu(true); } }
Fix lint issue: Missing commit on Shared prefs editor
app/src/main/java/fr/free/nrw/commons/upload/SingleUploadFragment.java
Fix lint issue: Missing commit on Shared prefs editor
<ide><path>pp/src/main/java/fr/free/nrw/commons/upload/SingleUploadFragment.java <ide> setLicenseSummary(license); <ide> SharedPreferences.Editor editor = prefs.edit(); <ide> editor.putString(Prefs.DEFAULT_LICENSE, license); <del> editor.commit(); <add> editor.apply(); <ide> } <ide> <ide> @Override
Java
apache-2.0
18a746c2a3d878c5b56d94a44e3d7a9f5340fa32
0
DwayneJengSage/Bridge-Exporter-1,Sage-Bionetworks/Bridge-Exporter
package org.sagebionetworks.bridge.exporter.handler; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import com.amazonaws.services.dynamodbv2.document.Item; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.Sets; import org.sagebionetworks.bridge.exporter.exceptions.BridgeExporterNonRetryableException; import org.sagebionetworks.bridge.exporter.synapse.ColumnDefinition; import org.sagebionetworks.client.exceptions.SynapseException; import org.sagebionetworks.repo.model.table.ColumnModel; import org.sagebionetworks.repo.model.table.ColumnType; import org.sagebionetworks.repo.model.table.TableEntity; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.sagebionetworks.bridge.exporter.exceptions.BridgeExporterException; import org.sagebionetworks.bridge.exporter.exceptions.SchemaNotFoundException; import org.sagebionetworks.bridge.exporter.metrics.Metrics; import org.sagebionetworks.bridge.exporter.util.BridgeExporterUtil; import org.sagebionetworks.bridge.exporter.worker.ExportSubtask; import org.sagebionetworks.bridge.exporter.worker.ExportTask; import org.sagebionetworks.bridge.exporter.worker.ExportWorkerManager; import org.sagebionetworks.bridge.exporter.worker.TsvInfo; import org.sagebionetworks.bridge.file.FileHelper; import org.sagebionetworks.bridge.exporter.synapse.SynapseHelper; import javax.annotation.Resource; /** * This is a handler who's solely responsible for a single table in Synapse. This handler is assigned a stream of DDB * records to create a TSV, then uploads the TSV to the Synapse table. If the Synapse Table doesn't exist, this handler * will create it. 
*/ public abstract class SynapseExportHandler extends ExportHandler { private static final Logger LOG = LoggerFactory.getLogger(SynapseExportHandler.class); // Package-scoped to be available to unit tests. static List<ColumnModel> COMMON_COLUMN_LIST; private static List<ColumnDefinition> COLUMN_DEFINITION; @Resource(name = "synapseColumnDefinitions") public final void setSynapseColumnDefinitionsAndList(List<ColumnDefinition> synapseColumnDefinitions) { COLUMN_DEFINITION = synapseColumnDefinitions; ImmutableList.Builder<ColumnModel> columnListBuilder = ImmutableList.builder(); ColumnModel recordIdColumn = new ColumnModel(); recordIdColumn.setName("recordId"); recordIdColumn.setColumnType(ColumnType.STRING); recordIdColumn.setMaximumSize(36L); columnListBuilder.add(recordIdColumn); ColumnModel appVersionColumn = new ColumnModel(); appVersionColumn.setName("appVersion"); appVersionColumn.setColumnType(ColumnType.STRING); appVersionColumn.setMaximumSize(48L); columnListBuilder.add(appVersionColumn); ColumnModel phoneInfoColumn = new ColumnModel(); phoneInfoColumn.setName("phoneInfo"); phoneInfoColumn.setColumnType(ColumnType.STRING); phoneInfoColumn.setMaximumSize(48L); columnListBuilder.add(phoneInfoColumn); final List<ColumnModel> tempList = BridgeExporterUtil.convertToColumnList(COLUMN_DEFINITION); columnListBuilder.addAll(tempList); COMMON_COLUMN_LIST = columnListBuilder.build(); } private static final Joiner DATA_GROUP_JOINER = Joiner.on(',').useForNull(""); /** * Given the record (contained in the subtask), serialize the results and write to a TSV. If a TSV hasn't been * created for this handler for the parent task, this will also initialize that TSV. 
*/ @Override public void handle(ExportSubtask subtask) throws BridgeExporterException, IOException, SchemaNotFoundException, SynapseException { String tableKey = getDdbTableKeyValue(); ExportTask task = subtask.getParentTask(); Metrics metrics = task.getMetrics(); String recordId = subtask.getRecordId(); try { // get TSV info (init if necessary) TsvInfo tsvInfo = initTsvForTask(task); tsvInfo.checkInitAndThrow(); // Construct row value map. Merge row values from common columns and getTsvRowValueMap() Map<String, String> rowValueMap = new HashMap<>(); rowValueMap.putAll(getCommonRowValueMap(subtask)); rowValueMap.putAll(getTsvRowValueMap(subtask)); // write to TSV tsvInfo.writeRow(rowValueMap); // add one record into tsv tsvInfo.addRecordId(recordId); metrics.incrementCounter(tableKey + ".lineCount"); } catch (BridgeExporterException | IOException | RuntimeException | SchemaNotFoundException | SynapseException ex) { // Log metrics and rethrow. metrics.incrementCounter(tableKey + ".errorCount"); LOG.error("Error processing record " + recordId + " for table " + tableKey + ": " + ex.getMessage(), ex); throw ex; } } // Gets the TSV for the task, initializing it if it hasn't been created yet. Also initializes the Synapse table if // it hasn't been created. 
private synchronized TsvInfo initTsvForTask(ExportTask task) { // check if the TSV is already saved in the task TsvInfo savedTsvInfo = getTsvInfoForTask(task); if (savedTsvInfo != null) { return savedTsvInfo; } TsvInfo tsvInfo; try { // get column name list List<String> columnNameList = getColumnNameList(task); // create TSV and writer FileHelper fileHelper = getManager().getFileHelper(); File tsvFile = fileHelper.newFile(task.getTmpDir(), getDdbTableKeyValue() + ".tsv"); PrintWriter tsvWriter = new PrintWriter(fileHelper.getWriter(tsvFile)); // create TSV info tsvInfo = new TsvInfo(columnNameList, tsvFile, tsvWriter); } catch (BridgeExporterException | FileNotFoundException | SchemaNotFoundException | SynapseException ex) { LOG.error("Error initializing TSV for table " + getDdbTableKeyValue() + ": " + ex.getMessage(), ex); tsvInfo = new TsvInfo(ex); } setTsvInfoForTask(task, tsvInfo); return tsvInfo; } // Gets the column name list from Synapse. If the Synapse table doesn't exist, this will create it. This is called // when initializing the TSV for a task. private List<String> getColumnNameList(ExportTask task) throws BridgeExporterException, SchemaNotFoundException, SynapseException { // Construct column definition list. Merge COMMON_COLUMN_LIST with getSynapseTableColumnList. List<ColumnModel> columnDefList = new ArrayList<>(); columnDefList.addAll(COMMON_COLUMN_LIST); columnDefList.addAll(getSynapseTableColumnList(task)); // Create or update table if necessary. 
String synapseTableId = getManager().getSynapseTableIdFromDdb(task, getDdbTableName(), getDdbTableKeyName(), getDdbTableKeyValue()); if (synapseTableId == null) { createNewTable(task, columnDefList); } else { updateTableIfNeeded(synapseTableId, columnDefList); } // Extract column names from column models List<String> columnNameList = new ArrayList<>(); //noinspection Convert2streamapi for (ColumnModel oneColumnDef : columnDefList) { columnNameList.add(oneColumnDef.getName()); } return columnNameList; } // Helper method to create the new Synapse table. private void createNewTable(ExportTask task, List<ColumnModel> columnDefList) throws BridgeExporterException, SynapseException { ExportWorkerManager manager = getManager(); SynapseHelper synapseHelper = manager.getSynapseHelper(); // Delegate table creation to SynapseHelper. long dataAccessTeamId = manager.getDataAccessTeamIdForStudy(getStudyId()); long principalId = manager.getSynapsePrincipalId(); String projectId = manager.getSynapseProjectIdForStudyAndTask(getStudyId(), task); String tableName = getDdbTableKeyValue(); String synapseTableId = synapseHelper.createTableWithColumnsAndAcls(columnDefList, dataAccessTeamId, principalId, projectId, tableName); // write back to DDB table manager.setSynapseTableIdToDdb(task, getDdbTableName(), getDdbTableKeyName(), getDdbTableKeyValue(), synapseTableId); } // Helper method to detect when a schema changes and updates the Synapse table accordingly. Will reject schema // changes that delete or modify columns. Optimized so if no columns were inserted, it won't modify the table. private void updateTableIfNeeded(String synapseTableId, List<ColumnModel> columnDefList) throws BridgeExporterException, SynapseException { ExportWorkerManager manager = getManager(); SynapseHelper synapseHelper = manager.getSynapseHelper(); // Get existing columns from table. 
List<ColumnModel> existingColumnList = synapseHelper.getColumnModelsForTableWithRetry(synapseTableId); // Compute the columns that were added, deleted, and kept. Use tree maps so logging will show a stable message. Map<String, ColumnModel> existingColumnsByName = new TreeMap<>(); for (ColumnModel oneExistingColumn : existingColumnList) { existingColumnsByName.put(oneExistingColumn.getName(), oneExistingColumn); } Map<String, ColumnModel> columnDefsByName = new TreeMap<>(); for (ColumnModel oneColumnDef : columnDefList) { columnDefsByName.put(oneColumnDef.getName(), oneColumnDef); } Set<String> addedColumnNameSet = Sets.difference(columnDefsByName.keySet(), existingColumnsByName.keySet()); Set<String> deletedColumnNameSet = Sets.difference(existingColumnsByName.keySet(), columnDefsByName.keySet()); Set<String> keptColumnNameSet = Sets.intersection(existingColumnsByName.keySet(), columnDefsByName.keySet()); // Were columns deleted? If so, log an error and shortcut. (Don't modify the table.) boolean shouldThrow = false; if (!deletedColumnNameSet.isEmpty()) { LOG.error("Table " + getDdbTableKeyValue() + " has deleted columns: " + BridgeExporterUtil.COMMA_SPACE_JOINER.join(deletedColumnNameSet)); shouldThrow = true; } // Similarly, were any columns changed? Set<String> modifiedColumnNameSet = new TreeSet<>(); for (String oneKeptColumnName : keptColumnNameSet) { // Validate that column types are the same. We can't use .equals() because ID is definitely // different. ColumnModel existingColumn = existingColumnsByName.get(oneKeptColumnName); ColumnModel columnDef = columnDefsByName.get(oneKeptColumnName); if (existingColumn.getColumnType() != columnDef.getColumnType()) { modifiedColumnNameSet.add(oneKeptColumnName); } // In very old tables created by a very old version of BridgeEX, some String columns were created with size // 1000 instead of 100. In order tables, they were manually resized to much smaller than 100. 
In either // case, if the column size is different, we need to set the column def's size to match the existing // column's size so we don't accidentally delete the column. if (!Objects.equals(existingColumn.getMaximumSize(), columnDef.getMaximumSize())) { columnDef.setMaximumSize(existingColumn.getMaximumSize()); } } if (!modifiedColumnNameSet.isEmpty()) { LOG.error("Table " + getDdbTableKeyValue() + " has modified columns: " + BridgeExporterUtil.COMMA_SPACE_JOINER.join(modifiedColumnNameSet)); shouldThrow = true; } if (shouldThrow) { throw new BridgeExporterNonRetryableException("Table has deleted and/or modified columns"); } // Optimization: Were any columns added? if (addedColumnNameSet.isEmpty()) { return; } // Make sure the columns have been created / get column IDs. List<ColumnModel> createdColumnList = synapseHelper.createColumnModelsWithRetry(columnDefList); // Update table. List<String> colIdList = new ArrayList<>(); //noinspection Convert2streamapi for (ColumnModel oneCreatedColumn : createdColumnList) { colIdList.add(oneCreatedColumn.getId()); } TableEntity table = synapseHelper.getTableWithRetry(synapseTableId); table.setColumnIds(colIdList); synapseHelper.updateTableWithRetry(table); } // Helper method to get row values that are common across all Synapse tables and handlers. 
private Map<String, String> getCommonRowValueMap(ExportSubtask subtask) { ExportTask task = subtask.getParentTask(); Item record = subtask.getOriginalRecord(); String recordId = subtask.getRecordId(); // get phone and app info PhoneAppVersionInfo phoneAppVersionInfo = PhoneAppVersionInfo.fromRecord(record); String appVersion = phoneAppVersionInfo.getAppVersion(); String phoneInfo = phoneAppVersionInfo.getPhoneInfo(); // construct row Map<String, String> rowValueMap = new HashMap<>(); rowValueMap.put("recordId", recordId); rowValueMap.put("appVersion", appVersion); rowValueMap.put("phoneInfo", phoneInfo); rowValueMap.putAll(BridgeExporterUtil.getRowValuesFromRecordBasedOnColumnDefinition(rowValueMap,record, COLUMN_DEFINITION, recordId, task)); return rowValueMap; } /** * This is called at the end of the record stream for a given export task. This will then upload the TSV to * Synapse. */ public void uploadToSynapseForTask(ExportTask task) throws BridgeExporterException, IOException, SynapseException { ExportWorkerManager manager = getManager(); TsvInfo tsvInfo = getTsvInfoForTask(task); if (tsvInfo == null) { // No TSV. This means we never wrote any records. Skip. 
return; } File tsvFile = tsvInfo.getFile(); tsvInfo.flushAndCloseWriter(); // filter on line count int lineCount = tsvInfo.getLineCount(); if (lineCount > 0) { String projectId = manager.getSynapseProjectIdForStudyAndTask(getStudyId(), task); String synapseTableId = manager.getSynapseTableIdFromDdb(task, getDdbTableName(), getDdbTableKeyName(), getDdbTableKeyValue()); long linesProcessed = manager.getSynapseHelper().uploadTsvFileToTable(projectId, synapseTableId, tsvFile); if (linesProcessed != lineCount) { throw new BridgeExporterException("Wrong number of lines processed importing to table=" + synapseTableId + ", expected=" + lineCount + ", actual=" + linesProcessed); } // call java sdk api to update records' exporter status postProcessTsv(tsvInfo); LOG.info("Done uploading to Synapse for table name=" + getDdbTableKeyValue() + ", id=" + synapseTableId); } // We've successfully processed the file. We can delete the file now. manager.getFileHelper().deleteFile(tsvFile); } /** Table name (excluding prefix) of the DDB table that holds Synapse table IDs. */ protected abstract String getDdbTableName(); /** Hash key name of the DDB table that holds Synapse table IDs. */ protected abstract String getDdbTableKeyName(); /** * Hash key value for the DDB table that holds the Synapse table IDs. Since this uniquely identifies the Synapse * table, and since Synapse table names need to be unique, this is also used as the Synapse table name. */ protected abstract String getDdbTableKeyValue(); /** * List of Synapse table column model objects, to be used to create both the column models and the Synapse table. * This excludes columns common to all Bridge tables defined in COMMON_COLUMN_LIST. */ protected abstract List<ColumnModel> getSynapseTableColumnList(ExportTask task) throws SchemaNotFoundException; /** Get the TSV saved in the task for this handler. */ protected abstract TsvInfo getTsvInfoForTask(ExportTask task); /** Save the TSV into the task for this handler. 
*/ protected abstract void setTsvInfoForTask(ExportTask task, TsvInfo tsvInfo); /** Creates a row values for a single row from the given export task. */ protected abstract Map<String, String> getTsvRowValueMap(ExportSubtask subtask) throws BridgeExporterException, IOException, SchemaNotFoundException, SynapseException; /** * dummy method to implement by healthDataExportHandler to handle update record exporter status * @throws BridgeExporterException */ protected void postProcessTsv(TsvInfo tsvInfo) throws BridgeExporterException { } }
src/main/java/org/sagebionetworks/bridge/exporter/handler/SynapseExportHandler.java
package org.sagebionetworks.bridge.exporter.handler; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.TreeMap; import java.util.TreeSet; import com.amazonaws.services.dynamodbv2.document.Item; import com.google.common.base.Joiner; import com.google.common.collect.ImmutableList; import com.google.common.collect.Sets; import org.sagebionetworks.bridge.exporter.exceptions.BridgeExporterNonRetryableException; import org.sagebionetworks.bridge.exporter.synapse.ColumnDefinition; import org.sagebionetworks.client.exceptions.SynapseException; import org.sagebionetworks.repo.model.table.ColumnModel; import org.sagebionetworks.repo.model.table.ColumnType; import org.sagebionetworks.repo.model.table.TableEntity; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.sagebionetworks.bridge.exporter.exceptions.BridgeExporterException; import org.sagebionetworks.bridge.exporter.exceptions.SchemaNotFoundException; import org.sagebionetworks.bridge.exporter.metrics.Metrics; import org.sagebionetworks.bridge.exporter.util.BridgeExporterUtil; import org.sagebionetworks.bridge.exporter.worker.ExportSubtask; import org.sagebionetworks.bridge.exporter.worker.ExportTask; import org.sagebionetworks.bridge.exporter.worker.ExportWorkerManager; import org.sagebionetworks.bridge.exporter.worker.TsvInfo; import org.sagebionetworks.bridge.file.FileHelper; import org.sagebionetworks.bridge.exporter.synapse.SynapseHelper; import javax.annotation.Resource; /** * This is a handler who's solely responsible for a single table in Synapse. This handler is assigned a stream of DDB * records to create a TSV, then uploads the TSV to the Synapse table. If the Synapse Table doesn't exist, this handler * will create it. 
*/ public abstract class SynapseExportHandler extends ExportHandler { private static final Logger LOG = LoggerFactory.getLogger(SynapseExportHandler.class); // Package-scoped to be available to unit tests. static List<ColumnModel> COMMON_COLUMN_LIST; private static List<ColumnDefinition> COLUMN_DEFINITION; @Resource(name = "synapseColumnDefinitions") public final void setSynapseColumnDefinitionsAndList(List<ColumnDefinition> synapseColumnDefinitions) { COLUMN_DEFINITION = synapseColumnDefinitions; ImmutableList.Builder<ColumnModel> columnListBuilder = ImmutableList.builder(); ColumnModel recordIdColumn = new ColumnModel(); recordIdColumn.setName("recordId"); recordIdColumn.setColumnType(ColumnType.STRING); recordIdColumn.setMaximumSize(36L); columnListBuilder.add(recordIdColumn); ColumnModel appVersionColumn = new ColumnModel(); appVersionColumn.setName("appVersion"); appVersionColumn.setColumnType(ColumnType.STRING); appVersionColumn.setMaximumSize(48L); columnListBuilder.add(appVersionColumn); ColumnModel phoneInfoColumn = new ColumnModel(); phoneInfoColumn.setName("phoneInfo"); phoneInfoColumn.setColumnType(ColumnType.STRING); phoneInfoColumn.setMaximumSize(48L); columnListBuilder.add(phoneInfoColumn); final List<ColumnModel> tempList = BridgeExporterUtil.convertToColumnList(COLUMN_DEFINITION); columnListBuilder.addAll(tempList); COMMON_COLUMN_LIST = columnListBuilder.build(); } // static { // ImmutableList.Builder<ColumnModel> columnListBuilder = ImmutableList.builder(); // // ColumnModel recordIdColumn = new ColumnModel(); // recordIdColumn.setName("recordId"); // recordIdColumn.setColumnType(ColumnType.STRING); // recordIdColumn.setMaximumSize(36L); // columnListBuilder.add(recordIdColumn); // // ColumnModel appVersionColumn = new ColumnModel(); // appVersionColumn.setName("appVersion"); // appVersionColumn.setColumnType(ColumnType.STRING); // appVersionColumn.setMaximumSize(48L); // columnListBuilder.add(appVersionColumn); // // ColumnModel phoneInfoColumn = 
new ColumnModel(); // phoneInfoColumn.setName("phoneInfo"); // phoneInfoColumn.setColumnType(ColumnType.STRING); // phoneInfoColumn.setMaximumSize(48L); // columnListBuilder.add(phoneInfoColumn); // ColumnModel healthCodeColumn = new ColumnModel(); // healthCodeColumn.setName("healthCode"); // healthCodeColumn.setColumnType(ColumnType.STRING); // healthCodeColumn.setMaximumSize(36L); // columnListBuilder.add(healthCodeColumn); // // ColumnModel externalIdColumn = new ColumnModel(); // externalIdColumn.setName("externalId"); // externalIdColumn.setColumnType(ColumnType.STRING); // externalIdColumn.setMaximumSize(128L); // columnListBuilder.add(externalIdColumn); // // ColumnModel dataGroupsColumn = new ColumnModel(); // dataGroupsColumn.setName("dataGroups"); // dataGroupsColumn.setColumnType(ColumnType.STRING); // dataGroupsColumn.setMaximumSize(100L); // columnListBuilder.add(dataGroupsColumn); // // // NOTE: ColumnType.DATE is actually a timestamp. There is no calendar date type. // ColumnModel uploadDateColumn = new ColumnModel(); // uploadDateColumn.setName("uploadDate"); // uploadDateColumn.setColumnType(ColumnType.STRING); // uploadDateColumn.setMaximumSize(10L); // columnListBuilder.add(uploadDateColumn); // // ColumnModel createdOnColumn = new ColumnModel(); // createdOnColumn.setName("createdOn"); // createdOnColumn.setColumnType(ColumnType.DATE); // columnListBuilder.add(createdOnColumn); // // ColumnModel userSharingScopeColumn = new ColumnModel(); // userSharingScopeColumn.setName("userSharingScope"); // userSharingScopeColumn.setColumnType(ColumnType.STRING); // userSharingScopeColumn.setMaximumSize(48L); // columnListBuilder.add(userSharingScopeColumn); // final List<ColumnModel> tempList = BridgeExporterUtil.convertToColumnList(COLUMN_DEFINITION); // columnListBuilder.addAll(tempList); // // COMMON_COLUMN_LIST = columnListBuilder.build(); // } private static final Joiner DATA_GROUP_JOINER = Joiner.on(',').useForNull(""); /** * Given the record 
(contained in the subtask), serialize the results and write to a TSV. If a TSV hasn't been * created for this handler for the parent task, this will also initialize that TSV. */ @Override public void handle(ExportSubtask subtask) throws BridgeExporterException, IOException, SchemaNotFoundException, SynapseException { String tableKey = getDdbTableKeyValue(); ExportTask task = subtask.getParentTask(); Metrics metrics = task.getMetrics(); String recordId = subtask.getRecordId(); try { // get TSV info (init if necessary) TsvInfo tsvInfo = initTsvForTask(task); tsvInfo.checkInitAndThrow(); // Construct row value map. Merge row values from common columns and getTsvRowValueMap() Map<String, String> rowValueMap = new HashMap<>(); rowValueMap.putAll(getCommonRowValueMap(subtask)); rowValueMap.putAll(getTsvRowValueMap(subtask)); // write to TSV tsvInfo.writeRow(rowValueMap); // add one record into tsv tsvInfo.addRecordId(recordId); metrics.incrementCounter(tableKey + ".lineCount"); } catch (BridgeExporterException | IOException | RuntimeException | SchemaNotFoundException | SynapseException ex) { // Log metrics and rethrow. metrics.incrementCounter(tableKey + ".errorCount"); LOG.error("Error processing record " + recordId + " for table " + tableKey + ": " + ex.getMessage(), ex); throw ex; } } // Gets the TSV for the task, initializing it if it hasn't been created yet. Also initializes the Synapse table if // it hasn't been created. 
private synchronized TsvInfo initTsvForTask(ExportTask task) { // check if the TSV is already saved in the task TsvInfo savedTsvInfo = getTsvInfoForTask(task); if (savedTsvInfo != null) { return savedTsvInfo; } TsvInfo tsvInfo; try { // get column name list List<String> columnNameList = getColumnNameList(task); // create TSV and writer FileHelper fileHelper = getManager().getFileHelper(); File tsvFile = fileHelper.newFile(task.getTmpDir(), getDdbTableKeyValue() + ".tsv"); PrintWriter tsvWriter = new PrintWriter(fileHelper.getWriter(tsvFile)); // create TSV info tsvInfo = new TsvInfo(columnNameList, tsvFile, tsvWriter); } catch (BridgeExporterException | FileNotFoundException | SchemaNotFoundException | SynapseException ex) { LOG.error("Error initializing TSV for table " + getDdbTableKeyValue() + ": " + ex.getMessage(), ex); tsvInfo = new TsvInfo(ex); } setTsvInfoForTask(task, tsvInfo); return tsvInfo; } // Gets the column name list from Synapse. If the Synapse table doesn't exist, this will create it. This is called // when initializing the TSV for a task. private List<String> getColumnNameList(ExportTask task) throws BridgeExporterException, SchemaNotFoundException, SynapseException { // Construct column definition list. Merge COMMON_COLUMN_LIST with getSynapseTableColumnList. List<ColumnModel> columnDefList = new ArrayList<>(); columnDefList.addAll(COMMON_COLUMN_LIST); columnDefList.addAll(getSynapseTableColumnList(task)); // Create or update table if necessary. 
String synapseTableId = getManager().getSynapseTableIdFromDdb(task, getDdbTableName(), getDdbTableKeyName(), getDdbTableKeyValue()); if (synapseTableId == null) { createNewTable(task, columnDefList); } else { updateTableIfNeeded(synapseTableId, columnDefList); } // Extract column names from column models List<String> columnNameList = new ArrayList<>(); //noinspection Convert2streamapi for (ColumnModel oneColumnDef : columnDefList) { columnNameList.add(oneColumnDef.getName()); } return columnNameList; } // Helper method to create the new Synapse table. private void createNewTable(ExportTask task, List<ColumnModel> columnDefList) throws BridgeExporterException, SynapseException { ExportWorkerManager manager = getManager(); SynapseHelper synapseHelper = manager.getSynapseHelper(); // Delegate table creation to SynapseHelper. long dataAccessTeamId = manager.getDataAccessTeamIdForStudy(getStudyId()); long principalId = manager.getSynapsePrincipalId(); String projectId = manager.getSynapseProjectIdForStudyAndTask(getStudyId(), task); String tableName = getDdbTableKeyValue(); String synapseTableId = synapseHelper.createTableWithColumnsAndAcls(columnDefList, dataAccessTeamId, principalId, projectId, tableName); // write back to DDB table manager.setSynapseTableIdToDdb(task, getDdbTableName(), getDdbTableKeyName(), getDdbTableKeyValue(), synapseTableId); } // Helper method to detect when a schema changes and updates the Synapse table accordingly. Will reject schema // changes that delete or modify columns. Optimized so if no columns were inserted, it won't modify the table. private void updateTableIfNeeded(String synapseTableId, List<ColumnModel> columnDefList) throws BridgeExporterException, SynapseException { ExportWorkerManager manager = getManager(); SynapseHelper synapseHelper = manager.getSynapseHelper(); // Get existing columns from table. 
List<ColumnModel> existingColumnList = synapseHelper.getColumnModelsForTableWithRetry(synapseTableId); // Compute the columns that were added, deleted, and kept. Use tree maps so logging will show a stable message. Map<String, ColumnModel> existingColumnsByName = new TreeMap<>(); for (ColumnModel oneExistingColumn : existingColumnList) { existingColumnsByName.put(oneExistingColumn.getName(), oneExistingColumn); } Map<String, ColumnModel> columnDefsByName = new TreeMap<>(); for (ColumnModel oneColumnDef : columnDefList) { columnDefsByName.put(oneColumnDef.getName(), oneColumnDef); } Set<String> addedColumnNameSet = Sets.difference(columnDefsByName.keySet(), existingColumnsByName.keySet()); Set<String> deletedColumnNameSet = Sets.difference(existingColumnsByName.keySet(), columnDefsByName.keySet()); Set<String> keptColumnNameSet = Sets.intersection(existingColumnsByName.keySet(), columnDefsByName.keySet()); // Were columns deleted? If so, log an error and shortcut. (Don't modify the table.) boolean shouldThrow = false; if (!deletedColumnNameSet.isEmpty()) { LOG.error("Table " + getDdbTableKeyValue() + " has deleted columns: " + BridgeExporterUtil.COMMA_SPACE_JOINER.join(deletedColumnNameSet)); shouldThrow = true; } // Similarly, were any columns changed? Set<String> modifiedColumnNameSet = new TreeSet<>(); for (String oneKeptColumnName : keptColumnNameSet) { // Validate that column types are the same. We can't use .equals() because ID is definitely // different. ColumnModel existingColumn = existingColumnsByName.get(oneKeptColumnName); ColumnModel columnDef = columnDefsByName.get(oneKeptColumnName); if (existingColumn.getColumnType() != columnDef.getColumnType()) { modifiedColumnNameSet.add(oneKeptColumnName); } // In very old tables created by a very old version of BridgeEX, some String columns were created with size // 1000 instead of 100. In order tables, they were manually resized to much smaller than 100. 
In either // case, if the column size is different, we need to set the column def's size to match the existing // column's size so we don't accidentally delete the column. if (!Objects.equals(existingColumn.getMaximumSize(), columnDef.getMaximumSize())) { columnDef.setMaximumSize(existingColumn.getMaximumSize()); } } if (!modifiedColumnNameSet.isEmpty()) { LOG.error("Table " + getDdbTableKeyValue() + " has modified columns: " + BridgeExporterUtil.COMMA_SPACE_JOINER.join(modifiedColumnNameSet)); shouldThrow = true; } if (shouldThrow) { throw new BridgeExporterNonRetryableException("Table has deleted and/or modified columns"); } // Optimization: Were any columns added? if (addedColumnNameSet.isEmpty()) { return; } // Make sure the columns have been created / get column IDs. List<ColumnModel> createdColumnList = synapseHelper.createColumnModelsWithRetry(columnDefList); // Update table. List<String> colIdList = new ArrayList<>(); //noinspection Convert2streamapi for (ColumnModel oneCreatedColumn : createdColumnList) { colIdList.add(oneCreatedColumn.getId()); } TableEntity table = synapseHelper.getTableWithRetry(synapseTableId); table.setColumnIds(colIdList); synapseHelper.updateTableWithRetry(table); } // Helper method to get row values that are common across all Synapse tables and handlers. 
private Map<String, String> getCommonRowValueMap(ExportSubtask subtask) { ExportTask task = subtask.getParentTask(); Item record = subtask.getOriginalRecord(); String recordId = subtask.getRecordId(); // get phone and app info PhoneAppVersionInfo phoneAppVersionInfo = PhoneAppVersionInfo.fromRecord(record); String appVersion = phoneAppVersionInfo.getAppVersion(); String phoneInfo = phoneAppVersionInfo.getPhoneInfo(); // construct row Map<String, String> rowValueMap = new HashMap<>(); rowValueMap.put("recordId", recordId); rowValueMap.put("appVersion", appVersion); rowValueMap.put("phoneInfo", phoneInfo); rowValueMap.putAll(BridgeExporterUtil.getRowValuesFromRecordBasedOnColumnDefinition(rowValueMap,record, COLUMN_DEFINITION, recordId, task)); return rowValueMap; } /** * This is called at the end of the record stream for a given export task. This will then upload the TSV to * Synapse. */ public void uploadToSynapseForTask(ExportTask task) throws BridgeExporterException, IOException, SynapseException { ExportWorkerManager manager = getManager(); TsvInfo tsvInfo = getTsvInfoForTask(task); if (tsvInfo == null) { // No TSV. This means we never wrote any records. Skip. 
return; } File tsvFile = tsvInfo.getFile(); tsvInfo.flushAndCloseWriter(); // filter on line count int lineCount = tsvInfo.getLineCount(); if (lineCount > 0) { String projectId = manager.getSynapseProjectIdForStudyAndTask(getStudyId(), task); String synapseTableId = manager.getSynapseTableIdFromDdb(task, getDdbTableName(), getDdbTableKeyName(), getDdbTableKeyValue()); long linesProcessed = manager.getSynapseHelper().uploadTsvFileToTable(projectId, synapseTableId, tsvFile); if (linesProcessed != lineCount) { throw new BridgeExporterException("Wrong number of lines processed importing to table=" + synapseTableId + ", expected=" + lineCount + ", actual=" + linesProcessed); } // call java sdk api to update records' exporter status postProcessTsv(tsvInfo); LOG.info("Done uploading to Synapse for table name=" + getDdbTableKeyValue() + ", id=" + synapseTableId); } // We've successfully processed the file. We can delete the file now. manager.getFileHelper().deleteFile(tsvFile); } /** Table name (excluding prefix) of the DDB table that holds Synapse table IDs. */ protected abstract String getDdbTableName(); /** Hash key name of the DDB table that holds Synapse table IDs. */ protected abstract String getDdbTableKeyName(); /** * Hash key value for the DDB table that holds the Synapse table IDs. Since this uniquely identifies the Synapse * table, and since Synapse table names need to be unique, this is also used as the Synapse table name. */ protected abstract String getDdbTableKeyValue(); /** * List of Synapse table column model objects, to be used to create both the column models and the Synapse table. * This excludes columns common to all Bridge tables defined in COMMON_COLUMN_LIST. */ protected abstract List<ColumnModel> getSynapseTableColumnList(ExportTask task) throws SchemaNotFoundException; /** Get the TSV saved in the task for this handler. */ protected abstract TsvInfo getTsvInfoForTask(ExportTask task); /** Save the TSV into the task for this handler. 
*/ protected abstract void setTsvInfoForTask(ExportTask task, TsvInfo tsvInfo); /** Creates a row values for a single row from the given export task. */ protected abstract Map<String, String> getTsvRowValueMap(ExportSubtask subtask) throws BridgeExporterException, IOException, SchemaNotFoundException, SynapseException; /** * dummy method to implement by healthDataExportHandler to handle update record exporter status * @throws BridgeExporterException */ protected void postProcessTsv(TsvInfo tsvInfo) throws BridgeExporterException { } }
cleanup
src/main/java/org/sagebionetworks/bridge/exporter/handler/SynapseExportHandler.java
cleanup
<ide><path>rc/main/java/org/sagebionetworks/bridge/exporter/handler/SynapseExportHandler.java <ide> COMMON_COLUMN_LIST = columnListBuilder.build(); <ide> } <ide> <del>// static { <del>// ImmutableList.Builder<ColumnModel> columnListBuilder = ImmutableList.builder(); <del>// <del>// ColumnModel recordIdColumn = new ColumnModel(); <del>// recordIdColumn.setName("recordId"); <del>// recordIdColumn.setColumnType(ColumnType.STRING); <del>// recordIdColumn.setMaximumSize(36L); <del>// columnListBuilder.add(recordIdColumn); <del>// <del>// ColumnModel appVersionColumn = new ColumnModel(); <del>// appVersionColumn.setName("appVersion"); <del>// appVersionColumn.setColumnType(ColumnType.STRING); <del>// appVersionColumn.setMaximumSize(48L); <del>// columnListBuilder.add(appVersionColumn); <del>// <del>// ColumnModel phoneInfoColumn = new ColumnModel(); <del>// phoneInfoColumn.setName("phoneInfo"); <del>// phoneInfoColumn.setColumnType(ColumnType.STRING); <del>// phoneInfoColumn.setMaximumSize(48L); <del>// columnListBuilder.add(phoneInfoColumn); <del> <del>// ColumnModel healthCodeColumn = new ColumnModel(); <del>// healthCodeColumn.setName("healthCode"); <del>// healthCodeColumn.setColumnType(ColumnType.STRING); <del>// healthCodeColumn.setMaximumSize(36L); <del>// columnListBuilder.add(healthCodeColumn); <del>// <del>// ColumnModel externalIdColumn = new ColumnModel(); <del>// externalIdColumn.setName("externalId"); <del>// externalIdColumn.setColumnType(ColumnType.STRING); <del>// externalIdColumn.setMaximumSize(128L); <del>// columnListBuilder.add(externalIdColumn); <del>// <del>// ColumnModel dataGroupsColumn = new ColumnModel(); <del>// dataGroupsColumn.setName("dataGroups"); <del>// dataGroupsColumn.setColumnType(ColumnType.STRING); <del>// dataGroupsColumn.setMaximumSize(100L); <del>// columnListBuilder.add(dataGroupsColumn); <del>// <del>// // NOTE: ColumnType.DATE is actually a timestamp. There is no calendar date type. 
<del>// ColumnModel uploadDateColumn = new ColumnModel(); <del>// uploadDateColumn.setName("uploadDate"); <del>// uploadDateColumn.setColumnType(ColumnType.STRING); <del>// uploadDateColumn.setMaximumSize(10L); <del>// columnListBuilder.add(uploadDateColumn); <del>// <del>// ColumnModel createdOnColumn = new ColumnModel(); <del>// createdOnColumn.setName("createdOn"); <del>// createdOnColumn.setColumnType(ColumnType.DATE); <del>// columnListBuilder.add(createdOnColumn); <del> <del> <del>// <del>// ColumnModel userSharingScopeColumn = new ColumnModel(); <del>// userSharingScopeColumn.setName("userSharingScope"); <del>// userSharingScopeColumn.setColumnType(ColumnType.STRING); <del>// userSharingScopeColumn.setMaximumSize(48L); <del>// columnListBuilder.add(userSharingScopeColumn); <del> <del>// final List<ColumnModel> tempList = BridgeExporterUtil.convertToColumnList(COLUMN_DEFINITION); <del>// columnListBuilder.addAll(tempList); <del>// <del>// COMMON_COLUMN_LIST = columnListBuilder.build(); <del>// } <del> <ide> private static final Joiner DATA_GROUP_JOINER = Joiner.on(',').useForNull(""); <ide> <ide> /**
Java
apache-2.0
27bfd5df1381a344bff23c6a61c81043353f31a2
0
TNG/ArchUnit,TNG/ArchUnit
/* * Copyright 2014-2020 TNG Technology Consulting GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tngtech.archunit.junit; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.util.List; import java.util.Optional; import java.util.function.Predicate; import java.util.stream.Stream; import com.tngtech.archunit.Internal; import com.tngtech.archunit.core.MayResolveTypesViaReflection; import com.tngtech.archunit.core.domain.JavaClass; import com.tngtech.archunit.core.importer.ClassFileImporter; import org.junit.platform.engine.EngineDiscoveryRequest; import org.junit.platform.engine.ExecutionRequest; import org.junit.platform.engine.Filter; import org.junit.platform.engine.TestDescriptor; import org.junit.platform.engine.UniqueId; import org.junit.platform.engine.discovery.ClassNameFilter; import org.junit.platform.engine.discovery.ClassSelector; import org.junit.platform.engine.discovery.ClasspathRootSelector; import org.junit.platform.engine.discovery.MethodSelector; import org.junit.platform.engine.discovery.PackageNameFilter; import org.junit.platform.engine.discovery.PackageSelector; import org.junit.platform.engine.discovery.UniqueIdSelector; import org.junit.platform.engine.support.hierarchical.HierarchicalTestEngine; import static com.tngtech.archunit.junit.ReflectionUtils.getAllFields; import static com.tngtech.archunit.junit.ReflectionUtils.getAllMethods; import static 
com.tngtech.archunit.junit.ReflectionUtils.withAnnotation; import static java.util.stream.Collectors.toList; /** * A simple test engine to discover and execute ArchUnit tests with JUnit 5. In particular the engine * uses a {@link ClassCache} to avoid the costly import process as much as possible. * <br><br> * Mark classes to be executed by the {@link ArchUnitTestEngine} with {@link AnalyzeClasses @AnalyzeClasses} and * rule fields or methods with {@link ArchTest @ArchTest}. Example: * <pre><code> *{@literal @}AnalyzeClasses(packages = "com.foo") * class MyArchTest { * {@literal @}ArchTest * public static final ArchRule myRule = classes()... * } * </code></pre> */ @Internal public final class ArchUnitTestEngine extends HierarchicalTestEngine<ArchUnitEngineExecutionContext> { static final String UNIQUE_ID = "archunit"; private SharedCache cache = new SharedCache(); // NOTE: We want to change this in tests -> no static/final reference @Override public String getId() { return UNIQUE_ID; } @Override public TestDescriptor discover(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId) { ArchUnitEngineDescriptor result = new ArchUnitEngineDescriptor(uniqueId); resolveRequestedClasspathRoot(discoveryRequest, uniqueId, result); resolveRequestedPackages(discoveryRequest, uniqueId, result); resolveRequestedClasses(discoveryRequest, uniqueId, result); resolveRequestedMethods(discoveryRequest, uniqueId, result); resolveRequestedFields(discoveryRequest, uniqueId, result); resolveRequestedUniqueIds(discoveryRequest, uniqueId, result); return result; } private void resolveRequestedClasspathRoot(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { Stream<JavaClass> classes = discoveryRequest.getSelectorsByType(ClasspathRootSelector.class).stream() .flatMap(this::getContainedClasses); filterCandidatesAndLoadClasses(classes, discoveryRequest) .forEach(clazz -> ArchUnitTestDescriptor.resolve( result, ElementResolver.create(result, 
uniqueId, clazz), cache.get())); } private void resolveRequestedPackages(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { String[] packages = discoveryRequest.getSelectorsByType(PackageSelector.class).stream() .map(PackageSelector::getPackageName) .toArray(String[]::new); Stream<JavaClass> classes = getContainedClasses(packages); filterCandidatesAndLoadClasses(classes, discoveryRequest) .forEach(clazz -> ArchUnitTestDescriptor.resolve( result, ElementResolver.create(result, uniqueId, clazz), cache.get())); } private Stream<Class<?>> filterCandidatesAndLoadClasses(Stream<JavaClass> classes, EngineDiscoveryRequest discoveryRequest) { return classes .filter(isAllowedBy(discoveryRequest)) .filter(this::isArchUnitTestCandidate) .flatMap(this::safelyReflect); } private void resolveRequestedClasses(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { discoveryRequest.getSelectorsByType(ClassSelector.class).stream() .map(ClassSelector::getJavaClass) .filter(this::isArchUnitTestCandidate) .forEach(clazz -> ArchUnitTestDescriptor.resolve( result, ElementResolver.create(result, uniqueId, clazz), cache.get())); } private void resolveRequestedMethods(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { discoveryRequest.getSelectorsByType(MethodSelector.class).stream() .filter(s -> s.getJavaMethod().isAnnotationPresent(ArchTest.class)) .forEach(selector -> ArchUnitTestDescriptor.resolve( result, ElementResolver.create(result, uniqueId, selector.getJavaClass(), selector.getJavaMethod()), cache.get())); } private void resolveRequestedFields(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { discoveryRequest.getSelectorsByType(FieldSelector.class).stream() .filter(s -> s.getJavaField().isAnnotationPresent(ArchTest.class)) .forEach(selector -> ArchUnitTestDescriptor.resolve( result, 
ElementResolver.create(result, uniqueId, selector.getJavaClass(), selector.getJavaField()), cache.get())); } private void resolveRequestedUniqueIds(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { discoveryRequest.getSelectorsByType(UniqueIdSelector.class).stream() .filter(selector -> selector.getUniqueId().getEngineId().equals(Optional.of(getId()))) .forEach(selector -> ArchUnitTestDescriptor.resolve( result, ElementResolver.create(result, uniqueId, selector.getUniqueId()), cache.get())); } private Stream<JavaClass> getContainedClasses(String[] packages) { return new ClassFileImporter().importPackages(packages).stream(); } private Stream<JavaClass> getContainedClasses(ClasspathRootSelector selector) { return new ClassFileImporter().importUrl(toUrl(selector.getClasspathRoot())).stream(); } private Predicate<JavaClass> isAllowedBy(EngineDiscoveryRequest discoveryRequest) { List<Predicate<String>> filters = Stream .concat(discoveryRequest.getFiltersByType(ClassNameFilter.class).stream(), discoveryRequest.getFiltersByType(PackageNameFilter.class).stream()) .map(Filter::toPredicate) .collect(toList()); return javaClass -> filters.stream().allMatch(p -> p.test(javaClass.getName())); } private boolean isArchUnitTestCandidate(JavaClass javaClass) { return javaClass.getAllMembers().stream().anyMatch(m -> m.isAnnotatedWith(ArchTest.class)); } @MayResolveTypesViaReflection(reason = "Within the ArchUnitTestEngine we may resolve types via reflection, since they are needed anyway") private Stream<Class<?>> safelyReflect(JavaClass javaClass) { try { return Stream.of(javaClass.reflect()); } catch (NoClassDefFoundError | RuntimeException e) { return Stream.empty(); } } private boolean isArchUnitTestCandidate(Class<?> clazz) { try { return !getAllFields(clazz, withAnnotation(ArchTest.class)).isEmpty() || !getAllMethods(clazz, withAnnotation(ArchTest.class)).isEmpty(); } catch (NoClassDefFoundError | Exception e) { return false; } } 
private URL toUrl(URI uri) { try { return uri.toURL(); } catch (MalformedURLException e) { throw new ArchTestInitializationException(e); } } @Override protected ArchUnitEngineExecutionContext createExecutionContext(ExecutionRequest request) { return new ArchUnitEngineExecutionContext(); } static class SharedCache { private static final ClassCache cache = new ClassCache(); ClassCache get() { return cache; } } }
archunit-junit/junit5/engine/src/main/java/com/tngtech/archunit/junit/ArchUnitTestEngine.java
/* * Copyright 2014-2020 TNG Technology Consulting GmbH * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tngtech.archunit.junit; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.util.List; import java.util.Optional; import java.util.function.Predicate; import java.util.stream.Stream; import java.util.stream.StreamSupport; import com.tngtech.archunit.Internal; import com.tngtech.archunit.core.MayResolveTypesViaReflection; import com.tngtech.archunit.core.domain.JavaClass; import com.tngtech.archunit.core.domain.JavaClasses; import com.tngtech.archunit.core.importer.ClassFileImporter; import org.junit.platform.engine.EngineDiscoveryRequest; import org.junit.platform.engine.ExecutionRequest; import org.junit.platform.engine.Filter; import org.junit.platform.engine.TestDescriptor; import org.junit.platform.engine.UniqueId; import org.junit.platform.engine.discovery.ClassNameFilter; import org.junit.platform.engine.discovery.ClassSelector; import org.junit.platform.engine.discovery.ClasspathRootSelector; import org.junit.platform.engine.discovery.MethodSelector; import org.junit.platform.engine.discovery.PackageNameFilter; import org.junit.platform.engine.discovery.PackageSelector; import org.junit.platform.engine.discovery.UniqueIdSelector; import org.junit.platform.engine.support.hierarchical.HierarchicalTestEngine; import static com.tngtech.archunit.junit.ReflectionUtils.getAllFields; import static 
com.tngtech.archunit.junit.ReflectionUtils.getAllMethods; import static com.tngtech.archunit.junit.ReflectionUtils.withAnnotation; import static java.util.stream.Collectors.toList; /** * A simple test engine to discover and execute ArchUnit tests with JUnit 5. In particular the engine * uses a {@link ClassCache} to avoid the costly import process as much as possible. * <br><br> * Mark classes to be executed by the {@link ArchUnitTestEngine} with {@link AnalyzeClasses @AnalyzeClasses} and * rule fields or methods with {@link ArchTest @ArchTest}. Example: * <pre><code> *{@literal @}AnalyzeClasses(packages = "com.foo") * class MyArchTest { * {@literal @}ArchTest * public static final ArchRule myRule = classes()... * } * </code></pre> */ @Internal public final class ArchUnitTestEngine extends HierarchicalTestEngine<ArchUnitEngineExecutionContext> { static final String UNIQUE_ID = "archunit"; private SharedCache cache = new SharedCache(); // NOTE: We want to change this in tests -> no static/final reference @Override public String getId() { return UNIQUE_ID; } @Override public TestDescriptor discover(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId) { ArchUnitEngineDescriptor result = new ArchUnitEngineDescriptor(uniqueId); resolveRequestedClasspathRoot(discoveryRequest, uniqueId, result); resolveRequestedPackages(discoveryRequest, uniqueId, result); resolveRequestedClasses(discoveryRequest, uniqueId, result); resolveRequestedMethods(discoveryRequest, uniqueId, result); resolveRequestedFields(discoveryRequest, uniqueId, result); resolveRequestedUniqueIds(discoveryRequest, uniqueId, result); return result; } private void resolveRequestedClasspathRoot(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { Stream<JavaClass> classes = discoveryRequest.getSelectorsByType(ClasspathRootSelector.class).stream() .flatMap(this::getContainedClasses); filterCandidatesAndLoadClasses(classes, discoveryRequest) .forEach(clazz -> 
ArchUnitTestDescriptor.resolve( result, ElementResolver.create(result, uniqueId, clazz), cache.get())); } private void resolveRequestedPackages(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { String[] packages = discoveryRequest.getSelectorsByType(PackageSelector.class).stream() .map(PackageSelector::getPackageName) .toArray(String[]::new); Stream<JavaClass> classes = getContainedClasses(packages); filterCandidatesAndLoadClasses(classes, discoveryRequest) .forEach(clazz -> ArchUnitTestDescriptor.resolve( result, ElementResolver.create(result, uniqueId, clazz), cache.get())); } private Stream<Class<?>> filterCandidatesAndLoadClasses(Stream<JavaClass> classes, EngineDiscoveryRequest discoveryRequest) { return classes .filter(isAllowedBy(discoveryRequest)) .filter(this::isArchUnitTestCandidate) .flatMap(this::safelyReflect); } private void resolveRequestedClasses(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { discoveryRequest.getSelectorsByType(ClassSelector.class).stream() .map(ClassSelector::getJavaClass) .filter(this::isArchUnitTestCandidate) .forEach(clazz -> ArchUnitTestDescriptor.resolve( result, ElementResolver.create(result, uniqueId, clazz), cache.get())); } private void resolveRequestedMethods(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { discoveryRequest.getSelectorsByType(MethodSelector.class).stream() .filter(s -> s.getJavaMethod().isAnnotationPresent(ArchTest.class)) .forEach(selector -> ArchUnitTestDescriptor.resolve( result, ElementResolver.create(result, uniqueId, selector.getJavaClass(), selector.getJavaMethod()), cache.get())); } private void resolveRequestedFields(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { discoveryRequest.getSelectorsByType(FieldSelector.class).stream() .filter(s -> s.getJavaField().isAnnotationPresent(ArchTest.class)) .forEach(selector 
-> ArchUnitTestDescriptor.resolve( result, ElementResolver.create(result, uniqueId, selector.getJavaClass(), selector.getJavaField()), cache.get())); } private void resolveRequestedUniqueIds(EngineDiscoveryRequest discoveryRequest, UniqueId uniqueId, ArchUnitEngineDescriptor result) { discoveryRequest.getSelectorsByType(UniqueIdSelector.class).stream() .filter(selector -> selector.getUniqueId().getEngineId().equals(Optional.of(getId()))) .forEach(selector -> ArchUnitTestDescriptor.resolve( result, ElementResolver.create(result, uniqueId, selector.getUniqueId()), cache.get())); } private Stream<JavaClass> getContainedClasses(String[] packages) { return stream(new ClassFileImporter().importPackages(packages)); } private Stream<JavaClass> getContainedClasses(ClasspathRootSelector selector) { return stream(new ClassFileImporter().importUrl(toUrl(selector.getClasspathRoot()))); } private Stream<JavaClass> stream(JavaClasses classes) { return StreamSupport.stream(classes.spliterator(), false); } private Predicate<JavaClass> isAllowedBy(EngineDiscoveryRequest discoveryRequest) { List<Predicate<String>> filters = Stream .concat(discoveryRequest.getFiltersByType(ClassNameFilter.class).stream(), discoveryRequest.getFiltersByType(PackageNameFilter.class).stream()) .map(Filter::toPredicate) .collect(toList()); return javaClass -> filters.stream().allMatch(p -> p.test(javaClass.getName())); } private boolean isArchUnitTestCandidate(JavaClass javaClass) { return javaClass.getAllMembers().stream().anyMatch(m -> m.isAnnotatedWith(ArchTest.class)); } @MayResolveTypesViaReflection(reason = "Within the ArchUnitTestEngine we may resolve types via reflection, since they are needed anyway") private Stream<Class<?>> safelyReflect(JavaClass javaClass) { try { return Stream.of(javaClass.reflect()); } catch (NoClassDefFoundError | RuntimeException e) { return Stream.empty(); } } private boolean isArchUnitTestCandidate(Class<?> clazz) { try { return !getAllFields(clazz, 
withAnnotation(ArchTest.class)).isEmpty() || !getAllMethods(clazz, withAnnotation(ArchTest.class)).isEmpty(); } catch (NoClassDefFoundError | Exception e) { return false; } } private URL toUrl(URI uri) { try { return uri.toURL(); } catch (MalformedURLException e) { throw new ArchTestInitializationException(e); } } @Override protected ArchUnitEngineExecutionContext createExecutionContext(ExecutionRequest request) { return new ArchUnitEngineExecutionContext(); } static class SharedCache { private static final ClassCache cache = new ClassCache(); ClassCache get() { return cache; } } }
simplify code that creates class stream #411
archunit-junit/junit5/engine/src/main/java/com/tngtech/archunit/junit/ArchUnitTestEngine.java
simplify code that creates class stream #411
<ide><path>rchunit-junit/junit5/engine/src/main/java/com/tngtech/archunit/junit/ArchUnitTestEngine.java <ide> import java.util.Optional; <ide> import java.util.function.Predicate; <ide> import java.util.stream.Stream; <del>import java.util.stream.StreamSupport; <ide> <ide> import com.tngtech.archunit.Internal; <ide> import com.tngtech.archunit.core.MayResolveTypesViaReflection; <ide> import com.tngtech.archunit.core.domain.JavaClass; <del>import com.tngtech.archunit.core.domain.JavaClasses; <ide> import com.tngtech.archunit.core.importer.ClassFileImporter; <ide> import org.junit.platform.engine.EngineDiscoveryRequest; <ide> import org.junit.platform.engine.ExecutionRequest; <ide> } <ide> <ide> private Stream<JavaClass> getContainedClasses(String[] packages) { <del> return stream(new ClassFileImporter().importPackages(packages)); <add> return new ClassFileImporter().importPackages(packages).stream(); <ide> } <ide> <ide> private Stream<JavaClass> getContainedClasses(ClasspathRootSelector selector) { <del> return stream(new ClassFileImporter().importUrl(toUrl(selector.getClasspathRoot()))); <del> } <del> <del> private Stream<JavaClass> stream(JavaClasses classes) { <del> return StreamSupport.stream(classes.spliterator(), false); <add> return new ClassFileImporter().importUrl(toUrl(selector.getClasspathRoot())).stream(); <ide> } <ide> <ide> private Predicate<JavaClass> isAllowedBy(EngineDiscoveryRequest discoveryRequest) {
Java
mit
ee051a751893d5fc7a79aba1a51da406a2e3c4da
0
jenkinsci/ec2-plugin,mkozell/ec2-plugin,jenkinsci/ec2-plugin,mkozell/ec2-plugin,jenkinsci/ec2-plugin,jenkinsci/ec2-plugin,mkozell/ec2-plugin,mkozell/ec2-plugin
/* * The MIT License * * Copyright (c) 2004-, Kohsuke Kawaguchi, Sun Microsystems, Inc., and a number of other of contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the * Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package hudson.plugins.ec2; import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.model.AmazonEC2Exception; import com.amazonaws.services.ec2.model.BlockDeviceMapping; import com.amazonaws.services.ec2.model.CancelSpotInstanceRequestsRequest; import com.amazonaws.services.ec2.model.CreateTagsRequest; import com.amazonaws.services.ec2.model.CreditSpecificationRequest; import com.amazonaws.services.ec2.model.DescribeImagesRequest; import com.amazonaws.services.ec2.model.DescribeInstancesRequest; import com.amazonaws.services.ec2.model.DescribeInstancesResult; import com.amazonaws.services.ec2.model.DescribeSecurityGroupsRequest; import com.amazonaws.services.ec2.model.DescribeSecurityGroupsResult; import com.amazonaws.services.ec2.model.DescribeSpotInstanceRequestsRequest; import com.amazonaws.services.ec2.model.DescribeSubnetsRequest; import com.amazonaws.services.ec2.model.DescribeSubnetsResult; import com.amazonaws.services.ec2.model.Filter; import com.amazonaws.services.ec2.model.IamInstanceProfileSpecification; import com.amazonaws.services.ec2.model.Image; import com.amazonaws.services.ec2.model.Instance; import com.amazonaws.services.ec2.model.InstanceMarketOptionsRequest; import com.amazonaws.services.ec2.model.InstanceNetworkInterfaceSpecification; import com.amazonaws.services.ec2.model.InstanceStateName; import com.amazonaws.services.ec2.model.InstanceType; import com.amazonaws.services.ec2.model.KeyPair; import com.amazonaws.services.ec2.model.LaunchSpecification; import com.amazonaws.services.ec2.model.MarketType; import com.amazonaws.services.ec2.model.Placement; import com.amazonaws.services.ec2.model.RequestSpotInstancesRequest; import com.amazonaws.services.ec2.model.RequestSpotInstancesResult; import com.amazonaws.services.ec2.model.Reservation; import 
com.amazonaws.services.ec2.model.ResourceType; import com.amazonaws.services.ec2.model.RunInstancesRequest; import com.amazonaws.services.ec2.model.SecurityGroup; import com.amazonaws.services.ec2.model.ShutdownBehavior; import com.amazonaws.services.ec2.model.SpotInstanceRequest; import com.amazonaws.services.ec2.model.SpotMarketOptions; import com.amazonaws.services.ec2.model.SpotPlacement; import com.amazonaws.services.ec2.model.StartInstancesRequest; import com.amazonaws.services.ec2.model.StartInstancesResult; import com.amazonaws.services.ec2.model.Subnet; import com.amazonaws.services.ec2.model.Tag; import com.amazonaws.services.ec2.model.TagSpecification; import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.Util; import hudson.XmlFile; import hudson.model.Describable; import hudson.model.Descriptor; import hudson.model.Descriptor.FormException; import hudson.model.Failure; import hudson.model.Hudson; import hudson.model.Label; import hudson.model.Node; import hudson.model.Saveable; import hudson.model.TaskListener; import hudson.model.labels.LabelAtom; import hudson.model.listeners.SaveableListener; import hudson.plugins.ec2.util.AmazonEC2Factory; import hudson.plugins.ec2.util.DeviceMappingParser; import hudson.plugins.ec2.util.EC2AgentConfig; import hudson.plugins.ec2.util.EC2AgentFactory; import hudson.plugins.ec2.util.MinimumInstanceChecker; import hudson.plugins.ec2.util.MinimumNumberOfInstancesTimeRangeConfig; import hudson.security.Permission; import hudson.slaves.NodeProperty; import hudson.slaves.NodePropertyDescriptor; import hudson.util.DescribableList; import hudson.util.FormValidation; import hudson.util.ListBoxModel; import hudson.util.Secret; import jenkins.model.Jenkins; import jenkins.model.JenkinsLocationConfiguration; import jenkins.slaves.iterators.api.NodeIterator; import org.apache.commons.lang.StringUtils; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.NoExternalUse; 
import org.kohsuke.stapler.DataBoundConstructor; import org.kohsuke.stapler.DataBoundSetter; import org.kohsuke.stapler.QueryParameter; import org.kohsuke.stapler.Stapler; import org.kohsuke.stapler.interceptor.RequirePOST; import javax.annotation.CheckForNull; import javax.servlet.ServletException; import java.io.IOException; import java.io.PrintStream; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import java.util.stream.Collectors; import java.util.stream.Stream; /** * Template of {@link EC2AbstractSlave} to launch. * * @author Kohsuke Kawaguchi */ public class SlaveTemplate implements Describable<SlaveTemplate> { private static final Logger LOGGER = Logger.getLogger(SlaveTemplate.class.getName()); public String ami; public final String description; public final String zone; public final SpotConfiguration spotConfig; public final String securityGroups; public final String remoteFS; public final InstanceType type; public final boolean ebsOptimized; public final boolean monitoring; public final boolean t2Unlimited; public final String labels; public final Node.Mode mode; public final String initScript; public final String tmpDir; public final String userData; public final String numExecutors; public final String remoteAdmin; public final String jvmopts; public final String subnetId; public final String idleTerminationMinutes; public final String iamInstanceProfile; public final boolean deleteRootOnTermination; public final boolean useEphemeralDevices; public final String customDeviceMapping; public int instanceCap; private 
final int minimumNumberOfInstances; private MinimumNumberOfInstancesTimeRangeConfig minimumNumberOfInstancesTimeRangeConfig; private final int minimumNumberOfSpareInstances; public final boolean stopOnTerminate; private final List<EC2Tag> tags; public ConnectionStrategy connectionStrategy; public HostKeyVerificationStrategyEnum hostKeyVerificationStrategy; public final boolean associatePublicIp; protected transient EC2Cloud parent; public AMITypeData amiType; public int launchTimeout; public boolean connectBySSHProcess; public int maxTotalUses; private /* lazily initialized */ DescribableList<NodeProperty<?>, NodePropertyDescriptor> nodeProperties; public int nextSubnet; public String currentSubnetId; public Tenancy tenancy; public EbsEncryptRootVolume ebsEncryptRootVolume; private transient/* almost final */ Set<LabelAtom> labelSet; private transient/* almost final */Set<String> securityGroupSet; /* FIXME: Ideally these would be List<String>, but Jenkins currently * doesn't offer a usable way to represent those in forms. Instead * the values are interpreted as a comma separated list. * * https://issues.jenkins.io/browse/JENKINS-27901 */ @CheckForNull private String amiOwners; @CheckForNull private String amiUsers; @CheckForNull private List<EC2Filter> amiFilters; /* * Necessary to handle reading from old configurations. 
The UnixData object is created in readResolve() */ @Deprecated public transient String sshPort; @Deprecated public transient String rootCommandPrefix; @Deprecated public transient String slaveCommandPrefix; @Deprecated public transient String slaveCommandSuffix; @Deprecated public boolean usePrivateDnsName; @Deprecated public boolean connectUsingPublicIp; @Deprecated public transient boolean useDedicatedTenancy; @DataBoundConstructor public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, int minimumNumberOfInstances, int minimumNumberOfSpareInstances, String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring, boolean t2Unlimited, ConnectionStrategy connectionStrategy, int maxTotalUses, List<? 
extends NodeProperty<?>> nodeProperties, HostKeyVerificationStrategyEnum hostKeyVerificationStrategy, Tenancy tenancy, EbsEncryptRootVolume ebsEncryptRootVolume) { if(StringUtils.isNotBlank(remoteAdmin) || StringUtils.isNotBlank(jvmopts) || StringUtils.isNotBlank(tmpDir)){ LOGGER.log(Level.FINE, "As remoteAdmin, jvmopts or tmpDir is not blank, we must ensure the user has ADMINISTER rights."); // Can be null during tests Jenkins j = Jenkins.getInstanceOrNull(); if (j != null) j.checkPermission(Jenkins.ADMINISTER); } this.ami = ami; this.zone = zone; this.spotConfig = spotConfig; this.securityGroups = securityGroups; this.remoteFS = remoteFS; this.amiType = amiType; this.type = type; this.ebsOptimized = ebsOptimized; this.labels = Util.fixNull(labelString); this.mode = mode != null ? mode : Node.Mode.NORMAL; this.description = description; this.initScript = initScript; this.tmpDir = tmpDir; this.userData = StringUtils.trimToEmpty(userData); this.numExecutors = Util.fixNull(numExecutors).trim(); this.remoteAdmin = remoteAdmin; this.jvmopts = jvmopts; this.stopOnTerminate = stopOnTerminate; this.subnetId = subnetId; this.tags = tags; this.idleTerminationMinutes = idleTerminationMinutes; this.associatePublicIp = associatePublicIp; this.connectionStrategy = connectionStrategy == null ? 
ConnectionStrategy.PRIVATE_IP : connectionStrategy; this.useDedicatedTenancy = tenancy == Tenancy.Dedicated; this.connectBySSHProcess = connectBySSHProcess; this.maxTotalUses = maxTotalUses; this.nodeProperties = new DescribableList<>(Saveable.NOOP, Util.fixNull(nodeProperties)); this.monitoring = monitoring; this.nextSubnet = 0; this.usePrivateDnsName = this.connectionStrategy.equals(ConnectionStrategy.PRIVATE_DNS); this.connectUsingPublicIp = this.connectionStrategy.equals(ConnectionStrategy.PUBLIC_IP); this.minimumNumberOfInstances = minimumNumberOfInstances; this.minimumNumberOfSpareInstances = minimumNumberOfSpareInstances; if (null == instanceCapStr || instanceCapStr.isEmpty()) { this.instanceCap = Integer.MAX_VALUE; } else { this.instanceCap = Integer.parseInt(instanceCapStr); } try { this.launchTimeout = Integer.parseInt(launchTimeoutStr); } catch (NumberFormatException nfe) { this.launchTimeout = Integer.MAX_VALUE; } this.iamInstanceProfile = iamInstanceProfile; this.deleteRootOnTermination = deleteRootOnTermination; this.useEphemeralDevices = useEphemeralDevices; this.customDeviceMapping = customDeviceMapping; this.t2Unlimited = t2Unlimited; this.hostKeyVerificationStrategy = hostKeyVerificationStrategy != null ? hostKeyVerificationStrategy : HostKeyVerificationStrategyEnum.CHECK_NEW_SOFT; this.tenancy = tenancy != null ? tenancy : Tenancy.Default; this.ebsEncryptRootVolume = ebsEncryptRootVolume != null ? 
ebsEncryptRootVolume : EbsEncryptRootVolume.DEFAULT; readResolve(); // initialize } @Deprecated public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, int minimumNumberOfInstances, int minimumNumberOfSpareInstances, String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring, boolean t2Unlimited, ConnectionStrategy connectionStrategy, int maxTotalUses, List<? extends NodeProperty<?>> nodeProperties, HostKeyVerificationStrategyEnum hostKeyVerificationStrategy, Tenancy tenancy) { this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, minimumNumberOfInstances, minimumNumberOfSpareInstances, instanceCapStr, iamInstanceProfile, deleteRootOnTermination, useEphemeralDevices, launchTimeoutStr, associatePublicIp, customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited, connectionStrategy, maxTotalUses, nodeProperties, hostKeyVerificationStrategy, tenancy, null); } @Deprecated public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, 
String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, int minimumNumberOfInstances, int minimumNumberOfSpareInstances, String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring, boolean t2Unlimited, ConnectionStrategy connectionStrategy, int maxTotalUses, List<? extends NodeProperty<?>> nodeProperties, HostKeyVerificationStrategyEnum hostKeyVerificationStrategy) { this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, minimumNumberOfInstances, minimumNumberOfSpareInstances, instanceCapStr, iamInstanceProfile, deleteRootOnTermination, useEphemeralDevices, launchTimeoutStr, associatePublicIp, customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited, connectionStrategy, maxTotalUses, nodeProperties, hostKeyVerificationStrategy, Tenancy.backwardsCompatible(useDedicatedTenancy)); } @Deprecated public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, int minimumNumberOfInstances, int minimumNumberOfSpareInstances, String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring, boolean 
t2Unlimited, ConnectionStrategy connectionStrategy, int maxTotalUses, List<? extends NodeProperty<?>> nodeProperties) { this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, minimumNumberOfInstances, minimumNumberOfSpareInstances, instanceCapStr, iamInstanceProfile, deleteRootOnTermination, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited, connectionStrategy, maxTotalUses, nodeProperties, null); } @Deprecated public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, int minimumNumberOfInstances, String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring, boolean t2Unlimited, ConnectionStrategy connectionStrategy, int maxTotalUses,List<? 
extends NodeProperty<?>> nodeProperties ) { this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, minimumNumberOfInstances, 0, instanceCapStr, iamInstanceProfile, deleteRootOnTermination, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited, connectionStrategy, maxTotalUses, nodeProperties); } @Deprecated public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, int minimumNumberOfInstances, String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring, boolean t2Unlimited, ConnectionStrategy connectionStrategy, int maxTotalUses) { this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, minimumNumberOfInstances, instanceCapStr, iamInstanceProfile, deleteRootOnTermination, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited, connectionStrategy, maxTotalUses, Collections.emptyList()); } @Deprecated public SlaveTemplate(String ami, String zone, 
// NOTE(review): the @Deprecated constructors above and below form a telescoping chain kept for
// config-file backward compatibility: each older signature delegates to a newer one, filling in
// defaults (0 for minimumNumberOfSpareInstances, Collections.emptyList() for nodeProperties, etc.).
SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring, boolean t2Unlimited, ConnectionStrategy connectionStrategy, int maxTotalUses) { this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, 0, instanceCapStr, iamInstanceProfile, deleteRootOnTermination, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited, connectionStrategy, maxTotalUses); } @Deprecated public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess, boolean connectUsingPublicIp, boolean monitoring, 
// NOTE(review): the legacy usePrivateDnsName/connectUsingPublicIp flags below are mapped onto a
// ConnectionStrategy via ConnectionStrategy.backwardsCompatible(...); maxTotalUses of -1 is passed
// by the oldest overloads -- presumably meaning "unlimited", TODO confirm against EC2AbstractSlave.
boolean t2Unlimited) { this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, instanceCapStr, iamInstanceProfile, deleteRootOnTermination, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited, ConnectionStrategy.backwardsCompatible(usePrivateDnsName, connectUsingPublicIp, associatePublicIp), -1); } public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess, boolean connectUsingPublicIp) { this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, usePrivateDnsName, instanceCapStr, iamInstanceProfile, deleteRootOnTermination, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping, connectBySSHProcess, connectUsingPublicIp, false, false); } public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String 
description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr, String iamInstanceProfile, boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess) { this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, usePrivateDnsName, instanceCapStr, iamInstanceProfile, false, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping, connectBySSHProcess, false); } public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr, String iamInstanceProfile, boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping) { this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, usePrivateDnsName, instanceCapStr, iamInstanceProfile, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping, false); } /** * Backward compatible 
constructor for reloading previous version data */ public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS, String sshPort, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description, String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin, String rootCommandPrefix, String slaveCommandPrefix, String slaveCommandSuffix, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr, String iamInstanceProfile, boolean useEphemeralDevices, String launchTimeoutStr) { this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description, initScript, tmpDir, userData, numExecutors, remoteAdmin, new UnixData(rootCommandPrefix, slaveCommandPrefix, slaveCommandSuffix, sshPort), jvmopts, stopOnTerminate, subnetId, tags, idleTerminationMinutes, usePrivateDnsName, instanceCapStr, iamInstanceProfile, useEphemeralDevices, false, launchTimeoutStr, false, null); } public boolean isConnectBySSHProcess() { // See // src/main/resources/hudson/plugins/ec2/SlaveTemplate/help-connectBySSHProcess.html return connectBySSHProcess; } public EC2Cloud getParent() { return parent; } public String getLabelString() { return labels; } public Node.Mode getMode() { return mode; } public String getDisplayName() { return String.format("EC2 (%s) - %s", parent.getDisplayName(), description); } public String getSlaveName(String instanceId) { final String agentName = String.format("%s (%s)", getDisplayName(), instanceId); try { Jenkins.checkGoodName(agentName); return agentName; } catch (Failure e) { return instanceId; } } String getZone() { return zone; } public String getSecurityGroupString() { return securityGroups; } public Set<String> getSecurityGroupSet() { return securityGroupSet; } public Set<String> parseSecurityGroups() { if 
// NOTE(review): parseSecurityGroups() splits the comma-separated securityGroups string (whitespace
// tolerant); getSshPort() falls back to 22 when the AMI-type-specific port is absent/unparseable.
// getSlaveName() falls back to the bare instance id when Jenkins rejects the formatted name.
(securityGroups == null || "".equals(securityGroups.trim())) { return Collections.emptySet(); } else { return new HashSet<String>(Arrays.asList(securityGroups.split("\\s*,\\s*"))); } } public int getNumExecutors() { try { return Integer.parseInt(numExecutors); } catch (NumberFormatException e) { return EC2AbstractSlave.toNumExecutors(type); } } public int getSshPort() { try { String sshPort = ""; if (amiType.isUnix()) { sshPort = ((UnixData) amiType).getSshPort(); } if (amiType.isMac()) { sshPort = ((MacData) amiType).getSshPort(); } return Integer.parseInt(sshPort); } catch (NumberFormatException e) { return 22; } } public String getRemoteAdmin() { return remoteAdmin; } public String getRootCommandPrefix() { return (amiType.isUnix() ? ((UnixData) amiType).getRootCommandPrefix() : (amiType.isMac() ? ((MacData) amiType).getRootCommandPrefix():"")); } public String getSlaveCommandPrefix() { return (amiType.isUnix() ? ((UnixData) amiType).getSlaveCommandPrefix() : (amiType.isMac() ? ((MacData) amiType).getSlaveCommandPrefix() : "")); } public String getSlaveCommandSuffix() { return (amiType.isUnix() ? ((UnixData) amiType).getSlaveCommandSuffix() : (amiType.isMac() ? ((MacData) amiType).getSlaveCommandSuffix() : "")); } public String chooseSubnetId() { if (StringUtils.isBlank(subnetId)) { return null; } else { String[] subnetIdList= getSubnetId().split(" "); // Round-robin subnet selection. 
// NOTE(review): chooseSubnetId() continues below -- it round-robins over the space-separated
// subnetId list via the nextSubnet index and caches the pick in currentSubnetId;
// chooseSubnetId(false) re-uses the cached value without advancing the rotation.
currentSubnetId = subnetIdList[nextSubnet]; nextSubnet = (nextSubnet + 1) % subnetIdList.length; return currentSubnetId; } } public String chooseSubnetId(boolean rotateSubnet) { if (rotateSubnet) { return chooseSubnetId(); } else { return this.currentSubnetId; } } public String getSubnetId() { return subnetId; } public String getCurrentSubnetId() { return currentSubnetId; } public boolean getAssociatePublicIp() { return associatePublicIp; } @Deprecated @DataBoundSetter public void setConnectUsingPublicIp(boolean connectUsingPublicIp) { this.connectUsingPublicIp = connectUsingPublicIp; this.connectionStrategy = ConnectionStrategy.backwardsCompatible(this.usePrivateDnsName, this.connectUsingPublicIp, this.associatePublicIp); } @Deprecated @DataBoundSetter public void setUsePrivateDnsName(boolean usePrivateDnsName) { this.usePrivateDnsName = usePrivateDnsName; this.connectionStrategy = ConnectionStrategy.backwardsCompatible(this.usePrivateDnsName, this.connectUsingPublicIp, this.associatePublicIp); } @Deprecated public boolean getUsePrivateDnsName() { return usePrivateDnsName; } @Deprecated public boolean isConnectUsingPublicIp() { return connectUsingPublicIp; } public List<EC2Tag> getTags() { if (null == tags) return null; return Collections.unmodifiableList(tags); } public String getidleTerminationMinutes() { return idleTerminationMinutes; } public Set<LabelAtom> getLabelSet() { return labelSet; } public String getAmi() { return ami; } public void setAmi(String ami) { this.ami = ami; } public AMITypeData getAmiType() { return amiType; } public void setAmiType(AMITypeData amiType) { this.amiType = amiType; } public int getMinimumNumberOfInstances() { return minimumNumberOfInstances; } public int getMinimumNumberOfSpareInstances() { return minimumNumberOfSpareInstances; } public MinimumNumberOfInstancesTimeRangeConfig getMinimumNumberOfInstancesTimeRangeConfig() { return minimumNumberOfInstancesTimeRangeConfig; } @DataBoundSetter public void 
setMinimumNumberOfInstancesTimeRangeConfig(MinimumNumberOfInstancesTimeRangeConfig minimumNumberOfInstancesTimeRangeConfig) { this.minimumNumberOfInstancesTimeRangeConfig = minimumNumberOfInstancesTimeRangeConfig; } public int getInstanceCap() { return instanceCap; } public int getSpotBlockReservationDuration() { if (spotConfig == null) return 0; return spotConfig.getSpotBlockReservationDuration(); } public String getSpotBlockReservationDurationStr() { if (spotConfig == null) { return ""; } else { int dur = getSpotBlockReservationDuration(); if (dur == 0) return ""; return String.valueOf(getSpotBlockReservationDuration()); } } public String getInstanceCapStr() { if (instanceCap == Integer.MAX_VALUE) { return ""; } else { return String.valueOf(instanceCap); } } public String getSpotMaxBidPrice() { if (spotConfig == null) return null; return SpotConfiguration.normalizeBid(spotConfig.getSpotMaxBidPrice()); } public String getIamInstanceProfile() { return iamInstanceProfile; } @DataBoundSetter public void setHostKeyVerificationStrategy(HostKeyVerificationStrategyEnum hostKeyVerificationStrategy) { this.hostKeyVerificationStrategy = (hostKeyVerificationStrategy != null) ? hostKeyVerificationStrategy : HostKeyVerificationStrategyEnum.CHECK_NEW_SOFT; } @NonNull public HostKeyVerificationStrategyEnum getHostKeyVerificationStrategy() { return hostKeyVerificationStrategy != null ? 
// NOTE(review): instanceCap of Integer.MAX_VALUE is rendered as "" (unlimited) in the form;
// both the host-key setter and getter default null to CHECK_NEW_SOFT. provision() below routes
// to provisionSpot() only when a spotConfig exists AND creation is allowed by provisionOptions.
hostKeyVerificationStrategy : HostKeyVerificationStrategyEnum.CHECK_NEW_SOFT; } @CheckForNull public String getAmiOwners() { return amiOwners; } @DataBoundSetter public void setAmiOwners(String amiOwners) { this.amiOwners = amiOwners; } @CheckForNull public String getAmiUsers() { return amiUsers; } @DataBoundSetter public void setAmiUsers(String amiUsers) { this.amiUsers = amiUsers; } @CheckForNull public List<EC2Filter> getAmiFilters() { return amiFilters; } @DataBoundSetter public void setAmiFilters(List<EC2Filter> amiFilters) { this.amiFilters = amiFilters; } @Override public String toString() { return "SlaveTemplate{" + "description='" + description + '\'' + ", labels='" + labels + '\'' + '}'; } public int getMaxTotalUses() { return maxTotalUses; } public Tenancy getTenancyAttribute() { return tenancy; } public DescribableList<NodeProperty<?>, NodePropertyDescriptor> getNodeProperties() { return Objects.requireNonNull(nodeProperties); } public enum ProvisionOptions { ALLOW_CREATE, FORCE_CREATE } /** * Provisions a new EC2 agent or starts a previously stopped on-demand instance. * * @return always non-null. This needs to be then added to {@link Hudson#addNode(Node)}. */ @NonNull public List<EC2AbstractSlave> provision(int number, EnumSet<ProvisionOptions> provisionOptions) throws AmazonClientException, IOException { final Image image = getImage(); if (this.spotConfig != null) { if (provisionOptions.contains(ProvisionOptions.ALLOW_CREATE) || provisionOptions.contains(ProvisionOptions.FORCE_CREATE)) return provisionSpot(image, number, provisionOptions); return Collections.emptyList(); } return provisionOndemand(image, number, provisionOptions); } /** * Safely we can pickup only instance that is not known by Jenkins at all. */ private boolean checkInstance(Instance instance) { for (EC2AbstractSlave node : NodeIterator.nodes(EC2AbstractSlave.class)) { if ( (node.getInstanceId().equals(instance.getInstanceId())) && (! 
// NOTE(review): checkInstance() (continued from above) returns true only when no existing Jenkins
// EC2 node already references the instance id -- unless that instance is Stopped, in which case it
// is still considered reclaimable. Also note the typo in isTerminatingOrShuttindDown ("Shuttind");
// renaming it would require touching its caller as well, so it is left as-is here.
(instance.getState().getName().equalsIgnoreCase(InstanceStateName.Stopped.toString()) )) ){ logInstanceCheck(instance, ". false - found existing corresponding Jenkins agent: " + node.getInstanceId()); return false; } } logInstanceCheck(instance, " true - Instance is not connected to Jenkins"); return true; } private void logInstanceCheck(Instance instance, String message) { logProvisionInfo("checkInstance: " + instance.getInstanceId() + "." + message); } private boolean isSameIamInstanceProfile(Instance instance) { return StringUtils.isBlank(getIamInstanceProfile()) || (instance.getIamInstanceProfile() != null && instance.getIamInstanceProfile().getArn().equals(getIamInstanceProfile())); } private boolean isTerminatingOrShuttindDown(String instanceStateName) { return instanceStateName.equalsIgnoreCase(InstanceStateName.Terminated.toString()) || instanceStateName.equalsIgnoreCase(InstanceStateName.ShuttingDown.toString()); } private void logProvisionInfo(String message) { LOGGER.info(this + ". 
" + message); } HashMap<RunInstancesRequest, List<Filter>> makeRunInstancesRequestAndFilters(Image image, int number, AmazonEC2 ec2) throws IOException { return makeRunInstancesRequestAndFilters(image, number, ec2, true); } @Deprecated HashMap<RunInstancesRequest, List<Filter>> makeRunInstancesRequestAndFilters(int number, AmazonEC2 ec2) throws IOException { return makeRunInstancesRequestAndFilters(getImage(), number, ec2); } HashMap<RunInstancesRequest, List<Filter>> makeRunInstancesRequestAndFilters(Image image, int number, AmazonEC2 ec2, boolean rotateSubnet) throws IOException { String imageId = image.getImageId(); RunInstancesRequest riRequest = new RunInstancesRequest(imageId, 1, number).withInstanceType(type); riRequest.setEbsOptimized(ebsOptimized); riRequest.setMonitoring(monitoring); if (t2Unlimited){ CreditSpecificationRequest creditRequest = new CreditSpecificationRequest(); creditRequest.setCpuCredits("unlimited"); riRequest.setCreditSpecification(creditRequest); } setupBlockDeviceMappings(image, riRequest.getBlockDeviceMappings()); if(stopOnTerminate){ riRequest.setInstanceInitiatedShutdownBehavior(ShutdownBehavior.Stop); logProvisionInfo("Setting Instance Initiated Shutdown Behavior : ShutdownBehavior.Stop"); }else{ riRequest.setInstanceInitiatedShutdownBehavior(ShutdownBehavior.Terminate); logProvisionInfo("Setting Instance Initiated Shutdown Behavior : ShutdownBehavior.Terminate"); } List<Filter> diFilters = new ArrayList<>(); diFilters.add(new Filter("image-id").withValues(imageId)); diFilters.add(new Filter("instance-type").withValues(type.toString())); KeyPair keyPair = getKeyPair(ec2); if (keyPair == null){ logProvisionInfo("Could not retrieve a valid key pair."); return null; } riRequest.setUserData(Base64.getEncoder().encodeToString(userData.getBytes(StandardCharsets.UTF_8))); riRequest.setKeyName(keyPair.getKeyName()); diFilters.add(new Filter("key-name").withValues(keyPair.getKeyName())); if (StringUtils.isNotBlank(getZone())) { Placement 
// NOTE(review): continuation of makeRunInstancesRequestAndFilters(): builds the RunInstancesRequest
// alongside a parallel list of DescribeInstances filters (diFilters) so that provisionOndemand()
// can later find matching orphaned/stopped instances it may reuse instead of launching new ones.
placement = new Placement(getZone()); if (getTenancyAttribute().equals(Tenancy.Dedicated)) { placement.setTenancy("dedicated"); } riRequest.setPlacement(placement); diFilters.add(new Filter("availability-zone").withValues(getZone())); } if(getTenancyAttribute().equals(Tenancy.Host)){ Placement placement = new Placement(); placement.setTenancy("host"); riRequest.setPlacement(placement); diFilters.add(new Filter("tenancy").withValues(placement.getTenancy())); }else if(getTenancyAttribute().equals(Tenancy.Default)){ Placement placement = new Placement(); placement.setTenancy("default"); riRequest.setPlacement(placement); diFilters.add(new Filter("tenancy").withValues(placement.getTenancy())); } String subnetId = chooseSubnetId(rotateSubnet); InstanceNetworkInterfaceSpecification net = new InstanceNetworkInterfaceSpecification(); if (StringUtils.isNotBlank(subnetId)) { if (getAssociatePublicIp()) { net.setSubnetId(subnetId); } else { riRequest.setSubnetId(subnetId); } diFilters.add(new Filter("subnet-id").withValues(subnetId)); /* * If we have a subnet ID then we can only use VPC security groups */ if (!getSecurityGroupSet().isEmpty()) { List<String> groupIds = getEc2SecurityGroups(ec2); if (!groupIds.isEmpty()) { if (getAssociatePublicIp()) { net.setGroups(groupIds); } else { riRequest.setSecurityGroupIds(groupIds); } diFilters.add(new Filter("instance.group-id").withValues(groupIds)); } } } else { List<String> groupIds = getSecurityGroupsBy("group-name", securityGroupSet, ec2) .getSecurityGroups() .stream().map(SecurityGroup::getGroupId) .collect(Collectors.toList()); if (getAssociatePublicIp()) { net.setGroups(groupIds); } else { riRequest.setSecurityGroups(securityGroupSet); } if (!groupIds.isEmpty()) { diFilters.add(new Filter("instance.group-id").withValues(groupIds)); } } net.setAssociatePublicIpAddress(getAssociatePublicIp()); net.setDeviceIndex(0); if (getAssociatePublicIp()) { riRequest.withNetworkInterfaces(net); } HashSet<Tag> instTags = 
// NOTE(review): when associatePublicIp is set, subnet and security groups must be attached to the
// network-interface spec instead of the request itself (AWS rejects mixing the two styles).
// Tags are applied both to the instance and its volumes via TagSpecification clones.
buildTags(EC2Cloud.EC2_SLAVE_TYPE_DEMAND); for (Tag tag : instTags) { diFilters.add(new Filter("tag:" + tag.getKey()).withValues(tag.getValue())); } if (StringUtils.isNotBlank(getIamInstanceProfile())) { riRequest.setIamInstanceProfile(new IamInstanceProfileSpecification().withArn(getIamInstanceProfile())); } List<TagSpecification> tagList = new ArrayList<>(); TagSpecification tagSpecification = new TagSpecification(); tagSpecification.setTags(instTags); tagList.add(tagSpecification.clone().withResourceType(ResourceType.Instance)); tagList.add(tagSpecification.clone().withResourceType(ResourceType.Volume)); riRequest.setTagSpecifications(tagList); HashMap<RunInstancesRequest, List<Filter>> ret = new HashMap<>(); ret.put(riRequest, diFilters); return ret; } @Deprecated HashMap<RunInstancesRequest, List<Filter>> makeRunInstancesRequestAndFilters(int number, AmazonEC2 ec2, boolean rotateSubnet) throws IOException { return makeRunInstancesRequestAndFilters(getImage(), number, ec2, rotateSubnet); } /** * Provisions an On-demand EC2 agent by launching a new instance or starting a previously-stopped instance. */ private List<EC2AbstractSlave> provisionOndemand(Image image, int number, EnumSet<ProvisionOptions> provisionOptions) throws IOException { return provisionOndemand(image, number, provisionOptions, false, false); } /** * Provisions an On-demand EC2 agent by launching a new instance or starting a previously-stopped instance. 
*/ private List<EC2AbstractSlave> provisionOndemand(Image image, int number, EnumSet<ProvisionOptions> provisionOptions, boolean spotWithoutBidPrice, boolean fallbackSpotToOndemand) throws IOException { AmazonEC2 ec2 = getParent().connect(); logProvisionInfo("Considering launching"); HashMap<RunInstancesRequest, List<Filter>> runInstancesRequestFilterMap = makeRunInstancesRequestAndFilters(image, number, ec2); Map.Entry<RunInstancesRequest, List<Filter>> entry = runInstancesRequestFilterMap.entrySet().iterator().next(); RunInstancesRequest riRequest = entry.getKey(); List<Filter> diFilters = entry.getValue(); DescribeInstancesRequest diRequest = new DescribeInstancesRequest().withFilters(diFilters); logProvisionInfo("Looking for existing instances with describe-instance: " + diRequest); DescribeInstancesResult diResult = ec2.describeInstances(diRequest); List<Instance> orphansOrStopped = findOrphansOrStopped(diResult, number); if (orphansOrStopped.isEmpty() && !provisionOptions.contains(ProvisionOptions.FORCE_CREATE) && !provisionOptions.contains(ProvisionOptions.ALLOW_CREATE)) { logProvisionInfo("No existing instance found - but cannot create new instance"); return null; } wakeOrphansOrStoppedUp(ec2, orphansOrStopped); if (orphansOrStopped.size() == number) { return toSlaves(orphansOrStopped); } riRequest.setMaxCount(number - orphansOrStopped.size()); List<Instance> newInstances; if (spotWithoutBidPrice) { InstanceMarketOptionsRequest instanceMarketOptionsRequest = new InstanceMarketOptionsRequest().withMarketType(MarketType.Spot); if (getSpotBlockReservationDuration() != 0) { SpotMarketOptions spotOptions = new SpotMarketOptions().withBlockDurationMinutes(getSpotBlockReservationDuration() * 60); instanceMarketOptionsRequest.setSpotOptions(spotOptions); } riRequest.setInstanceMarketOptions(instanceMarketOptionsRequest); try { newInstances = ec2.runInstances(riRequest).getReservation().getInstances(); } catch (AmazonEC2Exception e) { if (fallbackSpotToOndemand && 
// NOTE(review): on InsufficientInstanceCapacity the spot-without-bid request is retried as plain
// on-demand (market options reset). Existing orphaned/stopped instances found earlier are reused
// first; only the shortfall (number - orphansOrStopped.size()) is launched fresh.
e.getErrorCode().equals("InsufficientInstanceCapacity")) { logProvisionInfo("There is no spot capacity available matching your request, falling back to on-demand instance."); riRequest.setInstanceMarketOptions(new InstanceMarketOptionsRequest()); newInstances = ec2.runInstances(riRequest).getReservation().getInstances(); } else { throw e; } } } else { newInstances = ec2.runInstances(riRequest).getReservation().getInstances(); } // Have to create a new instance if (newInstances.isEmpty()) { logProvisionInfo("No new instances were created"); } newInstances.addAll(orphansOrStopped); return toSlaves(newInstances); } void wakeOrphansOrStoppedUp(AmazonEC2 ec2, List<Instance> orphansOrStopped) { List<String> instances = new ArrayList<>(); for(Instance instance : orphansOrStopped) { if (instance.getState().getName().equalsIgnoreCase(InstanceStateName.Stopping.toString()) || instance.getState().getName().equalsIgnoreCase(InstanceStateName.Stopped.toString())) { logProvisionInfo("Found stopped instances - will start it: " + instance); instances.add(instance.getInstanceId()); } else { // Should be pending or running at this point, just let it come up logProvisionInfo("Found existing pending or running: " + instance.getState().getName() + " instance: " + instance); } } if (!instances.isEmpty()) { StartInstancesRequest siRequest = new StartInstancesRequest(instances); StartInstancesResult siResult = ec2.startInstances(siRequest); logProvisionInfo("Result of starting stopped instances:" + siResult); } } List<EC2AbstractSlave> toSlaves(List<Instance> newInstances) throws IOException { try { List<EC2AbstractSlave> slaves = new ArrayList<>(newInstances.size()); for (Instance instance : newInstances) { slaves.add(newOndemandSlave(instance)); logProvisionInfo("Return instance: " + instance); } return slaves; } catch (FormException e) { throw new AssertionError(e); // we should have discovered all // configuration issues upfront } } List<Instance> 
// NOTE(review): findOrphansOrStopped() collects up to `number` reusable instances from the
// describe-instances result, skipping those with a mismatched IAM profile or that are
// terminated/shutting down, and those already attached to Jenkins (checkInstance).
findOrphansOrStopped(DescribeInstancesResult diResult, int number) { List<Instance> orphansOrStopped = new ArrayList<>(); int count = 0; for (Reservation reservation : diResult.getReservations()) { for (Instance instance : reservation.getInstances()) { if (!isSameIamInstanceProfile(instance)) { logInstanceCheck(instance, ". false - IAM Instance profile does not match: " + instance.getIamInstanceProfile()); continue; } if (isTerminatingOrShuttindDown(instance.getState().getName())) { logInstanceCheck(instance, ". false - Instance is terminated or shutting down"); continue; } if (checkInstance(instance)) { logProvisionInfo("Found existing instance: " + instance); orphansOrStopped.add(instance); count++; } if (count == number) { return orphansOrStopped; } } } return orphansOrStopped; } private void setupRootDevice(Image image, List<BlockDeviceMapping> deviceMappings) { if (!"ebs".equals(image.getRootDeviceType())) { return; } // get the root device (only one expected in the blockmappings) final List<BlockDeviceMapping> rootDeviceMappings = image.getBlockDeviceMappings(); if (rootDeviceMappings.size() == 0) { LOGGER.warning("AMI missing block devices"); return; } BlockDeviceMapping rootMapping = rootDeviceMappings.get(0); LOGGER.info("AMI had " + rootMapping.getDeviceName()); LOGGER.info(rootMapping.getEbs().toString()); // Create a shadow of the AMI mapping (doesn't like reusing rootMapping directly) BlockDeviceMapping newMapping = rootMapping.clone(); if (deleteRootOnTermination) { // Check if the root device is already in the mapping and update it for (final BlockDeviceMapping mapping : deviceMappings) { LOGGER.info("Request had " + mapping.getDeviceName()); if (rootMapping.getDeviceName().equals(mapping.getDeviceName())) { mapping.getEbs().setDeleteOnTermination(Boolean.TRUE); return; } } // pass deleteRootOnTermination to shadow of the AMI mapping newMapping.getEbs().setDeleteOnTermination(Boolean.TRUE); } 
// NOTE(review): setupRootDevice() finishes below by applying the configured EBS root-volume
// encryption flag to the cloned mapping and inserting it first. getNewEphemeralDeviceMapping()
// assigns up to four ephemeralN volumes to the first free /dev/xvd[b-z] device names.
newMapping.getEbs().setEncrypted(ebsEncryptRootVolume.getValue()); String message = String.format("EBS default encryption value set to: %s (%s)", ebsEncryptRootVolume.getDisplayText(), ebsEncryptRootVolume.getValue()); logProvisionInfo(message); deviceMappings.add(0, newMapping); } private List<BlockDeviceMapping> getNewEphemeralDeviceMapping(Image image) { final List<BlockDeviceMapping> oldDeviceMapping = image.getBlockDeviceMappings(); final Set<String> occupiedDevices = new HashSet<>(); for (final BlockDeviceMapping mapping : oldDeviceMapping) { occupiedDevices.add(mapping.getDeviceName()); } final List<String> available = new ArrayList<>( Arrays.asList("ephemeral0", "ephemeral1", "ephemeral2", "ephemeral3")); final List<BlockDeviceMapping> newDeviceMapping = new ArrayList<>(4); for (char suffix = 'b'; suffix <= 'z' && !available.isEmpty(); suffix++) { final String deviceName = String.format("/dev/xvd%s", suffix); if (occupiedDevices.contains(deviceName)) continue; final BlockDeviceMapping newMapping = new BlockDeviceMapping().withDeviceName(deviceName).withVirtualName( available.get(0)); newDeviceMapping.add(newMapping); available.remove(0); } return newDeviceMapping; } private void setupEphemeralDeviceMapping(Image image, List<BlockDeviceMapping> deviceMappings) { // Don't wipe out pre-existing mappings deviceMappings.addAll(getNewEphemeralDeviceMapping(image)); } @NonNull private static List<String> makeImageAttributeList(@CheckForNull String attr) { return Stream.of(Util.tokenize(Util.fixNull(attr))) .collect(Collectors.toList()); } @NonNull private DescribeImagesRequest makeDescribeImagesRequest() throws AmazonClientException { List<String> imageIds = Util.fixEmptyAndTrim(ami) == null ? 
Collections.emptyList() : Collections.singletonList(ami); List<String> owners = makeImageAttributeList(amiOwners); List<String> users = makeImageAttributeList(amiUsers); List<Filter> filters = EC2Filter.toFilterList(amiFilters); // Raise an exception if there were no search attributes. // This is legal but not what anyone wants - it will // launch random recently created public AMIs. int numAttrs = Stream.of(imageIds, owners, users, filters) .collect(Collectors.summingInt(List::size)); if (numAttrs == 0) { throw new AmazonClientException("Neither AMI ID nor AMI search attributes provided"); } return new DescribeImagesRequest() .withImageIds(imageIds) .withOwners(owners) .withExecutableUsers(users) .withFilters(filters); } @NonNull private Image getImage() throws AmazonClientException { DescribeImagesRequest request = makeDescribeImagesRequest(); LOGGER.info("Getting image for request " + request); List<Image> images = getParent().connect().describeImages(request).getImages(); if (images.isEmpty()) { throw new AmazonClientException("Unable to find image for request " + request); } // Sort in reverse by creation date to get latest image images.sort(Comparator.comparing(Image::getCreationDate).reversed()); return images.get(0); } private void setupCustomDeviceMapping(List<BlockDeviceMapping> deviceMappings) { if (StringUtils.isNotBlank(customDeviceMapping)) { deviceMappings.addAll(DeviceMappingParser.parse(customDeviceMapping)); } } /** * Provision a new agent for an EC2 spot instance to call back to Jenkins */ private List<EC2AbstractSlave> provisionSpot(Image image, int number, EnumSet<ProvisionOptions> provisionOptions) throws IOException { if (!spotConfig.useBidPrice) { return provisionOndemand(image, 1, provisionOptions, true, spotConfig.getFallbackToOndemand()); } AmazonEC2 ec2 = getParent().connect(); String imageId = image.getImageId(); try { LOGGER.info("Launching " + imageId + " for template " + description); KeyPair keyPair = getKeyPair(ec2); 
// NOTE(review): provisionSpot() delegates to provisionOndemand(spotWithoutBidPrice=true) when no
// bid price is configured; otherwise it builds a RequestSpotInstancesRequest. As in the on-demand
// path, a blank subnet means security groups are resolved by name to VPC group ids.
RequestSpotInstancesRequest spotRequest = new RequestSpotInstancesRequest(); // Validate spot bid before making the request if (getSpotMaxBidPrice() == null) { throw new AmazonClientException("Invalid Spot price specified: " + getSpotMaxBidPrice()); } spotRequest.setSpotPrice(getSpotMaxBidPrice()); spotRequest.setInstanceCount(number); LaunchSpecification launchSpecification = new LaunchSpecification(); launchSpecification.setImageId(imageId); launchSpecification.setInstanceType(type); launchSpecification.setEbsOptimized(ebsOptimized); launchSpecification.setMonitoringEnabled(monitoring); if (StringUtils.isNotBlank(getZone())) { SpotPlacement placement = new SpotPlacement(getZone()); launchSpecification.setPlacement(placement); } InstanceNetworkInterfaceSpecification net = new InstanceNetworkInterfaceSpecification(); String subnetId = chooseSubnetId(); if (StringUtils.isNotBlank(subnetId)) { net.setSubnetId(subnetId); /* * If we have a subnet ID then we can only use VPC security groups */ if (!securityGroupSet.isEmpty()) { List<String> groupIds = getEc2SecurityGroups(ec2); if (!groupIds.isEmpty()) { net.setGroups(groupIds); } } } else { if (!securityGroupSet.isEmpty()) { List<String> groupIds = getSecurityGroupsBy("group-name", securityGroupSet, ec2) .getSecurityGroups() .stream().map(SecurityGroup::getGroupId) .collect(Collectors.toList()); net.setGroups(groupIds); } } String userDataString = Base64.getEncoder().encodeToString(userData.getBytes(StandardCharsets.UTF_8)); launchSpecification.setUserData(userDataString); launchSpecification.setKeyName(keyPair.getKeyName()); launchSpecification.setInstanceType(type.toString()); net.setAssociatePublicIpAddress(getAssociatePublicIp()); net.setDeviceIndex(0); launchSpecification.withNetworkInterfaces(net); HashSet<Tag> instTags = buildTags(EC2Cloud.EC2_SLAVE_TYPE_SPOT); if (StringUtils.isNotBlank(getIamInstanceProfile())) { launchSpecification.setIamInstanceProfile(new 
// FIX(review): the FormException catch at the end of provisionSpot() threw a bare
// AssertionError(), discarding the cause; it now throws new AssertionError(e) so the original
// configuration failure is preserved, matching the equivalent catch in toSlaves().
IamInstanceProfileSpecification().withArn(getIamInstanceProfile())); } setupBlockDeviceMappings(image, launchSpecification.getBlockDeviceMappings()); spotRequest.setLaunchSpecification(launchSpecification); if (getSpotBlockReservationDuration() != 0) { spotRequest.setBlockDurationMinutes(getSpotBlockReservationDuration() * 60); } RequestSpotInstancesResult reqResult; try { // Make the request for a new Spot instance reqResult = ec2.requestSpotInstances(spotRequest); } catch (AmazonEC2Exception e) { if (spotConfig.getFallbackToOndemand() && e.getErrorCode().equals("MaxSpotInstanceCountExceeded")) { logProvisionInfo("There is no spot capacity available matching your request, falling back to on-demand instance."); return provisionOndemand(image, number, provisionOptions); } else { throw e; } } List<SpotInstanceRequest> reqInstances = reqResult.getSpotInstanceRequests(); if (reqInstances.isEmpty()) { throw new AmazonClientException("No spot instances found"); } List<EC2AbstractSlave> slaves = new ArrayList<>(reqInstances.size()); for(SpotInstanceRequest spotInstReq : reqInstances) { if (spotInstReq == null) { throw new AmazonClientException("Spot instance request is null"); } String slaveName = spotInstReq.getSpotInstanceRequestId(); if (spotConfig.getFallbackToOndemand()) { for (int i = 0; i < 2 && spotInstReq.getStatus().getCode().equals("pending-evaluation"); i++) { LOGGER.info("Spot request " + slaveName + " is still pending evaluation"); Thread.sleep(5000); LOGGER.info("Fetching info about spot request " + slaveName); DescribeSpotInstanceRequestsRequest describeRequest = new DescribeSpotInstanceRequestsRequest().withSpotInstanceRequestIds(slaveName); spotInstReq = ec2.describeSpotInstanceRequests(describeRequest).getSpotInstanceRequests().get(0); } List<String> spotRequestBadCodes = Arrays.asList("capacity-not-available", "capacity-oversubscribed", "price-too-low"); if (spotRequestBadCodes.contains(spotInstReq.getStatus().getCode())) { LOGGER.info("There is no 
spot capacity available matching your request, falling back to on-demand instance."); List<String> requestsToCancel = reqInstances.stream().map(SpotInstanceRequest::getSpotInstanceRequestId).collect(Collectors.toList()); CancelSpotInstanceRequestsRequest cancelRequest = new CancelSpotInstanceRequestsRequest(requestsToCancel); ec2.cancelSpotInstanceRequests(cancelRequest); return provisionOndemand(image, number, provisionOptions); } } // Now that we have our Spot request, we can set tags on it updateRemoteTags(ec2, instTags, "InvalidSpotInstanceRequestID.NotFound", spotInstReq.getSpotInstanceRequestId()); // That was a remote request - we should also update our local instance data spotInstReq.setTags(instTags); LOGGER.info("Spot instance id in provision: " + spotInstReq.getSpotInstanceRequestId()); slaves.add(newSpotSlave(spotInstReq)); } return slaves; } catch (FormException e) { throw new AssertionError(e); // we should have discovered all // configuration issues upfront } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } } private void setupBlockDeviceMappings(Image image, List<BlockDeviceMapping> blockDeviceMappings) { setupRootDevice(image, blockDeviceMappings); if (useEphemeralDevices) { setupEphemeralDeviceMapping(image, blockDeviceMappings); } else { setupCustomDeviceMapping(blockDeviceMappings); } } private HashSet<Tag> buildTags(String slaveType) { boolean hasCustomTypeTag = false; boolean hasJenkinsServerUrlTag = false; HashSet<Tag> instTags = new HashSet<>(); if (tags != null && !tags.isEmpty()) { for (EC2Tag t : tags) { instTags.add(new Tag(t.getName(), t.getValue())); if (StringUtils.equals(t.getName(), EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE)) { hasCustomTypeTag = true; } if (StringUtils.equals(t.getName(), EC2Tag.TAG_NAME_JENKINS_SERVER_URL)) { hasJenkinsServerUrlTag = true; } } } if (!hasCustomTypeTag) { instTags.add(new Tag(EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE, EC2Cloud.getSlaveTypeTagValue( slaveType, 
// NOTE(review): buildTags() guarantees the Jenkins slave-type and (when the Jenkins URL is
// configured) server-url tags are present unless the user supplied custom ones.
description))); } JenkinsLocationConfiguration jenkinsLocation = JenkinsLocationConfiguration.get(); if (!hasJenkinsServerUrlTag && jenkinsLocation.getUrl() != null) { instTags.add(new Tag(EC2Tag.TAG_NAME_JENKINS_SERVER_URL, jenkinsLocation.getUrl())); } return instTags; } protected EC2OndemandSlave newOndemandSlave(Instance inst) throws FormException, IOException { EC2AgentConfig.OnDemand config = new EC2AgentConfig.OnDemandBuilder() .withName(getSlaveName(inst.getInstanceId())) .withInstanceId(inst.getInstanceId()) .withDescription(description) .withRemoteFS(remoteFS) .withNumExecutors(getNumExecutors()) .withLabelString(labels) .withMode(mode) .withInitScript(initScript) .withTmpDir(tmpDir) .withNodeProperties(nodeProperties.toList()) .withRemoteAdmin(remoteAdmin) .withJvmopts(jvmopts) .withStopOnTerminate(stopOnTerminate) .withIdleTerminationMinutes(idleTerminationMinutes) .withPublicDNS(inst.getPublicDnsName()) .withPrivateDNS(inst.getPrivateDnsName()) .withTags(EC2Tag.fromAmazonTags(inst.getTags())) .withCloudName(parent.name) .withLaunchTimeout(getLaunchTimeout()) .withAmiType(amiType) .withConnectionStrategy(connectionStrategy) .withMaxTotalUses(maxTotalUses) .withTenancyAttribute(tenancy) .build(); return EC2AgentFactory.getInstance().createOnDemandAgent(config); } protected EC2SpotSlave newSpotSlave(SpotInstanceRequest sir) throws FormException, IOException { EC2AgentConfig.Spot config = new EC2AgentConfig.SpotBuilder() .withName(getSlaveName(sir.getSpotInstanceRequestId())) .withSpotInstanceRequestId(sir.getSpotInstanceRequestId()) .withDescription(description) .withRemoteFS(remoteFS) .withNumExecutors(getNumExecutors()) .withMode(mode) .withInitScript(initScript) .withTmpDir(tmpDir) .withLabelString(labels) .withNodeProperties(nodeProperties.toList()) .withRemoteAdmin(remoteAdmin) .withJvmopts(jvmopts) .withIdleTerminationMinutes(idleTerminationMinutes) .withTags(EC2Tag.fromAmazonTags(sir.getTags())) .withCloudName(parent.name) 
.withLaunchTimeout(getLaunchTimeout()) .withAmiType(amiType) .withConnectionStrategy(connectionStrategy) .withMaxTotalUses(maxTotalUses) .build(); return EC2AgentFactory.getInstance().createSpotAgent(config); } /** * Get a KeyPair from the configured information for the agent template */ @CheckForNull private KeyPair getKeyPair(AmazonEC2 ec2) throws IOException, AmazonClientException { EC2PrivateKey ec2PrivateKey = getParent().resolvePrivateKey(); if (ec2PrivateKey == null) { throw new AmazonClientException("No keypair credential found. Please configure a credential in the Jenkins configuration."); } KeyPair keyPair = ec2PrivateKey.find(ec2); if (keyPair == null) { throw new AmazonClientException("No matching keypair found on EC2. Is the EC2 private key a valid one?"); } return keyPair; } /** * Update the tags stored in EC2 with the specified information. Re-try 5 times if instances isn't up by * catchErrorCode - e.g. InvalidSpotInstanceRequestID.NotFound or InvalidInstanceRequestID.NotFound * * @param ec2 * @param instTags * @param catchErrorCode * @param params * @throws InterruptedException */ private void updateRemoteTags(AmazonEC2 ec2, Collection<Tag> instTags, String catchErrorCode, String... 
params) throws InterruptedException { for (int i = 0; i < 5; i++) { try { CreateTagsRequest tagRequest = new CreateTagsRequest(); tagRequest.withResources(params).setTags(instTags); ec2.createTags(tagRequest); break; } catch (AmazonServiceException e) { if (e.getErrorCode().equals(catchErrorCode)) { Thread.sleep(5000); continue; } LOGGER.log(Level.SEVERE, e.getErrorMessage(), e); } } } /** * Get a list of security group ids for the agent */ private List<String> getEc2SecurityGroups(AmazonEC2 ec2) throws AmazonClientException { List<String> groupIds = new ArrayList<>(); DescribeSecurityGroupsResult groupResult = getSecurityGroupsBy("group-name", securityGroupSet, ec2); if (groupResult.getSecurityGroups().size() == 0) { groupResult = getSecurityGroupsBy("group-id", securityGroupSet, ec2); } for (SecurityGroup group : groupResult.getSecurityGroups()) { if (group.getVpcId() != null && !group.getVpcId().isEmpty()) { List<Filter> filters = new ArrayList<>(); filters.add(new Filter("vpc-id").withValues(group.getVpcId())); filters.add(new Filter("state").withValues("available")); filters.add(new Filter("subnet-id").withValues(getCurrentSubnetId())); DescribeSubnetsRequest subnetReq = new DescribeSubnetsRequest(); subnetReq.withFilters(filters); DescribeSubnetsResult subnetResult = ec2.describeSubnets(subnetReq); List<Subnet> subnets = subnetResult.getSubnets(); if (subnets != null && !subnets.isEmpty()) { groupIds.add(group.getGroupId()); } } } if (securityGroupSet.size() != groupIds.size()) { throw new AmazonClientException("Security groups must all be VPC security groups to work in a VPC context"); } return groupIds; } private DescribeSecurityGroupsResult getSecurityGroupsBy(String filterName, Set<String> filterValues, AmazonEC2 ec2) { DescribeSecurityGroupsRequest groupReq = new DescribeSecurityGroupsRequest(); groupReq.withFilters(new Filter(filterName).withValues(filterValues)); return ec2.describeSecurityGroups(groupReq); } /** * Provisions a new EC2 agent based on 
the currently running instance on EC2, instead of starting a new one.
 */
public EC2AbstractSlave attach(String instanceId, TaskListener listener) throws AmazonClientException, IOException {
    PrintStream logger = listener.getLogger();
    AmazonEC2 ec2 = getParent().connect();

    try {
        logger.println("Attaching to " + instanceId);
        LOGGER.info("Attaching to " + instanceId);
        DescribeInstancesRequest request = new DescribeInstancesRequest();
        request.setInstanceIds(Collections.singletonList(instanceId));
        // Look up the running instance and wrap it as an on-demand agent node.
        Instance inst = ec2.describeInstances(request).getReservations().get(0).getInstances().get(0);
        return newOndemandSlave(inst);
    } catch (FormException e) {
        throw new AssertionError(); // we should have discovered all
                                    // configuration issues upfront
    }
}

/**
 * Initializes data structure that we don't persist.
 * Invoked by XStream after deserializing a persisted configuration; also migrates
 * values from older plugin releases to their current representation.
 */
protected Object readResolve() {
    Jenkins.get().checkPermission(Jenkins.ADMINISTER);

    labelSet = Label.parse(labels);
    securityGroupSet = parseSecurityGroups();

    /**
     * In releases of this plugin prior to 1.18, template-specific instance caps could be configured but were not
     * enforced. As a result, it was possible to have the instance cap for a template be configured to 0 (zero) with
     * no ill effects. Starting with version 1.18, template-specific instance caps are enforced, so if a
     * configuration has a cap of zero for a template, no instances will be launched from that template. Since there
     * is no practical value of intentionally setting the cap to zero, this block will override such a setting to a
     * value that means 'no cap'.
     */
    if (instanceCap == 0) {
        instanceCap = Integer.MAX_VALUE;
    }

    if (amiType == null) {
        amiType = new UnixData(rootCommandPrefix, slaveCommandPrefix, slaveCommandSuffix, sshPort);
    }

    // 1.43 new parameters
    if (connectionStrategy == null ) {
        connectionStrategy = ConnectionStrategy.backwardsCompatible(usePrivateDnsName, connectUsingPublicIp, associatePublicIp);
    }

    // maxTotalUses of 0 predates the feature; -1 means "unlimited".
    if (maxTotalUses == 0) {
        maxTotalUses = -1;
    }

    if (nodeProperties == null) {
        nodeProperties = new DescribableList<>(Saveable.NOOP);
    }

    if (tenancy == null) {
        tenancy = Tenancy.Default;
    }

    // migration of old value to new variable.
    if (useDedicatedTenancy) {
        tenancy = Tenancy.Dedicated;
    }

    if (ebsEncryptRootVolume == null) {
        ebsEncryptRootVolume = EbsEncryptRootVolume.DEFAULT;
    }

    return this;
}

public Descriptor<SlaveTemplate> getDescriptor() {
    return Jenkins.get().getDescriptor(getClass());
}

// A non-positive configured timeout means "wait forever".
public int getLaunchTimeout() {
    return launchTimeout <= 0 ? Integer.MAX_VALUE : launchTimeout;
}

// Form-binding view of the timeout; MAX_VALUE renders as an empty field.
public String getLaunchTimeoutStr() {
    if (launchTimeout == Integer.MAX_VALUE) {
        return "";
    } else {
        return String.valueOf(launchTimeout);
    }
}

public boolean isWindowsSlave() {
    return amiType.isWindows();
}

public boolean isUnixSlave() {
    return amiType.isUnix();
}

public boolean isMacAgent() {
    return amiType.isMac();
}

public Secret getAdminPassword() {
    return amiType.isWindows() ?
((WindowsData) amiType).getPassword() : Secret.fromString("");
}

public boolean isUseHTTPS() {
    return amiType.isWindows() && ((WindowsData) amiType).isUseHTTPS();
}

/**
 *
 * @param ec2
 * @param allSubnets if true, uses all subnets defined for this SlaveTemplate as the filter, else will only use the current subnet
 * @return DescribeInstancesResult of DescribeInstanceRequst constructed from this SlaveTemplate's configs
 */
DescribeInstancesResult getDescribeInstanceResult(AmazonEC2 ec2, boolean allSubnets) throws IOException {
    HashMap<RunInstancesRequest, List<Filter>> runInstancesRequestFilterMap = makeRunInstancesRequestAndFilters(getImage(), 1, ec2, false);
    Map.Entry<RunInstancesRequest, List<Filter>> entry = runInstancesRequestFilterMap.entrySet().iterator().next();
    List<Filter> diFilters = entry.getValue();
    if (allSubnets) {
        /* remove any existing subnet-id filters */
        List<Filter> rmvFilters = new ArrayList<>();
        for (Filter f : diFilters) {
            if (f.getName().equals("subnet-id")) {
                rmvFilters.add(f);
            }
        }
        for (Filter f : rmvFilters) {
            diFilters.remove(f);
        }
        /* Add filter using all subnets defined for this SlaveTemplate */
        Filter subnetFilter = new Filter("subnet-id");
        subnetFilter.setValues(Arrays.asList(getSubnetId().split(" ")));
        diFilters.add(subnetFilter);
    }
    DescribeInstancesRequest diRequest = new DescribeInstancesRequest().withFilters(diFilters);
    return ec2.describeInstances(diRequest);
}

public boolean isAllowSelfSignedCertificate() {
    return amiType.isWindows() && ((WindowsData) amiType).isAllowSelfSignedCertificate();
}

/**
 * Re-checks minimum-instance requirements whenever the Jenkins configuration is saved.
 */
@Extension
public static final class OnSaveListener extends SaveableListener {
    @Override
    public void onChange(Saveable o, XmlFile file) {
        if (o instanceof Jenkins) {
            MinimumInstanceChecker.checkForMinimumInstances();
        }
    }
}

@Extension
public static final class DescriptorImpl extends Descriptor<SlaveTemplate> {

    @Override
    public String getDisplayName() {
        return "";
    }

    public List<Descriptor<AMITypeData>> getAMITypeDescriptors() {
        return Jenkins.get().getDescriptorList(AMITypeData.class);
    }

    /**
     * Since this shares much of the configuration with {@link EC2Computer}, check its help page, too.
     */
    @Override
    public String getHelpFile(String fieldName) {
        String p = super.getHelpFile(fieldName);
        if (p != null) return p;
        Descriptor slaveDescriptor = Jenkins.get().getDescriptor(EC2OndemandSlave.class);
        if (slaveDescriptor != null) {
            p = slaveDescriptor.getHelpFile(fieldName);
            if (p != null) return p;
        }
        slaveDescriptor = Jenkins.get().getDescriptor(EC2SpotSlave.class);
        if (slaveDescriptor != null) return slaveDescriptor.getHelpFile(fieldName);
        return null;
    }

    @Restricted(NoExternalUse.class)
    public FormValidation doCheckDescription(@QueryParameter String value) {
        try {
            Jenkins.checkGoodName(value);
            return FormValidation.ok();
        } catch (Failure e) {
            return FormValidation.error(e.getMessage());
        }
    }

    // Only administrators may set a remote admin user name.
    @Restricted(NoExternalUse.class)
    public FormValidation doCheckRemoteAdmin(@QueryParameter String value){
        if(StringUtils.isBlank(value) || Jenkins.get().hasPermission(Jenkins.ADMINISTER)){
            return FormValidation.ok();
        }else{
            return FormValidation.error(Messages.General_MissingPermission());
        }
    }

    // Only administrators may set a custom temporary directory.
    @Restricted(NoExternalUse.class)
    public FormValidation doCheckTmpDir(@QueryParameter String value){
        if(StringUtils.isBlank(value) || Jenkins.get().hasPermission(Jenkins.ADMINISTER)){
            return FormValidation.ok();
        } else {
            return FormValidation.error(Messages.General_MissingPermission());
        }
    }

    // Only administrators may set custom JVM options.
    @Restricted(NoExternalUse.class)
    public FormValidation doCheckJvmopts(@QueryParameter String value){
        if(StringUtils.isBlank(value) || Jenkins.get().hasPermission(Jenkins.ADMINISTER)){
            return FormValidation.ok();
        } else {
            return FormValidation.error(Messages.General_MissingPermission());
        }
    }

    /***
     * Check that the AMI requested is available in the cloud and can be used.
*/ @RequirePOST public FormValidation doValidateAmi(@QueryParameter boolean useInstanceProfileForCredentials, @QueryParameter String credentialsId, @QueryParameter String ec2endpoint, @QueryParameter String region, final @QueryParameter String ami, @QueryParameter String roleArn, @QueryParameter String roleSessionName) throws IOException { checkPermission(EC2Cloud.PROVISION); AWSCredentialsProvider credentialsProvider = EC2Cloud.createCredentialsProvider(useInstanceProfileForCredentials, credentialsId, roleArn, roleSessionName, region); AmazonEC2 ec2; if (region != null) { ec2 = AmazonEC2Factory.getInstance().connect(credentialsProvider, AmazonEC2Cloud.getEc2EndpointUrl(region)); } else { ec2 = AmazonEC2Factory.getInstance().connect(credentialsProvider, new URL(ec2endpoint)); } try { Image img = CloudHelper.getAmiImage(ec2, ami); if (img == null) { return FormValidation.error("No such AMI, or not usable with this accessId: " + ami); } String ownerAlias = img.getImageOwnerAlias(); return FormValidation.ok(img.getImageLocation() + (ownerAlias != null ? 
" by " + ownerAlias : "")); } catch (AmazonClientException e) { return FormValidation.error(e.getMessage()); } } private void checkPermission(Permission p) { final EC2Cloud ancestorObject = Stapler.getCurrentRequest().findAncestorObject(EC2Cloud.class); if (ancestorObject != null) { ancestorObject.checkPermission(p); } else { Jenkins.get().checkPermission(p); } } public FormValidation doCheckLabelString(@QueryParameter String value, @QueryParameter Node.Mode mode) { if (mode == Node.Mode.EXCLUSIVE && (value == null || value.trim().isEmpty())) { return FormValidation.warning("You may want to assign labels to this node;" + " it's marked to only run jobs that are exclusively tied to itself or a label."); } return FormValidation.ok(); } public FormValidation doCheckIdleTerminationMinutes(@QueryParameter String value) { if (value == null || value.trim().isEmpty()) return FormValidation.ok(); try { int val = Integer.parseInt(value); if (val >= -59) return FormValidation.ok(); } catch (NumberFormatException nfe) { } return FormValidation.error("Idle Termination time must be a greater than -59 (or null)"); } public FormValidation doCheckMaxTotalUses(@QueryParameter String value) { try { int val = Integer.parseInt(value); if (val >= -1) return FormValidation.ok(); } catch (NumberFormatException nfe) { } return FormValidation.error("Maximum Total Uses must be greater or equal to -1"); } public FormValidation doCheckMinimumNumberOfInstances(@QueryParameter String value, @QueryParameter String instanceCapStr) { if (value == null || value.trim().isEmpty()) return FormValidation.ok(); try { int val = Integer.parseInt(value); if (val >= 0) { int instanceCap; try { instanceCap = Integer.parseInt(instanceCapStr); } catch (NumberFormatException ignore) { instanceCap = Integer.MAX_VALUE; } if (val > instanceCap) { return FormValidation .error("Minimum number of instances must not be larger than AMI Instance Cap %d", instanceCap); } return FormValidation.ok(); } } catch 
(NumberFormatException ignore) { } return FormValidation.error("Minimum number of instances must be a non-negative integer (or null)"); } public FormValidation doCheckMinimumNoInstancesActiveTimeRangeFrom(@QueryParameter String value) { try { MinimumNumberOfInstancesTimeRangeConfig.validateLocalTimeString(value); return FormValidation.ok(); } catch (IllegalArgumentException e) { return FormValidation.error("Please enter value in format 'h:mm a' or 'HH:mm'"); } } public FormValidation doCheckMinimumNoInstancesActiveTimeRangeTo(@QueryParameter String value) { try { MinimumNumberOfInstancesTimeRangeConfig.validateLocalTimeString(value); return FormValidation.ok(); } catch (IllegalArgumentException e) { return FormValidation.error("Please enter value in format 'h:mm a' or 'HH:mm'"); } } // For some reason, all days will validate against this method so no need to repeat for each day. public FormValidation doCheckMonday(@QueryParameter boolean monday, @QueryParameter boolean tuesday, @QueryParameter boolean wednesday, @QueryParameter boolean thursday, @QueryParameter boolean friday, @QueryParameter boolean saturday, @QueryParameter boolean sunday) { if (!(monday || tuesday || wednesday || thursday || friday || saturday || sunday)) { return FormValidation.warning("At least one day should be checked or minimum number of instances won't be active"); } return FormValidation.ok(); } public FormValidation doCheckMinimumNumberOfSpareInstances(@QueryParameter String value, @QueryParameter String instanceCapStr) { if (value == null || value.trim().isEmpty()) return FormValidation.ok(); try { int val = Integer.parseInt(value); if (val >= 0) { int instanceCap; try { instanceCap = Integer.parseInt(instanceCapStr); } catch (NumberFormatException ignore) { instanceCap = Integer.MAX_VALUE; } if (val > instanceCap) { return FormValidation .error("Minimum number of spare instances must not be larger than AMI Instance Cap %d", instanceCap); } return FormValidation.ok(); } } catch 
(NumberFormatException ignore) {
        }
        return FormValidation.error("Minimum number of spare instances must be a non-negative integer (or null)");
    }

    public FormValidation doCheckInstanceCapStr(@QueryParameter String value) {
        if (value == null || value.trim().isEmpty()) return FormValidation.ok();
        try {
            int val = Integer.parseInt(value);
            // NOTE(review): the check requires a strictly positive value but the error
            // message says "non-negative" — confirm which is intended.
            if (val > 0) return FormValidation.ok();
        } catch (NumberFormatException nfe) {
        }
        return FormValidation.error("InstanceCap must be a non-negative integer (or null)");
    }

    /*
     * Validate the Spot Block Duration to be between 0 & 6 hours as specified in the AWS API
     */
    public FormValidation doCheckSpotBlockReservationDurationStr(@QueryParameter String value) {
        if (value == null || value.trim().isEmpty()) return FormValidation.ok();
        try {
            int val = Integer.parseInt(value);
            if (val >= 0 && val <= 6) return FormValidation.ok();
        } catch (NumberFormatException nfe) {
        }
        return FormValidation.error("Spot Block Reservation Duration must be an integer between 0 & 6");
    }

    public FormValidation doCheckLaunchTimeoutStr(@QueryParameter String value) {
        if (value == null || value.trim().isEmpty()) return FormValidation.ok();
        try {
            int val = Integer.parseInt(value);
            if (val >= 0) return FormValidation.ok();
        } catch (NumberFormatException nfe) {
        }
        return FormValidation.error("Launch Timeout must be a non-negative integer (or null)");
    }

    @RequirePOST
    public ListBoxModel doFillZoneItems(@QueryParameter boolean useInstanceProfileForCredentials,
            @QueryParameter String credentialsId, @QueryParameter String region, @QueryParameter String roleArn,
            @QueryParameter String roleSessionName) throws IOException, ServletException {
        checkPermission(EC2Cloud.PROVISION);
        AWSCredentialsProvider credentialsProvider = EC2Cloud.createCredentialsProvider(useInstanceProfileForCredentials, credentialsId, roleArn, roleSessionName, region);
        return EC2AbstractSlave.fillZoneItems(credentialsProvider, region);
    }

    public String getDefaultTenancy() {
        // new templates default to the most secure strategy
        return Tenancy.Default.name();
    }

    /*
     * Validate the Spot Max Bid Price to ensure that it is a floating point number >= .001
     */
    public FormValidation doCheckSpotMaxBidPrice(@QueryParameter String spotMaxBidPrice) {
        if (SpotConfiguration.normalizeBid(spotMaxBidPrice) != null) {
            return FormValidation.ok();
        }
        return FormValidation.error("Not a correct bid price");
    }

    public String getDefaultConnectionStrategy() {
        return ConnectionStrategy.PRIVATE_IP.name();
    }

    public List<NodePropertyDescriptor> getNodePropertyDescriptors() {
        return NodePropertyDescriptor.for_(NodeProperty.all(), EC2AbstractSlave.class);
    }

    // Populates the connection-strategy dropdown, pre-selecting the saved value.
    public ListBoxModel doFillConnectionStrategyItems(@QueryParameter String connectionStrategy) {
        return Stream.of(ConnectionStrategy.values())
                .map(v -> {
                    if (v.name().equals(connectionStrategy)) {
                        return new ListBoxModel.Option(v.toString(), v.name(), true);
                    } else {
                        return new ListBoxModel.Option(v.toString(), v.name(), false);
                    }
                })
                .collect(Collectors.toCollection(ListBoxModel::new));
    }

    public FormValidation doCheckConnectionStrategy(@QueryParameter String connectionStrategy) {
        return Stream.of(ConnectionStrategy.values())
                .filter(v -> v.name().equals(connectionStrategy))
                .findFirst()
                .map(s -> FormValidation.ok())
                .orElse(FormValidation.error("Could not find selected connection strategy"));
    }

    public String getDefaultHostKeyVerificationStrategy() {
        // new templates default to the most secure strategy
        return HostKeyVerificationStrategyEnum.CHECK_NEW_HARD.name();
    }

    // Populates the host-key-verification dropdown, pre-selecting the saved value.
    public ListBoxModel doFillHostKeyVerificationStrategyItems(@QueryParameter String hostKeyVerificationStrategy) {
        return Stream.of(HostKeyVerificationStrategyEnum.values())
                .map(v -> {
                    if (v.name().equals(hostKeyVerificationStrategy)) {
                        return new ListBoxModel.Option(v.getDisplayText(), v.name(), true);
                    } else {
                        return new ListBoxModel.Option(v.getDisplayText(), v.name(), false);
                    }
                })
                .collect(Collectors.toCollection(ListBoxModel::new));
    }

    public FormValidation doCheckHostKeyVerificationStrategy(@QueryParameter String hostKeyVerificationStrategy) {
        Stream<HostKeyVerificationStrategyEnum> stream = Stream.of(HostKeyVerificationStrategyEnum.values());
        Stream<HostKeyVerificationStrategyEnum> filteredStream = stream.filter(v -> v.name().equals(hostKeyVerificationStrategy));
        Optional<HostKeyVerificationStrategyEnum> matched = filteredStream.findFirst();
        Optional<FormValidation> okResult = matched.map(s -> FormValidation.ok());
        return okResult.orElse(FormValidation.error(String.format("Could not find selected host key verification (%s)", hostKeyVerificationStrategy)));
    }

    // Populates the tenancy dropdown, pre-selecting the saved value.
    public ListBoxModel doFillTenancyItems(@QueryParameter String tenancy) {
        return Stream.of(Tenancy.values())
                .map(v -> {
                    if (v.name().equals(tenancy)) {
                        return new ListBoxModel.Option(v.name(), v.name(), true);
                    } else {
                        return new ListBoxModel.Option(v.name(), v.name(), false);
                    }
                })
                .collect(Collectors.toCollection(ListBoxModel::new));
    }

    public String getDefaultEbsEncryptRootVolume() {
        return EbsEncryptRootVolume.DEFAULT.getDisplayText();
    }

    // Populates the EBS-root-volume-encryption dropdown, pre-selecting the saved value.
    public ListBoxModel doFillEbsEncryptRootVolumeItems(@QueryParameter String ebsEncryptRootVolume ) {
        return Stream.of(EbsEncryptRootVolume.values())
                .map(v -> {
                    if (v.name().equals(ebsEncryptRootVolume)) {
                        return new ListBoxModel.Option(v.getDisplayText(), v.name(), true);
                    } else {
                        return new ListBoxModel.Option(v.getDisplayText(), v.name(), false);
                    }
                })
                .collect(Collectors.toCollection(ListBoxModel::new));
    }

    public FormValidation doEbsEncryptRootVolume(@QueryParameter String ebsEncryptRootVolume) {
        Stream<EbsEncryptRootVolume> stream = Stream.of(EbsEncryptRootVolume.values());
        Stream<EbsEncryptRootVolume> filteredStream = stream.filter(v -> v.name().equals(ebsEncryptRootVolume));
        Optional<EbsEncryptRootVolume> matched = filteredStream.findFirst();
        Optional<FormValidation> okResult = matched.map(s -> FormValidation.ok());
        return okResult.orElse(FormValidation.error(String.format("Could not find selected option (%s)", ebsEncryptRootVolume)));
    }
}
}
// src/main/java/hudson/plugins/ec2/SlaveTemplate.java
/* * The MIT License * * Copyright (c) 2004-, Kohsuke Kawaguchi, Sun Microsystems, Inc., and a number of other of contributors * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated * documentation files (the "Software"), to deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the * Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE * WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ package hudson.plugins.ec2; import com.amazonaws.AmazonClientException; import com.amazonaws.AmazonServiceException; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.services.ec2.AmazonEC2; import com.amazonaws.services.ec2.model.AmazonEC2Exception; import com.amazonaws.services.ec2.model.BlockDeviceMapping; import com.amazonaws.services.ec2.model.CancelSpotInstanceRequestsRequest; import com.amazonaws.services.ec2.model.CreateTagsRequest; import com.amazonaws.services.ec2.model.CreditSpecificationRequest; import com.amazonaws.services.ec2.model.DescribeImagesRequest; import com.amazonaws.services.ec2.model.DescribeInstancesRequest; import com.amazonaws.services.ec2.model.DescribeInstancesResult; import com.amazonaws.services.ec2.model.DescribeSecurityGroupsRequest; import com.amazonaws.services.ec2.model.DescribeSecurityGroupsResult; import com.amazonaws.services.ec2.model.DescribeSpotInstanceRequestsRequest; import com.amazonaws.services.ec2.model.DescribeSubnetsRequest; import com.amazonaws.services.ec2.model.DescribeSubnetsResult; import com.amazonaws.services.ec2.model.Filter; import com.amazonaws.services.ec2.model.IamInstanceProfileSpecification; import com.amazonaws.services.ec2.model.Image; import com.amazonaws.services.ec2.model.Instance; import com.amazonaws.services.ec2.model.InstanceMarketOptionsRequest; import com.amazonaws.services.ec2.model.InstanceNetworkInterfaceSpecification; import com.amazonaws.services.ec2.model.InstanceStateName; import com.amazonaws.services.ec2.model.InstanceType; import com.amazonaws.services.ec2.model.KeyPair; import com.amazonaws.services.ec2.model.LaunchSpecification; import com.amazonaws.services.ec2.model.MarketType; import com.amazonaws.services.ec2.model.Placement; import com.amazonaws.services.ec2.model.RequestSpotInstancesRequest; import com.amazonaws.services.ec2.model.RequestSpotInstancesResult; import com.amazonaws.services.ec2.model.Reservation; import 
com.amazonaws.services.ec2.model.ResourceType; import com.amazonaws.services.ec2.model.RunInstancesRequest; import com.amazonaws.services.ec2.model.SecurityGroup; import com.amazonaws.services.ec2.model.ShutdownBehavior; import com.amazonaws.services.ec2.model.SpotInstanceRequest; import com.amazonaws.services.ec2.model.SpotMarketOptions; import com.amazonaws.services.ec2.model.SpotPlacement; import com.amazonaws.services.ec2.model.StartInstancesRequest; import com.amazonaws.services.ec2.model.StartInstancesResult; import com.amazonaws.services.ec2.model.Subnet; import com.amazonaws.services.ec2.model.Tag; import com.amazonaws.services.ec2.model.TagSpecification; import edu.umd.cs.findbugs.annotations.NonNull; import hudson.Extension; import hudson.Util; import hudson.XmlFile; import hudson.model.Describable; import hudson.model.Descriptor; import hudson.model.Descriptor.FormException; import hudson.model.Failure; import hudson.model.Hudson; import hudson.model.Label; import hudson.model.Node; import hudson.model.Saveable; import hudson.model.TaskListener; import hudson.model.labels.LabelAtom; import hudson.model.listeners.SaveableListener; import hudson.plugins.ec2.util.AmazonEC2Factory; import hudson.plugins.ec2.util.DeviceMappingParser; import hudson.plugins.ec2.util.EC2AgentConfig; import hudson.plugins.ec2.util.EC2AgentFactory; import hudson.plugins.ec2.util.MinimumInstanceChecker; import hudson.plugins.ec2.util.MinimumNumberOfInstancesTimeRangeConfig; import hudson.security.Permission; import hudson.slaves.NodeProperty; import hudson.slaves.NodePropertyDescriptor; import hudson.util.DescribableList; import hudson.util.FormValidation; import hudson.util.ListBoxModel; import hudson.util.Secret; import jenkins.model.Jenkins; import jenkins.model.JenkinsLocationConfiguration; import jenkins.slaves.iterators.api.NodeIterator; import org.apache.commons.lang.StringUtils; import org.kohsuke.accmod.Restricted; import org.kohsuke.accmod.restrictions.NoExternalUse; 
import org.kohsuke.stapler.DataBoundConstructor;
import org.kohsuke.stapler.DataBoundSetter;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.interceptor.RequirePOST;

import javax.annotation.CheckForNull;
import javax.servlet.ServletException;
import java.io.IOException;
import java.io.PrintStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Template of {@link EC2AbstractSlave} to launch.
 *
 * @author Kohsuke Kawaguchi
 */
public class SlaveTemplate implements Describable<SlaveTemplate> {
    private static final Logger LOGGER = Logger.getLogger(SlaveTemplate.class.getName());

    // --- What to launch: image, placement and hardware shape ---
    // AMI id to launch (mutable: a setter exists; may also be resolved via amiOwners/amiUsers/amiFilters below).
    public String ami;
    public final String description;
    public final String zone;
    // Non-null when this template launches spot instances instead of on-demand ones.
    public final SpotConfiguration spotConfig;
    // Comma-separated security group names (see parseSecurityGroups usage elsewhere in this class).
    public final String securityGroups;
    public final String remoteFS;
    public final InstanceType type;
    public final boolean ebsOptimized;
    public final boolean monitoring;
    // When true, requests "unlimited" CPU credits for burstable (T2/T3-class) instances.
    public final boolean t2Unlimited;

    // --- How the resulting node is configured in Jenkins ---
    public final String labels;
    public final Node.Mode mode;
    public final String initScript;
    public final String tmpDir;
    // User data passed to EC2 at launch; stored trimmed, Base64-encoded at request time.
    public final String userData;
    // Kept as String so an empty/invalid value can fall back to a per-instance-type default.
    public final String numExecutors;
    public final String remoteAdmin;
    public final String jvmopts;
    // Space-separated list of subnet ids, used round-robin (see chooseSubnetId).
    public final String subnetId;
    public final String idleTerminationMinutes;
    public final String iamInstanceProfile;
    public final boolean deleteRootOnTermination;
    public final boolean useEphemeralDevices;
    public final String customDeviceMapping;
    public int instanceCap;
    private final int minimumNumberOfInstances;
    private MinimumNumberOfInstancesTimeRangeConfig minimumNumberOfInstancesTimeRangeConfig;
    private final int minimumNumberOfSpareInstances;
    public final boolean stopOnTerminate;
    private final List<EC2Tag> tags;
    public ConnectionStrategy connectionStrategy;
    public HostKeyVerificationStrategyEnum hostKeyVerificationStrategy;
    public final boolean associatePublicIp;
    // Back-reference to the owning cloud; transient, expected to be re-attached after deserialization.
    protected transient EC2Cloud parent;
    public AMITypeData amiType;
    public int launchTimeout;
    public boolean connectBySSHProcess;
    public int maxTotalUses;
    private /* lazily initialized */ DescribableList<NodeProperty<?>, NodePropertyDescriptor> nodeProperties;
    // Round-robin cursor into the subnetId list plus the last subnet actually chosen.
    public int nextSubnet;
    public String currentSubnetId;
    public Tenancy tenancy;
    public EbsEncryptRootVolume ebsEncryptRootVolume;

    // Caches derived from labels/securityGroups; rebuilt rather than persisted.
    private transient/* almost final */ Set<LabelAtom> labelSet;
    private transient/* almost final */Set<String> securityGroupSet;

    /* FIXME: Ideally these would be List<String>, but Jenkins currently
     * doesn't offer a usable way to represent those in forms. Instead
     * the values are interpreted as a comma separated list.
     *
     * https://issues.jenkins.io/browse/JENKINS-27901
     */
    @CheckForNull
    private String amiOwners;
    @CheckForNull
    private String amiUsers;
    @CheckForNull
    private List<EC2Filter> amiFilters;

    /*
     * Necessary to handle reading from old configurations.
     * The UnixData object is created in readResolve() */
    @Deprecated
    public transient String sshPort;
    @Deprecated
    public transient String rootCommandPrefix;
    @Deprecated
    public transient String slaveCommandPrefix;
    @Deprecated
    public transient String slaveCommandSuffix;
    @Deprecated
    public boolean usePrivateDnsName;
    @Deprecated
    public boolean connectUsingPublicIp;
    @Deprecated
    public transient boolean useDedicatedTenancy;

    /**
     * Canonical constructor used by Jenkins form data-binding. All older constructors
     * below telescope into this one, supplying defaults for the parameters they lack.
     * Normalizes nullable inputs (mode, connectionStrategy, hostKeyVerificationStrategy,
     * tenancy, ebsEncryptRootVolume, instance cap / launch timeout strings) and finishes
     * by calling readResolve() to initialize derived transient state.
     */
    @DataBoundConstructor
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description,
            String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin,
            AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags,
            String idleTerminationMinutes, int minimumNumberOfInstances, int minimumNumberOfSpareInstances,
            String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination,
            boolean useEphemeralDevices, String launchTimeoutStr, boolean associatePublicIp,
            String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring, boolean t2Unlimited,
            ConnectionStrategy connectionStrategy, int maxTotalUses,
            List<? extends NodeProperty<?>> nodeProperties, HostKeyVerificationStrategyEnum hostKeyVerificationStrategy,
            Tenancy tenancy, EbsEncryptRootVolume ebsEncryptRootVolume) {
        // Several fields allow command execution on the controller/agents, so require ADMINISTER to set them.
        if(StringUtils.isNotBlank(remoteAdmin) || StringUtils.isNotBlank(jvmopts) || StringUtils.isNotBlank(tmpDir)){
            LOGGER.log(Level.FINE, "As remoteAdmin, jvmopts or tmpDir is not blank, we must ensure the user has ADMINISTER rights.");
            // Can be null during tests
            Jenkins j = Jenkins.getInstanceOrNull();
            if (j != null)
                j.checkPermission(Jenkins.ADMINISTER);
        }
        this.ami = ami;
        this.zone = zone;
        this.spotConfig = spotConfig;
        this.securityGroups = securityGroups;
        this.remoteFS = remoteFS;
        this.amiType = amiType;
        this.type = type;
        this.ebsOptimized = ebsOptimized;
        this.labels = Util.fixNull(labelString);
        this.mode = mode != null ? mode : Node.Mode.NORMAL;
        this.description = description;
        this.initScript = initScript;
        this.tmpDir = tmpDir;
        this.userData = StringUtils.trimToEmpty(userData);
        this.numExecutors = Util.fixNull(numExecutors).trim();
        this.remoteAdmin = remoteAdmin;
        this.jvmopts = jvmopts;
        this.stopOnTerminate = stopOnTerminate;
        this.subnetId = subnetId;
        this.tags = tags;
        this.idleTerminationMinutes = idleTerminationMinutes;
        this.associatePublicIp = associatePublicIp;
        this.connectionStrategy = connectionStrategy == null ? ConnectionStrategy.PRIVATE_IP : connectionStrategy;
        // Keep the deprecated flag in sync for configurations written by older plugin versions.
        this.useDedicatedTenancy = tenancy == Tenancy.Dedicated;
        this.connectBySSHProcess = connectBySSHProcess;
        this.maxTotalUses = maxTotalUses;
        this.nodeProperties = new DescribableList<>(Saveable.NOOP, Util.fixNull(nodeProperties));
        this.monitoring = monitoring;
        this.nextSubnet = 0;
        // Derived deprecated flags, kept in sync with the chosen connection strategy.
        this.usePrivateDnsName = this.connectionStrategy.equals(ConnectionStrategy.PRIVATE_DNS);
        this.connectUsingPublicIp = this.connectionStrategy.equals(ConnectionStrategy.PUBLIC_IP);
        this.minimumNumberOfInstances = minimumNumberOfInstances;
        this.minimumNumberOfSpareInstances = minimumNumberOfSpareInstances;
        // Empty cap string means "unlimited"; a non-numeric value intentionally throws NumberFormatException.
        if (null == instanceCapStr || instanceCapStr.isEmpty()) {
            this.instanceCap = Integer.MAX_VALUE;
        } else {
            this.instanceCap = Integer.parseInt(instanceCapStr);
        }
        // Unlike the cap, a bad launch timeout silently falls back to "no timeout".
        try {
            this.launchTimeout = Integer.parseInt(launchTimeoutStr);
        } catch (NumberFormatException nfe) {
            this.launchTimeout = Integer.MAX_VALUE;
        }
        this.iamInstanceProfile = iamInstanceProfile;
        this.deleteRootOnTermination = deleteRootOnTermination;
        this.useEphemeralDevices = useEphemeralDevices;
        this.customDeviceMapping = customDeviceMapping;
        this.t2Unlimited = t2Unlimited;
        this.hostKeyVerificationStrategy = hostKeyVerificationStrategy != null ? hostKeyVerificationStrategy : HostKeyVerificationStrategyEnum.CHECK_NEW_SOFT;
        this.tenancy = tenancy != null ? tenancy : Tenancy.Default;
        this.ebsEncryptRootVolume = ebsEncryptRootVolume != null ? ebsEncryptRootVolume : EbsEncryptRootVolume.DEFAULT;
        readResolve(); // initialize
    }

    /** @deprecated defaults ebsEncryptRootVolume to null (→ EbsEncryptRootVolume.DEFAULT). */
    @Deprecated
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description,
            String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin,
            AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags,
            String idleTerminationMinutes, int minimumNumberOfInstances, int minimumNumberOfSpareInstances,
            String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination,
            boolean useEphemeralDevices, String launchTimeoutStr, boolean associatePublicIp,
            String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring, boolean t2Unlimited,
            ConnectionStrategy connectionStrategy, int maxTotalUses,
            List<? extends NodeProperty<?>> nodeProperties, HostKeyVerificationStrategyEnum hostKeyVerificationStrategy,
            Tenancy tenancy) {
        this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description,
                initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId,
                tags, idleTerminationMinutes, minimumNumberOfInstances, minimumNumberOfSpareInstances, instanceCapStr,
                iamInstanceProfile, deleteRootOnTermination, useEphemeralDevices, launchTimeoutStr, associatePublicIp,
                customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited, connectionStrategy, maxTotalUses,
                nodeProperties, hostKeyVerificationStrategy, tenancy, null);
    }

    /** @deprecated translates the old boolean useDedicatedTenancy into a Tenancy value. */
    @Deprecated
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description,
            String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin,
            AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags,
            String idleTerminationMinutes, int minimumNumberOfInstances, int minimumNumberOfSpareInstances,
            String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination,
            boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr,
            boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring,
            boolean t2Unlimited, ConnectionStrategy connectionStrategy, int maxTotalUses,
            List<? extends NodeProperty<?>> nodeProperties, HostKeyVerificationStrategyEnum hostKeyVerificationStrategy) {
        this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description,
                initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId,
                tags, idleTerminationMinutes, minimumNumberOfInstances, minimumNumberOfSpareInstances, instanceCapStr,
                iamInstanceProfile, deleteRootOnTermination, useEphemeralDevices, launchTimeoutStr, associatePublicIp,
                customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited, connectionStrategy, maxTotalUses,
                nodeProperties, hostKeyVerificationStrategy, Tenancy.backwardsCompatible(useDedicatedTenancy));
    }

    /** @deprecated defaults hostKeyVerificationStrategy to null (→ CHECK_NEW_SOFT). */
    @Deprecated
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description,
            String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin,
            AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags,
            String idleTerminationMinutes, int minimumNumberOfInstances, int minimumNumberOfSpareInstances,
            String instanceCapStr, String iamInstanceProfile, boolean deleteRootOnTermination,
            boolean useEphemeralDevices, boolean useDedicatedTenancy, String launchTimeoutStr,
            boolean associatePublicIp, String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring,
            boolean t2Unlimited, ConnectionStrategy connectionStrategy, int maxTotalUses,
            List<? extends NodeProperty<?>> nodeProperties ) {
        this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description,
                initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId,
                tags, idleTerminationMinutes, minimumNumberOfInstances, minimumNumberOfSpareInstances, instanceCapStr,
                iamInstanceProfile, deleteRootOnTermination, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr,
                associatePublicIp, customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited,
                connectionStrategy, maxTotalUses, nodeProperties, null);
    }

    /** @deprecated defaults minimumNumberOfSpareInstances to 0. */
    @Deprecated
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description,
            String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin,
            AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags,
            String idleTerminationMinutes, int minimumNumberOfInstances, String instanceCapStr,
            String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices,
            boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp,
            String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring, boolean t2Unlimited,
            ConnectionStrategy connectionStrategy, int maxTotalUses,List<? extends NodeProperty<?>> nodeProperties ) {
        this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description,
                initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId,
                tags, idleTerminationMinutes, minimumNumberOfInstances, 0, instanceCapStr, iamInstanceProfile,
                deleteRootOnTermination, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp,
                customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited, connectionStrategy, maxTotalUses,
                nodeProperties);
    }

    /** @deprecated defaults nodeProperties to an empty list. */
    @Deprecated
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description,
            String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin,
            AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags,
            String idleTerminationMinutes, int minimumNumberOfInstances, String instanceCapStr,
            String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices,
            boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp,
            String customDeviceMapping, boolean connectBySSHProcess, boolean monitoring, boolean t2Unlimited,
            ConnectionStrategy connectionStrategy, int maxTotalUses) {
        this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description,
                initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId,
                tags, idleTerminationMinutes, minimumNumberOfInstances, instanceCapStr, iamInstanceProfile,
                deleteRootOnTermination, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp,
                customDeviceMapping, connectBySSHProcess, monitoring, t2Unlimited, connectionStrategy, maxTotalUses,
                Collections.emptyList());
    }

    /** @deprecated defaults minimumNumberOfInstances to 0. */
    @Deprecated
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description,
            String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin,
            AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags,
            String idleTerminationMinutes, String instanceCapStr, String iamInstanceProfile,
            boolean deleteRootOnTermination, boolean useEphemeralDevices, boolean useDedicatedTenancy,
            String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping,
            boolean connectBySSHProcess, boolean monitoring, boolean t2Unlimited,
            ConnectionStrategy connectionStrategy, int maxTotalUses) {
        this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description,
                initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId,
                tags, idleTerminationMinutes, 0, instanceCapStr, iamInstanceProfile, deleteRootOnTermination,
                useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping,
                connectBySSHProcess, monitoring, t2Unlimited, connectionStrategy, maxTotalUses);
    }

    /**
     * @deprecated translates the old usePrivateDnsName/connectUsingPublicIp booleans into a
     * ConnectionStrategy and defaults maxTotalUses to -1.
     */
    @Deprecated
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description,
            String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin,
            AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags,
            String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr,
            String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices,
            boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp,
            String customDeviceMapping, boolean connectBySSHProcess, boolean connectUsingPublicIp, boolean monitoring,
            boolean t2Unlimited) {
        this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description,
                initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId,
                tags, idleTerminationMinutes, instanceCapStr, iamInstanceProfile, deleteRootOnTermination,
                useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping,
                connectBySSHProcess, monitoring, t2Unlimited,
                ConnectionStrategy.backwardsCompatible(usePrivateDnsName, connectUsingPublicIp, associatePublicIp), -1);
    }

    // Legacy constructor: defaults monitoring and t2Unlimited to false.
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description,
            String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin,
            AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags,
            String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr,
            String iamInstanceProfile, boolean deleteRootOnTermination, boolean useEphemeralDevices,
            boolean useDedicatedTenancy, String launchTimeoutStr, boolean associatePublicIp,
            String customDeviceMapping, boolean connectBySSHProcess, boolean connectUsingPublicIp) {
        this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description,
                initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId,
                tags, idleTerminationMinutes, usePrivateDnsName, instanceCapStr, iamInstanceProfile,
                deleteRootOnTermination, useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp,
                customDeviceMapping, connectBySSHProcess, connectUsingPublicIp, false, false);
    }

    // Legacy constructor: defaults deleteRootOnTermination and connectUsingPublicIp to false.
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description,
            String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin,
            AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags,
            String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr,
            String iamInstanceProfile, boolean useEphemeralDevices, boolean useDedicatedTenancy,
            String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping,
            boolean connectBySSHProcess) {
        this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description,
                initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId,
                tags, idleTerminationMinutes, usePrivateDnsName, instanceCapStr, iamInstanceProfile, false,
                useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping,
                connectBySSHProcess, false);
    }

    // Legacy constructor: defaults connectBySSHProcess to false.
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode, String description,
            String initScript, String tmpDir, String userData, String numExecutors, String remoteAdmin,
            AMITypeData amiType, String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags,
            String idleTerminationMinutes, boolean usePrivateDnsName, String instanceCapStr,
            String iamInstanceProfile, boolean useEphemeralDevices, boolean useDedicatedTenancy,
            String launchTimeoutStr, boolean associatePublicIp, String customDeviceMapping) {
        this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description,
                initScript, tmpDir, userData, numExecutors, remoteAdmin, amiType, jvmopts, stopOnTerminate, subnetId,
                tags, idleTerminationMinutes, usePrivateDnsName, instanceCapStr, iamInstanceProfile,
                useEphemeralDevices, useDedicatedTenancy, launchTimeoutStr, associatePublicIp, customDeviceMapping,
                false);
    }

    /**
     * Backward compatible
     * constructor for reloading previous version data */
    public SlaveTemplate(String ami, String zone, SpotConfiguration spotConfig, String securityGroups, String remoteFS,
            String sshPort, InstanceType type, boolean ebsOptimized, String labelString, Node.Mode mode,
            String description, String initScript, String tmpDir, String userData, String numExecutors,
            String remoteAdmin, String rootCommandPrefix, String slaveCommandPrefix, String slaveCommandSuffix,
            String jvmopts, boolean stopOnTerminate, String subnetId, List<EC2Tag> tags, String idleTerminationMinutes,
            boolean usePrivateDnsName, String instanceCapStr, String iamInstanceProfile, boolean useEphemeralDevices,
            String launchTimeoutStr) {
        // Wraps the loose SSH settings of very old configurations into a UnixData AMI type.
        this(ami, zone, spotConfig, securityGroups, remoteFS, type, ebsOptimized, labelString, mode, description,
                initScript, tmpDir, userData, numExecutors, remoteAdmin,
                new UnixData(rootCommandPrefix, slaveCommandPrefix, slaveCommandSuffix, sshPort), jvmopts,
                stopOnTerminate, subnetId, tags, idleTerminationMinutes, usePrivateDnsName, instanceCapStr,
                iamInstanceProfile, useEphemeralDevices, false, launchTimeoutStr, false, null);
    }

    public boolean isConnectBySSHProcess() {
        // See
        // src/main/resources/hudson/plugins/ec2/SlaveTemplate/help-connectBySSHProcess.html
        return connectBySSHProcess;
    }

    public EC2Cloud getParent() {
        return parent;
    }

    public String getLabelString() {
        return labels;
    }

    public Node.Mode getMode() {
        return mode;
    }

    public String getDisplayName() {
        return String.format("EC2 (%s) - %s", parent.getDisplayName(), description);
    }

    /**
     * Derives an agent name from this template and the instance id; falls back to the bare
     * instance id when the formatted name is rejected by Jenkins' name validation.
     */
    public String getSlaveName(String instanceId) {
        final String agentName = String.format("%s (%s)", getDisplayName(), instanceId);
        try {
            Jenkins.checkGoodName(agentName);
            return agentName;
        } catch (Failure e) {
            return instanceId;
        }
    }

    String getZone() {
        return zone;
    }

    public String getSecurityGroupString() {
        return securityGroups;
    }

    public Set<String> getSecurityGroupSet() {
        return securityGroupSet;
    }

    /** Splits the comma-separated securityGroups string; empty/blank yields an empty set. */
    public Set<String> parseSecurityGroups() {
        if (securityGroups == null || "".equals(securityGroups.trim())) {
            return Collections.emptySet();
        } else {
            return new HashSet<String>(Arrays.asList(securityGroups.split("\\s*,\\s*")));
        }
    }

    /** Parses the numExecutors string; non-numeric values fall back to a per-instance-type default. */
    public int getNumExecutors() {
        try {
            return Integer.parseInt(numExecutors);
        } catch (NumberFormatException e) {
            return EC2AbstractSlave.toNumExecutors(type);
        }
    }

    /** SSH port from the Unix/Mac AMI type data; defaults to 22 when absent or non-numeric. */
    public int getSshPort() {
        try {
            String sshPort = "";
            if (amiType.isUnix()) {
                sshPort = ((UnixData) amiType).getSshPort();
            }
            if (amiType.isMac()) {
                sshPort = ((MacData) amiType).getSshPort();
            }
            return Integer.parseInt(sshPort);
        } catch (NumberFormatException e) {
            return 22;
        }
    }

    public String getRemoteAdmin() {
        return remoteAdmin;
    }

    public String getRootCommandPrefix() {
        return (amiType.isUnix() ? ((UnixData) amiType).getRootCommandPrefix() : (amiType.isMac() ? ((MacData) amiType).getRootCommandPrefix():""));
    }

    public String getSlaveCommandPrefix() {
        return (amiType.isUnix() ? ((UnixData) amiType).getSlaveCommandPrefix() : (amiType.isMac() ? ((MacData) amiType).getSlaveCommandPrefix() : ""));
    }

    public String getSlaveCommandSuffix() {
        return (amiType.isUnix() ? ((UnixData) amiType).getSlaveCommandSuffix() : (amiType.isMac() ? ((MacData) amiType).getSlaveCommandSuffix() : ""));
    }

    /**
     * Picks the next subnet id from the space-separated subnetId list, round-robin, and
     * remembers it in currentSubnetId. Returns null when no subnet is configured.
     * NOTE(review): if a persisted nextSubnet exceeds the current list length (list shrank
     * in config), subnetIdList[nextSubnet] can throw ArrayIndexOutOfBoundsException — TODO confirm.
     */
    public String chooseSubnetId() {
        if (StringUtils.isBlank(subnetId)) {
            return null;
        } else {
            String[] subnetIdList= getSubnetId().split(" ");

            // Round-robin subnet selection.
            currentSubnetId = subnetIdList[nextSubnet];
            nextSubnet = (nextSubnet + 1) % subnetIdList.length;

            return currentSubnetId;
        }
    }

    /** Either advances the round-robin selection or re-uses the last chosen subnet. */
    public String chooseSubnetId(boolean rotateSubnet) {
        if (rotateSubnet) {
            return chooseSubnetId();
        } else {
            return this.currentSubnetId;
        }
    }

    public String getSubnetId() {
        return subnetId;
    }

    public String getCurrentSubnetId() {
        return currentSubnetId;
    }

    public boolean getAssociatePublicIp() {
        return associatePublicIp;
    }

    /** @deprecated kept for old data-binding; re-derives connectionStrategy from the legacy flags. */
    @Deprecated
    @DataBoundSetter
    public void setConnectUsingPublicIp(boolean connectUsingPublicIp) {
        this.connectUsingPublicIp = connectUsingPublicIp;
        this.connectionStrategy = ConnectionStrategy.backwardsCompatible(this.usePrivateDnsName, this.connectUsingPublicIp, this.associatePublicIp);
    }

    /** @deprecated kept for old data-binding; re-derives connectionStrategy from the legacy flags. */
    @Deprecated
    @DataBoundSetter
    public void setUsePrivateDnsName(boolean usePrivateDnsName) {
        this.usePrivateDnsName = usePrivateDnsName;
        this.connectionStrategy = ConnectionStrategy.backwardsCompatible(this.usePrivateDnsName, this.connectUsingPublicIp, this.associatePublicIp);
    }

    @Deprecated
    public boolean getUsePrivateDnsName() {
        return usePrivateDnsName;
    }

    @Deprecated
    public boolean isConnectUsingPublicIp() {
        return connectUsingPublicIp;
    }

    /** Unmodifiable view of the configured tags, or null when none were configured. */
    public List<EC2Tag> getTags() {
        if (null == tags)
            return null;
        return Collections.unmodifiableList(tags);
    }

    public String getidleTerminationMinutes() {
        return idleTerminationMinutes;
    }

    public Set<LabelAtom> getLabelSet() {
        return labelSet;
    }

    public String getAmi() {
        return ami;
    }

    public void setAmi(String ami) {
        this.ami = ami;
    }

    public AMITypeData getAmiType() {
        return amiType;
    }

    public void setAmiType(AMITypeData amiType) {
        this.amiType = amiType;
    }

    public int getMinimumNumberOfInstances() {
        return minimumNumberOfInstances;
    }

    public int getMinimumNumberOfSpareInstances() {
        return minimumNumberOfSpareInstances;
    }

    public MinimumNumberOfInstancesTimeRangeConfig getMinimumNumberOfInstancesTimeRangeConfig() {
        return minimumNumberOfInstancesTimeRangeConfig;
    }

    @DataBoundSetter
    public void setMinimumNumberOfInstancesTimeRangeConfig(MinimumNumberOfInstancesTimeRangeConfig minimumNumberOfInstancesTimeRangeConfig) {
        this.minimumNumberOfInstancesTimeRangeConfig = minimumNumberOfInstancesTimeRangeConfig;
    }

    public int getInstanceCap() {
        return instanceCap;
    }

    public int getSpotBlockReservationDuration() {
        if (spotConfig == null)
            return 0;
        return spotConfig.getSpotBlockReservationDuration();
    }

    /** String form of the spot block reservation duration for the UI; "" means unset/zero. */
    public String getSpotBlockReservationDurationStr() {
        if (spotConfig == null) {
            return "";
        } else {
            int dur = getSpotBlockReservationDuration();
            if (dur == 0)
                return "";
            return String.valueOf(getSpotBlockReservationDuration());
        }
    }

    /** String form of the instance cap for the UI; "" means unlimited (Integer.MAX_VALUE). */
    public String getInstanceCapStr() {
        if (instanceCap == Integer.MAX_VALUE) {
            return "";
        } else {
            return String.valueOf(instanceCap);
        }
    }

    public String getSpotMaxBidPrice() {
        if (spotConfig == null)
            return null;
        return SpotConfiguration.normalizeBid(spotConfig.getSpotMaxBidPrice());
    }

    public String getIamInstanceProfile() {
        return iamInstanceProfile;
    }

    @DataBoundSetter
    public void setHostKeyVerificationStrategy(HostKeyVerificationStrategyEnum hostKeyVerificationStrategy) {
        this.hostKeyVerificationStrategy = (hostKeyVerificationStrategy != null) ? hostKeyVerificationStrategy : HostKeyVerificationStrategyEnum.CHECK_NEW_SOFT;
    }

    @NonNull
    public HostKeyVerificationStrategyEnum getHostKeyVerificationStrategy() {
        return hostKeyVerificationStrategy != null ? hostKeyVerificationStrategy : HostKeyVerificationStrategyEnum.CHECK_NEW_SOFT;
    }

    @CheckForNull
    public String getAmiOwners() {
        return amiOwners;
    }

    @DataBoundSetter
    public void setAmiOwners(String amiOwners) {
        this.amiOwners = amiOwners;
    }

    @CheckForNull
    public String getAmiUsers() {
        return amiUsers;
    }

    @DataBoundSetter
    public void setAmiUsers(String amiUsers) {
        this.amiUsers = amiUsers;
    }

    @CheckForNull
    public List<EC2Filter> getAmiFilters() {
        return amiFilters;
    }

    @DataBoundSetter
    public void setAmiFilters(List<EC2Filter> amiFilters) {
        this.amiFilters = amiFilters;
    }

    @Override
    public String toString() {
        return "SlaveTemplate{" +
                "description='" + description + '\'' +
                ", labels='" + labels + '\'' +
                '}';
    }

    public int getMaxTotalUses() {
        return maxTotalUses;
    }

    public Tenancy getTenancyAttribute() {
        return tenancy;
    }

    public DescribableList<NodeProperty<?>, NodePropertyDescriptor> getNodeProperties() {
        return Objects.requireNonNull(nodeProperties);
    }

    public enum ProvisionOptions { ALLOW_CREATE, FORCE_CREATE }

    /**
     * Provisions a new EC2 agent or starts a previously stopped on-demand instance.
     *
     * @return always non-null. This needs to be then added to {@link Hudson#addNode(Node)}.
     */
    @NonNull
    public List<EC2AbstractSlave> provision(int number, EnumSet<ProvisionOptions> provisionOptions) throws AmazonClientException, IOException {
        final Image image = getImage();
        if (this.spotConfig != null) {
            // Spot instances are only created when explicitly allowed; never restarted from stopped.
            if (provisionOptions.contains(ProvisionOptions.ALLOW_CREATE) || provisionOptions.contains(ProvisionOptions.FORCE_CREATE))
                return provisionSpot(image, number, provisionOptions);
            return Collections.emptyList();
        }
        return provisionOndemand(image, number, provisionOptions);
    }

    /**
     * Safely we can pickup only instance that is not known by Jenkins at all.
     */
    private boolean checkInstance(Instance instance) {
        for (EC2AbstractSlave node : NodeIterator.nodes(EC2AbstractSlave.class)) {
            // A stopped instance with a matching node is still eligible for pickup (restart case).
            if ( (node.getInstanceId().equals(instance.getInstanceId())) && (! (instance.getState().getName().equalsIgnoreCase(InstanceStateName.Stopped.toString()) )) ){
                logInstanceCheck(instance, ". false - found existing corresponding Jenkins agent: " + node.getInstanceId());
                return false;
            }
        }
        logInstanceCheck(instance, " true - Instance is not connected to Jenkins");
        return true;
    }

    private void logInstanceCheck(Instance instance, String message) {
        logProvisionInfo("checkInstance: " + instance.getInstanceId() + "." + message);
    }

    /** True when no profile is configured, or the instance's profile ARN matches the configured one. */
    private boolean isSameIamInstanceProfile(Instance instance) {
        return StringUtils.isBlank(getIamInstanceProfile()) ||
                (instance.getIamInstanceProfile() != null &&
                        instance.getIamInstanceProfile().getArn().equals(getIamInstanceProfile()));
    }

    // NOTE(review): method name has a typo ("Shuttind"); left as-is since private callers may exist off-view.
    private boolean isTerminatingOrShuttindDown(String instanceStateName) {
        return instanceStateName.equalsIgnoreCase(InstanceStateName.Terminated.toString())
                || instanceStateName.equalsIgnoreCase(InstanceStateName.ShuttingDown.toString());
    }

    private void logProvisionInfo(String message) {
        LOGGER.info(this + ". " + message);
    }

    HashMap<RunInstancesRequest, List<Filter>> makeRunInstancesRequestAndFilters(Image image, int number, AmazonEC2 ec2) throws IOException {
        return makeRunInstancesRequestAndFilters(image, number, ec2, true);
    }

    @Deprecated
    HashMap<RunInstancesRequest, List<Filter>> makeRunInstancesRequestAndFilters(int number, AmazonEC2 ec2) throws IOException {
        return makeRunInstancesRequestAndFilters(getImage(), number, ec2);
    }

    /**
     * Builds the RunInstancesRequest for launching on-demand instances from this template,
     * together with the DescribeInstances filters that identify matching (reusable) instances.
     * Returns a single-entry map of request → filters, or null when no valid key pair is available.
     */
    HashMap<RunInstancesRequest, List<Filter>> makeRunInstancesRequestAndFilters(Image image, int number, AmazonEC2 ec2, boolean rotateSubnet) throws IOException {
        String imageId = image.getImageId();
        RunInstancesRequest riRequest = new RunInstancesRequest(imageId, 1, number).withInstanceType(type);
        riRequest.setEbsOptimized(ebsOptimized);
        riRequest.setMonitoring(monitoring);

        if (t2Unlimited){
            CreditSpecificationRequest creditRequest = new CreditSpecificationRequest();
            creditRequest.setCpuCredits("unlimited");
            riRequest.setCreditSpecification(creditRequest);
        }

        setupBlockDeviceMappings(image, riRequest.getBlockDeviceMappings());

        if(stopOnTerminate){
            riRequest.setInstanceInitiatedShutdownBehavior(ShutdownBehavior.Stop);
            logProvisionInfo("Setting Instance Initiated Shutdown Behavior : ShutdownBehavior.Stop");
        }else{
            riRequest.setInstanceInitiatedShutdownBehavior(ShutdownBehavior.Terminate);
            logProvisionInfo("Setting Instance Initiated Shutdown Behavior : ShutdownBehavior.Terminate");
        }

        List<Filter> diFilters = new ArrayList<>();
        diFilters.add(new Filter("image-id").withValues(imageId));
        diFilters.add(new Filter("instance-type").withValues(type.toString()));

        KeyPair keyPair = getKeyPair(ec2);
        if (keyPair == null){
            // NOTE(review): returning null here forces every caller to null-check; TODO consider an exception.
            logProvisionInfo("Could not retrieve a valid key pair.");
            return null;
        }
        riRequest.setUserData(Base64.getEncoder().encodeToString(userData.getBytes(StandardCharsets.UTF_8)));
        riRequest.setKeyName(keyPair.getKeyName());
        diFilters.add(new Filter("key-name").withValues(keyPair.getKeyName()));

        if (StringUtils.isNotBlank(getZone())) {
            Placement placement = new Placement(getZone());
            if (getTenancyAttribute().equals(Tenancy.Dedicated)) {
                placement.setTenancy("dedicated");
            }
            riRequest.setPlacement(placement);
            diFilters.add(new Filter("availability-zone").withValues(getZone()));
        }

        // Host/Default tenancy placements are set unconditionally (may overwrite the zone placement above).
        if(getTenancyAttribute().equals(Tenancy.Host)){
            Placement placement = new Placement();
            placement.setTenancy("host");
            riRequest.setPlacement(placement);
            diFilters.add(new Filter("tenancy").withValues(placement.getTenancy()));
        }else if(getTenancyAttribute().equals(Tenancy.Default)){
            Placement placement = new Placement();
            placement.setTenancy("default");
            riRequest.setPlacement(placement);
            diFilters.add(new Filter("tenancy").withValues(placement.getTenancy()));
        }

        String subnetId = chooseSubnetId(rotateSubnet);

        InstanceNetworkInterfaceSpecification net = new InstanceNetworkInterfaceSpecification();
        if (StringUtils.isNotBlank(subnetId)) {
            // With associatePublicIp the subnet/groups go on the network interface instead of the request.
            if (getAssociatePublicIp()) {
                net.setSubnetId(subnetId);
            } else {
                riRequest.setSubnetId(subnetId);
            }

            diFilters.add(new Filter("subnet-id").withValues(subnetId));

            /*
             * If we have a subnet ID then we can only use VPC security groups
             */
            if (!getSecurityGroupSet().isEmpty()) {
                List<String> groupIds = getEc2SecurityGroups(ec2);

                if (!groupIds.isEmpty()) {
                    if (getAssociatePublicIp()) {
                        net.setGroups(groupIds);
                    } else {
                        riRequest.setSecurityGroupIds(groupIds);
                    }

                    diFilters.add(new Filter("instance.group-id").withValues(groupIds));
                }
            }
        } else {
            // No subnet: resolve the configured group names to ids for filtering purposes.
            List<String> groupIds = getSecurityGroupsBy("group-name", securityGroupSet, ec2)
                    .getSecurityGroups()
                    .stream().map(SecurityGroup::getGroupId)
                    .collect(Collectors.toList());

            if (getAssociatePublicIp()) {
                net.setGroups(groupIds);
            } else {
                riRequest.setSecurityGroups(securityGroupSet);
            }

            if (!groupIds.isEmpty()) {
                diFilters.add(new Filter("instance.group-id").withValues(groupIds));
            }
        }

        net.setAssociatePublicIpAddress(getAssociatePublicIp());
        net.setDeviceIndex(0);

        if (getAssociatePublicIp()) {
            riRequest.withNetworkInterfaces(net);
        }

        HashSet<Tag> instTags = buildTags(EC2Cloud.EC2_SLAVE_TYPE_DEMAND);
        for (Tag tag : instTags) {
            diFilters.add(new Filter("tag:" + tag.getKey()).withValues(tag.getValue()));
        }

        if (StringUtils.isNotBlank(getIamInstanceProfile())) {
            riRequest.setIamInstanceProfile(new IamInstanceProfileSpecification().withArn(getIamInstanceProfile()));
        }

        // Tag both the instance and its volumes at creation time.
        List<TagSpecification> tagList = new ArrayList<>();
        TagSpecification tagSpecification = new TagSpecification();
        tagSpecification.setTags(instTags);
        tagList.add(tagSpecification.clone().withResourceType(ResourceType.Instance));
        tagList.add(tagSpecification.clone().withResourceType(ResourceType.Volume));
        riRequest.setTagSpecifications(tagList);

        HashMap<RunInstancesRequest, List<Filter>> ret = new HashMap<>();
        ret.put(riRequest, diFilters);
        return ret;
    }

    @Deprecated
    HashMap<RunInstancesRequest, List<Filter>> makeRunInstancesRequestAndFilters(int number, AmazonEC2 ec2, boolean rotateSubnet) throws IOException {
        return makeRunInstancesRequestAndFilters(getImage(), number, ec2, rotateSubnet);
    }

    /**
     * Provisions an On-demand EC2 agent by launching a new instance or starting a previously-stopped instance.
     */
    private List<EC2AbstractSlave> provisionOndemand(Image image, int number, EnumSet<ProvisionOptions> provisionOptions)
            throws IOException {
        return provisionOndemand(image, number, provisionOptions, false, false);
    }

    /**
     * Provisions an On-demand EC2 agent by launching a new instance or starting a previously-stopped instance.
*/ private List<EC2AbstractSlave> provisionOndemand(Image image, int number, EnumSet<ProvisionOptions> provisionOptions, boolean spotWithoutBidPrice, boolean fallbackSpotToOndemand) throws IOException { AmazonEC2 ec2 = getParent().connect(); logProvisionInfo("Considering launching"); HashMap<RunInstancesRequest, List<Filter>> runInstancesRequestFilterMap = makeRunInstancesRequestAndFilters(image, number, ec2); Map.Entry<RunInstancesRequest, List<Filter>> entry = runInstancesRequestFilterMap.entrySet().iterator().next(); RunInstancesRequest riRequest = entry.getKey(); List<Filter> diFilters = entry.getValue(); DescribeInstancesRequest diRequest = new DescribeInstancesRequest().withFilters(diFilters); logProvisionInfo("Looking for existing instances with describe-instance: " + diRequest); DescribeInstancesResult diResult = ec2.describeInstances(diRequest); List<Instance> orphansOrStopped = findOrphansOrStopped(diResult, number); if (orphansOrStopped.isEmpty() && !provisionOptions.contains(ProvisionOptions.FORCE_CREATE) && !provisionOptions.contains(ProvisionOptions.ALLOW_CREATE)) { logProvisionInfo("No existing instance found - but cannot create new instance"); return null; } wakeOrphansOrStoppedUp(ec2, orphansOrStopped); if (orphansOrStopped.size() == number) { return toSlaves(orphansOrStopped); } riRequest.setMaxCount(number - orphansOrStopped.size()); List<Instance> newInstances; if (spotWithoutBidPrice) { InstanceMarketOptionsRequest instanceMarketOptionsRequest = new InstanceMarketOptionsRequest().withMarketType(MarketType.Spot); if (getSpotBlockReservationDuration() != 0) { SpotMarketOptions spotOptions = new SpotMarketOptions().withBlockDurationMinutes(getSpotBlockReservationDuration() * 60); instanceMarketOptionsRequest.setSpotOptions(spotOptions); } riRequest.setInstanceMarketOptions(instanceMarketOptionsRequest); try { newInstances = ec2.runInstances(riRequest).getReservation().getInstances(); } catch (AmazonEC2Exception e) { if (fallbackSpotToOndemand && 
e.getErrorCode().equals("InsufficientInstanceCapacity")) { logProvisionInfo("There is no spot capacity available matching your request, falling back to on-demand instance."); riRequest.setInstanceMarketOptions(new InstanceMarketOptionsRequest()); newInstances = ec2.runInstances(riRequest).getReservation().getInstances(); } else { throw e; } } } else { newInstances = ec2.runInstances(riRequest).getReservation().getInstances(); } // Have to create a new instance if (newInstances.isEmpty()) { logProvisionInfo("No new instances were created"); } newInstances.addAll(orphansOrStopped); return toSlaves(newInstances); } void wakeOrphansOrStoppedUp(AmazonEC2 ec2, List<Instance> orphansOrStopped) { List<String> instances = new ArrayList<>(); for(Instance instance : orphansOrStopped) { if (instance.getState().getName().equalsIgnoreCase(InstanceStateName.Stopping.toString()) || instance.getState().getName().equalsIgnoreCase(InstanceStateName.Stopped.toString())) { logProvisionInfo("Found stopped instances - will start it: " + instance); instances.add(instance.getInstanceId()); } else { // Should be pending or running at this point, just let it come up logProvisionInfo("Found existing pending or running: " + instance.getState().getName() + " instance: " + instance); } } if (!instances.isEmpty()) { StartInstancesRequest siRequest = new StartInstancesRequest(instances); StartInstancesResult siResult = ec2.startInstances(siRequest); logProvisionInfo("Result of starting stopped instances:" + siResult); } } List<EC2AbstractSlave> toSlaves(List<Instance> newInstances) throws IOException { try { List<EC2AbstractSlave> slaves = new ArrayList<>(newInstances.size()); for (Instance instance : newInstances) { slaves.add(newOndemandSlave(instance)); logProvisionInfo("Return instance: " + instance); } return slaves; } catch (FormException e) { throw new AssertionError(e); // we should have discovered all // configuration issues upfront } } List<Instance> 
    findOrphansOrStopped(DescribeInstancesResult diResult, int number) {
        List<Instance> orphansOrStopped = new ArrayList<>();
        int count = 0;
        for (Reservation reservation : diResult.getReservations()) {
            for (Instance instance : reservation.getInstances()) {
                // Skip instances launched under a different IAM instance profile.
                if (!isSameIamInstanceProfile(instance)) {
                    logInstanceCheck(instance, ". false - IAM Instance profile does not match: " + instance.getIamInstanceProfile());
                    continue;
                }

                // Skip instances that are already on their way out.
                if (isTerminatingOrShuttindDown(instance.getState().getName())) {
                    logInstanceCheck(instance, ". false - Instance is terminated or shutting down");
                    continue;
                }

                if (checkInstance(instance)) {
                    logProvisionInfo("Found existing instance: " + instance);
                    orphansOrStopped.add(instance);
                    count++;
                }

                // Stop as soon as the requested number of reusable instances is gathered.
                if (count == number) {
                    return orphansOrStopped;
                }
            }
        }
        return orphansOrStopped;
    }

    /**
     * Adjusts the root EBS device mapping for the launch request, applying this
     * template's delete-on-termination and encryption settings.
     * No-op for instance-store (non-EBS) AMIs.
     */
    private void setupRootDevice(Image image, List<BlockDeviceMapping> deviceMappings) {
        if (!"ebs".equals(image.getRootDeviceType())) {
            return;
        }

        // get the root device (only one expected in the blockmappings)
        final List<BlockDeviceMapping> rootDeviceMappings = image.getBlockDeviceMappings();
        if (rootDeviceMappings.size() == 0) {
            LOGGER.warning("AMI missing block devices");
            return;
        }
        BlockDeviceMapping rootMapping = rootDeviceMappings.get(0);
        LOGGER.info("AMI had " + rootMapping.getDeviceName());
        LOGGER.info(rootMapping.getEbs().toString());

        // Create a shadow of the AMI mapping (doesn't like reusing rootMapping directly)
        BlockDeviceMapping newMapping = rootMapping.clone();

        if (deleteRootOnTermination) {
            // Check if the root device is already in the mapping and update it
            for (final BlockDeviceMapping mapping : deviceMappings) {
                LOGGER.info("Request had " + mapping.getDeviceName());
                if (rootMapping.getDeviceName().equals(mapping.getDeviceName())) {
                    mapping.getEbs().setDeleteOnTermination(Boolean.TRUE);
                    return;
                }
            }

            // pass deleteRootOnTermination to shadow of the AMI mapping
            newMapping.getEbs().setDeleteOnTermination(Boolean.TRUE);
        }
newMapping.getEbs().setEncrypted(ebsEncryptRootVolume.getValue()); String message = String.format("EBS default encryption value set to: %s (%s)", ebsEncryptRootVolume.getDisplayText(), ebsEncryptRootVolume.getValue()); logProvisionInfo(message); deviceMappings.add(0, newMapping); } private List<BlockDeviceMapping> getNewEphemeralDeviceMapping(Image image) { final List<BlockDeviceMapping> oldDeviceMapping = image.getBlockDeviceMappings(); final Set<String> occupiedDevices = new HashSet<>(); for (final BlockDeviceMapping mapping : oldDeviceMapping) { occupiedDevices.add(mapping.getDeviceName()); } final List<String> available = new ArrayList<>( Arrays.asList("ephemeral0", "ephemeral1", "ephemeral2", "ephemeral3")); final List<BlockDeviceMapping> newDeviceMapping = new ArrayList<>(4); for (char suffix = 'b'; suffix <= 'z' && !available.isEmpty(); suffix++) { final String deviceName = String.format("/dev/xvd%s", suffix); if (occupiedDevices.contains(deviceName)) continue; final BlockDeviceMapping newMapping = new BlockDeviceMapping().withDeviceName(deviceName).withVirtualName( available.get(0)); newDeviceMapping.add(newMapping); available.remove(0); } return newDeviceMapping; } private void setupEphemeralDeviceMapping(Image image, List<BlockDeviceMapping> deviceMappings) { // Don't wipe out pre-existing mappings deviceMappings.addAll(getNewEphemeralDeviceMapping(image)); } @NonNull private static List<String> makeImageAttributeList(@CheckForNull String attr) { return Stream.of(Util.tokenize(Util.fixNull(attr))) .collect(Collectors.toList()); } @NonNull private DescribeImagesRequest makeDescribeImagesRequest() throws AmazonClientException { List<String> imageIds = Util.fixEmptyAndTrim(ami) == null ? 
Collections.emptyList() : Collections.singletonList(ami); List<String> owners = makeImageAttributeList(amiOwners); List<String> users = makeImageAttributeList(amiUsers); List<Filter> filters = EC2Filter.toFilterList(amiFilters); // Raise an exception if there were no search attributes. // This is legal but not what anyone wants - it will // launch random recently created public AMIs. int numAttrs = Stream.of(imageIds, owners, users, filters) .collect(Collectors.summingInt(List::size)); if (numAttrs == 0) { throw new AmazonClientException("Neither AMI ID nor AMI search attributes provided"); } return new DescribeImagesRequest() .withImageIds(imageIds) .withOwners(owners) .withExecutableUsers(users) .withFilters(filters); } @NonNull private Image getImage() throws AmazonClientException { DescribeImagesRequest request = makeDescribeImagesRequest(); LOGGER.info("Getting image for request " + request); List<Image> images = getParent().connect().describeImages(request).getImages(); if (images.isEmpty()) { throw new AmazonClientException("Unable to find image for request " + request); } // Sort in reverse by creation date to get latest image images.sort(Comparator.comparing(Image::getCreationDate).reversed()); return images.get(0); } private void setupCustomDeviceMapping(List<BlockDeviceMapping> deviceMappings) { if (StringUtils.isNotBlank(customDeviceMapping)) { deviceMappings.addAll(DeviceMappingParser.parse(customDeviceMapping)); } } /** * Provision a new agent for an EC2 spot instance to call back to Jenkins */ private List<EC2AbstractSlave> provisionSpot(Image image, int number, EnumSet<ProvisionOptions> provisionOptions) throws IOException { if (!spotConfig.useBidPrice) { return provisionOndemand(image, 1, provisionOptions, true, spotConfig.getFallbackToOndemand()); } AmazonEC2 ec2 = getParent().connect(); String imageId = image.getImageId(); try { LOGGER.info("Launching " + imageId + " for template " + description); KeyPair keyPair = getKeyPair(ec2); 
RequestSpotInstancesRequest spotRequest = new RequestSpotInstancesRequest(); // Validate spot bid before making the request if (getSpotMaxBidPrice() == null) { throw new AmazonClientException("Invalid Spot price specified: " + getSpotMaxBidPrice()); } spotRequest.setSpotPrice(getSpotMaxBidPrice()); spotRequest.setInstanceCount(number); LaunchSpecification launchSpecification = new LaunchSpecification(); launchSpecification.setImageId(imageId); launchSpecification.setInstanceType(type); launchSpecification.setEbsOptimized(ebsOptimized); launchSpecification.setMonitoringEnabled(monitoring); if (StringUtils.isNotBlank(getZone())) { SpotPlacement placement = new SpotPlacement(getZone()); launchSpecification.setPlacement(placement); } InstanceNetworkInterfaceSpecification net = new InstanceNetworkInterfaceSpecification(); String subnetId = chooseSubnetId(); if (StringUtils.isNotBlank(subnetId)) { net.setSubnetId(subnetId); /* * If we have a subnet ID then we can only use VPC security groups */ if (!securityGroupSet.isEmpty()) { List<String> groupIds = getEc2SecurityGroups(ec2); if (!groupIds.isEmpty()) { net.setGroups(groupIds); } } } else { if (!securityGroupSet.isEmpty()) { List<String> groupIds = getSecurityGroupsBy("group-name", securityGroupSet, ec2) .getSecurityGroups() .stream().map(SecurityGroup::getGroupId) .collect(Collectors.toList()); net.setGroups(groupIds); } } String userDataString = Base64.getEncoder().encodeToString(userData.getBytes(StandardCharsets.UTF_8)); launchSpecification.setUserData(userDataString); launchSpecification.setKeyName(keyPair.getKeyName()); launchSpecification.setInstanceType(type.toString()); net.setAssociatePublicIpAddress(getAssociatePublicIp()); net.setDeviceIndex(0); launchSpecification.withNetworkInterfaces(net); HashSet<Tag> instTags = buildTags(EC2Cloud.EC2_SLAVE_TYPE_SPOT); if (StringUtils.isNotBlank(getIamInstanceProfile())) { launchSpecification.setIamInstanceProfile(new 
IamInstanceProfileSpecification().withArn(getIamInstanceProfile())); } setupBlockDeviceMappings(image, launchSpecification.getBlockDeviceMappings()); spotRequest.setLaunchSpecification(launchSpecification); if (getSpotBlockReservationDuration() != 0) { spotRequest.setBlockDurationMinutes(getSpotBlockReservationDuration() * 60); } RequestSpotInstancesResult reqResult; try { // Make the request for a new Spot instance reqResult = ec2.requestSpotInstances(spotRequest); } catch (AmazonEC2Exception e) { if (spotConfig.getFallbackToOndemand() && e.getErrorCode().equals("MaxSpotInstanceCountExceeded")) { logProvisionInfo("There is no spot capacity available matching your request, falling back to on-demand instance."); return provisionOndemand(image, number, provisionOptions); } else { throw e; } } List<SpotInstanceRequest> reqInstances = reqResult.getSpotInstanceRequests(); if (reqInstances.isEmpty()) { throw new AmazonClientException("No spot instances found"); } List<EC2AbstractSlave> slaves = new ArrayList<>(reqInstances.size()); for(SpotInstanceRequest spotInstReq : reqInstances) { if (spotInstReq == null) { throw new AmazonClientException("Spot instance request is null"); } String slaveName = spotInstReq.getSpotInstanceRequestId(); if (spotConfig.getFallbackToOndemand()) { for (int i = 0; i < 2 && spotInstReq.getStatus().getCode().equals("pending-evaluation"); i++) { LOGGER.info("Spot request " + slaveName + " is still pending evaluation"); Thread.sleep(5000); LOGGER.info("Fetching info about spot request " + slaveName); DescribeSpotInstanceRequestsRequest describeRequest = new DescribeSpotInstanceRequestsRequest().withSpotInstanceRequestIds(slaveName); spotInstReq = ec2.describeSpotInstanceRequests(describeRequest).getSpotInstanceRequests().get(0); } List<String> spotRequestBadCodes = Arrays.asList("capacity-not-available", "capacity-oversubscribed", "price-too-low"); if (spotRequestBadCodes.contains(spotInstReq.getStatus().getCode())) { LOGGER.info("There is no 
spot capacity available matching your request, falling back to on-demand instance."); List<String> requestsToCancel = reqInstances.stream().map(SpotInstanceRequest::getSpotInstanceRequestId).collect(Collectors.toList()); CancelSpotInstanceRequestsRequest cancelRequest = new CancelSpotInstanceRequestsRequest(requestsToCancel); ec2.cancelSpotInstanceRequests(cancelRequest); return provisionOndemand(image, number, provisionOptions); } } // Now that we have our Spot request, we can set tags on it updateRemoteTags(ec2, instTags, "InvalidSpotInstanceRequestID.NotFound", spotInstReq.getSpotInstanceRequestId()); // That was a remote request - we should also update our local instance data spotInstReq.setTags(instTags); LOGGER.info("Spot instance id in provision: " + spotInstReq.getSpotInstanceRequestId()); slaves.add(newSpotSlave(spotInstReq)); } return slaves; } catch (FormException e) { throw new AssertionError(); // we should have discovered all // configuration issues upfront } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } } private void setupBlockDeviceMappings(Image image, List<BlockDeviceMapping> blockDeviceMappings) { setupRootDevice(image, blockDeviceMappings); if (useEphemeralDevices) { setupEphemeralDeviceMapping(image, blockDeviceMappings); } else { setupCustomDeviceMapping(blockDeviceMappings); } } private HashSet<Tag> buildTags(String slaveType) { boolean hasCustomTypeTag = false; boolean hasJenkinsServerUrlTag = false; HashSet<Tag> instTags = new HashSet<>(); if (tags != null && !tags.isEmpty()) { for (EC2Tag t : tags) { instTags.add(new Tag(t.getName(), t.getValue())); if (StringUtils.equals(t.getName(), EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE)) { hasCustomTypeTag = true; } if (StringUtils.equals(t.getName(), EC2Tag.TAG_NAME_JENKINS_SERVER_URL)) { hasJenkinsServerUrlTag = true; } } } if (!hasCustomTypeTag) { instTags.add(new Tag(EC2Tag.TAG_NAME_JENKINS_SLAVE_TYPE, EC2Cloud.getSlaveTypeTagValue( slaveType, 
description))); } JenkinsLocationConfiguration jenkinsLocation = JenkinsLocationConfiguration.get(); if (!hasJenkinsServerUrlTag && jenkinsLocation.getUrl() != null) { instTags.add(new Tag(EC2Tag.TAG_NAME_JENKINS_SERVER_URL, jenkinsLocation.getUrl())); } return instTags; } protected EC2OndemandSlave newOndemandSlave(Instance inst) throws FormException, IOException { EC2AgentConfig.OnDemand config = new EC2AgentConfig.OnDemandBuilder() .withName(getSlaveName(inst.getInstanceId())) .withInstanceId(inst.getInstanceId()) .withDescription(description) .withRemoteFS(remoteFS) .withNumExecutors(getNumExecutors()) .withLabelString(labels) .withMode(mode) .withInitScript(initScript) .withTmpDir(tmpDir) .withNodeProperties(nodeProperties.toList()) .withRemoteAdmin(remoteAdmin) .withJvmopts(jvmopts) .withStopOnTerminate(stopOnTerminate) .withIdleTerminationMinutes(idleTerminationMinutes) .withPublicDNS(inst.getPublicDnsName()) .withPrivateDNS(inst.getPrivateDnsName()) .withTags(EC2Tag.fromAmazonTags(inst.getTags())) .withCloudName(parent.name) .withLaunchTimeout(getLaunchTimeout()) .withAmiType(amiType) .withConnectionStrategy(connectionStrategy) .withMaxTotalUses(maxTotalUses) .withTenancyAttribute(tenancy) .build(); return EC2AgentFactory.getInstance().createOnDemandAgent(config); } protected EC2SpotSlave newSpotSlave(SpotInstanceRequest sir) throws FormException, IOException { EC2AgentConfig.Spot config = new EC2AgentConfig.SpotBuilder() .withName(getSlaveName(sir.getSpotInstanceRequestId())) .withSpotInstanceRequestId(sir.getSpotInstanceRequestId()) .withDescription(description) .withRemoteFS(remoteFS) .withNumExecutors(getNumExecutors()) .withMode(mode) .withInitScript(initScript) .withTmpDir(tmpDir) .withLabelString(labels) .withNodeProperties(nodeProperties.toList()) .withRemoteAdmin(remoteAdmin) .withJvmopts(jvmopts) .withIdleTerminationMinutes(idleTerminationMinutes) .withTags(EC2Tag.fromAmazonTags(sir.getTags())) .withCloudName(parent.name) 
.withLaunchTimeout(getLaunchTimeout()) .withAmiType(amiType) .withConnectionStrategy(connectionStrategy) .withMaxTotalUses(maxTotalUses) .build(); return EC2AgentFactory.getInstance().createSpotAgent(config); } /** * Get a KeyPair from the configured information for the agent template */ @CheckForNull private KeyPair getKeyPair(AmazonEC2 ec2) throws IOException, AmazonClientException { EC2PrivateKey ec2PrivateKey = getParent().resolvePrivateKey(); if (ec2PrivateKey == null) { throw new AmazonClientException("No keypair credential found. Please configure a credential in the Jenkins configuration."); } KeyPair keyPair = ec2PrivateKey.find(ec2); if (keyPair == null) { throw new AmazonClientException("No matching keypair found on EC2. Is the EC2 private key a valid one?"); } return keyPair; } /** * Update the tags stored in EC2 with the specified information. Re-try 5 times if instances isn't up by * catchErrorCode - e.g. InvalidSpotInstanceRequestID.NotFound or InvalidInstanceRequestID.NotFound * * @param ec2 * @param instTags * @param catchErrorCode * @param params * @throws InterruptedException */ private void updateRemoteTags(AmazonEC2 ec2, Collection<Tag> instTags, String catchErrorCode, String... 
            params) throws InterruptedException {
        for (int i = 0; i < 5; i++) {
            try {
                CreateTagsRequest tagRequest = new CreateTagsRequest();
                tagRequest.withResources(params).setTags(instTags);
                ec2.createTags(tagRequest);
                break;
            } catch (AmazonServiceException e) {
                if (e.getErrorCode().equals(catchErrorCode)) {
                    // Resource not visible yet (EC2 eventual consistency) - wait and retry.
                    Thread.sleep(5000);
                    continue;
                }
                // Unexpected error: log it; loop simply retries up to the attempt limit.
                LOGGER.log(Level.SEVERE, e.getErrorMessage(), e);
            }
        }
    }

    /**
     * Get a list of security group ids for the agent
     */
    private List<String> getEc2SecurityGroups(AmazonEC2 ec2) throws AmazonClientException {
        List<String> groupIds = new ArrayList<>();

        DescribeSecurityGroupsResult groupResult = getSecurityGroupsBy("group-name", securityGroupSet, ec2);
        if (groupResult.getSecurityGroups().size() == 0) {
            // Nothing matched by name - fall back to looking the groups up by id.
            groupResult = getSecurityGroupsBy("group-id", securityGroupSet, ec2);
        }

        for (SecurityGroup group : groupResult.getSecurityGroups()) {
            if (group.getVpcId() != null && !group.getVpcId().isEmpty()) {
                // Only accept VPC groups whose VPC has an available subnet matching the current one.
                List<Filter> filters = new ArrayList<>();
                filters.add(new Filter("vpc-id").withValues(group.getVpcId()));
                filters.add(new Filter("state").withValues("available"));
                filters.add(new Filter("subnet-id").withValues(getCurrentSubnetId()));

                DescribeSubnetsRequest subnetReq = new DescribeSubnetsRequest();
                subnetReq.withFilters(filters);
                DescribeSubnetsResult subnetResult = ec2.describeSubnets(subnetReq);

                List<Subnet> subnets = subnetResult.getSubnets();
                if (subnets != null && !subnets.isEmpty()) {
                    groupIds.add(group.getGroupId());
                }
            }
        }

        // Every configured group must have resolved to a VPC group id, or the config is invalid.
        if (securityGroupSet.size() != groupIds.size()) {
            throw new AmazonClientException("Security groups must all be VPC security groups to work in a VPC context");
        }

        return groupIds;
    }

    // Looks up security groups matching the given filter (e.g. "group-name" or "group-id").
    private DescribeSecurityGroupsResult getSecurityGroupsBy(String filterName, Set<String> filterValues, AmazonEC2 ec2) {
        DescribeSecurityGroupsRequest groupReq = new DescribeSecurityGroupsRequest();
        groupReq.withFilters(new Filter(filterName).withValues(filterValues));
        return ec2.describeSecurityGroups(groupReq);
    }

    /**
     * Provisions a new EC2 agent based on
the currently running instance on EC2, instead of starting a new one. */ public EC2AbstractSlave attach(String instanceId, TaskListener listener) throws AmazonClientException, IOException { PrintStream logger = listener.getLogger(); AmazonEC2 ec2 = getParent().connect(); try { logger.println("Attaching to " + instanceId); LOGGER.info("Attaching to " + instanceId); DescribeInstancesRequest request = new DescribeInstancesRequest(); request.setInstanceIds(Collections.singletonList(instanceId)); Instance inst = ec2.describeInstances(request).getReservations().get(0).getInstances().get(0); return newOndemandSlave(inst); } catch (FormException e) { throw new AssertionError(); // we should have discovered all // configuration issues upfront } } /** * Initializes data structure that we don't persist. */ protected Object readResolve() { Jenkins.get().checkPermission(Jenkins.ADMINISTER); labelSet = Label.parse(labels); securityGroupSet = parseSecurityGroups(); /** * In releases of this plugin prior to 1.18, template-specific instance caps could be configured but were not * enforced. As a result, it was possible to have the instance cap for a template be configured to 0 (zero) with * no ill effects. Starting with version 1.18, template-specific instance caps are enforced, so if a * configuration has a cap of zero for a template, no instances will be launched from that template. Since there * is no practical value of intentionally setting the cap to zero, this block will override such a setting to a * value that means 'no cap'. 
         */
        if (instanceCap == 0) {
            instanceCap = Integer.MAX_VALUE;
        }

        if (amiType == null) {
            // Legacy configurations predate AMITypeData; default to Unix using the old fields.
            amiType = new UnixData(rootCommandPrefix, slaveCommandPrefix, slaveCommandSuffix, sshPort);
        }

        // 1.43 new parameters
        if (connectionStrategy == null ) {
            connectionStrategy = ConnectionStrategy.backwardsCompatible(usePrivateDnsName, connectUsingPublicIp, associatePublicIp);
        }

        if (maxTotalUses == 0) {
            // 0 is not a meaningful cap; normalize to -1 (unlimited).
            maxTotalUses = -1;
        }

        if (nodeProperties == null) {
            nodeProperties = new DescribableList<>(Saveable.NOOP);
        }

        if (tenancy == null) {
            tenancy = Tenancy.Default;
        }

        // migration of old value to new variable.
        if (useDedicatedTenancy) {
            tenancy = Tenancy.Dedicated;
        }

        if (ebsEncryptRootVolume == null) {
            ebsEncryptRootVolume = EbsEncryptRootVolume.DEFAULT;
        }

        return this;
    }

    public Descriptor<SlaveTemplate> getDescriptor() {
        return Jenkins.get().getDescriptor(getClass());
    }

    // A non-positive configured timeout is treated as 'wait forever'.
    public int getLaunchTimeout() {
        return launchTimeout <= 0 ? Integer.MAX_VALUE : launchTimeout;
    }

    // Empty string is the UI sentinel for 'no launch timeout'.
    public String getLaunchTimeoutStr() {
        if (launchTimeout == Integer.MAX_VALUE) {
            return "";
        } else {
            return String.valueOf(launchTimeout);
        }
    }

    public boolean isWindowsSlave() {
        return amiType.isWindows();
    }

    public boolean isUnixSlave() {
        return amiType.isUnix();
    }

    public boolean isMacAgent() {
        return amiType.isMac();
    }

    // Windows AMIs carry an admin password; other AMI types get an empty secret.
    public Secret getAdminPassword() {
        return amiType.isWindows() ?
            ((WindowsData) amiType).getPassword() : Secret.fromString("");
    }

    public boolean isUseHTTPS() {
        return amiType.isWindows() && ((WindowsData) amiType).isUseHTTPS();
    }

    /**
     * Describes the instances that match this template's launch filters.
     *
     * @param ec2
     * @param allSubnets if true, uses all subnets defined for this SlaveTemplate as the filter, else will only use the current subnet
     * @return DescribeInstancesResult of DescribeInstanceRequst constructed from this SlaveTemplate's configs
     */
    DescribeInstancesResult getDescribeInstanceResult(AmazonEC2 ec2, boolean allSubnets) throws IOException {
        HashMap<RunInstancesRequest, List<Filter>> runInstancesRequestFilterMap = makeRunInstancesRequestAndFilters(getImage(), 1, ec2, false);
        Map.Entry<RunInstancesRequest, List<Filter>> entry = runInstancesRequestFilterMap.entrySet().iterator().next();
        List<Filter> diFilters = entry.getValue();
        if (allSubnets) {
            /* remove any existing subnet-id filters */
            List<Filter> rmvFilters = new ArrayList<>();
            for (Filter f : diFilters) {
                if (f.getName().equals("subnet-id")) {
                    rmvFilters.add(f);
                }
            }
            for (Filter f : rmvFilters) {
                diFilters.remove(f);
            }

            /* Add filter using all subnets defined for this SlaveTemplate */
            Filter subnetFilter = new Filter("subnet-id");
            subnetFilter.setValues(Arrays.asList(getSubnetId().split(" ")));
            diFilters.add(subnetFilter);
        }
        DescribeInstancesRequest diRequest = new DescribeInstancesRequest().withFilters(diFilters);
        return ec2.describeInstances(diRequest);
    }

    public boolean isAllowSelfSignedCertificate() {
        return amiType.isWindows() && ((WindowsData) amiType).isAllowSelfSignedCertificate();
    }

    @Extension
    public static final class OnSaveListener extends SaveableListener {
        @Override
        public void onChange(Saveable o, XmlFile file) {
            // Re-check minimum instance counts whenever the global Jenkins config is saved.
            if (o instanceof Jenkins) {
                MinimumInstanceChecker.checkForMinimumInstances();
            }
        }
    }

    @Extension
    public static final class DescriptorImpl extends Descriptor<SlaveTemplate> {

        @Override
        public String getDisplayName() {
            return "";
        }

        public List<Descriptor<AMITypeData>> getAMITypeDescriptors() {
            return
Jenkins.get().getDescriptorList(AMITypeData.class); } /** * Since this shares much of the configuration with {@link EC2Computer}, check its help page, too. */ @Override public String getHelpFile(String fieldName) { String p = super.getHelpFile(fieldName); if (p != null) return p; Descriptor slaveDescriptor = Jenkins.get().getDescriptor(EC2OndemandSlave.class); if (slaveDescriptor != null) { p = slaveDescriptor.getHelpFile(fieldName); if (p != null) return p; } slaveDescriptor = Jenkins.get().getDescriptor(EC2SpotSlave.class); if (slaveDescriptor != null) return slaveDescriptor.getHelpFile(fieldName); return null; } @Restricted(NoExternalUse.class) public FormValidation doCheckDescription(@QueryParameter String value) { try { Jenkins.checkGoodName(value); return FormValidation.ok(); } catch (Failure e) { return FormValidation.error(e.getMessage()); } } @Restricted(NoExternalUse.class) public FormValidation doCheckRemoteAdmin(@QueryParameter String value){ if(StringUtils.isBlank(value) || Jenkins.get().hasPermission(Jenkins.ADMINISTER)){ return FormValidation.ok(); }else{ return FormValidation.error(Messages.General_MissingPermission()); } } @Restricted(NoExternalUse.class) public FormValidation doCheckTmpDir(@QueryParameter String value){ if(StringUtils.isBlank(value) || Jenkins.get().hasPermission(Jenkins.ADMINISTER)){ return FormValidation.ok(); } else { return FormValidation.error(Messages.General_MissingPermission()); } } @Restricted(NoExternalUse.class) public FormValidation doCheckJvmopts(@QueryParameter String value){ if(StringUtils.isBlank(value) || Jenkins.get().hasPermission(Jenkins.ADMINISTER)){ return FormValidation.ok(); } else { return FormValidation.error(Messages.General_MissingPermission()); } } /*** * Check that the AMI requested is available in the cloud and can be used. 
*/ @RequirePOST public FormValidation doValidateAmi(@QueryParameter boolean useInstanceProfileForCredentials, @QueryParameter String credentialsId, @QueryParameter String ec2endpoint, @QueryParameter String region, final @QueryParameter String ami, @QueryParameter String roleArn, @QueryParameter String roleSessionName) throws IOException { checkPermission(EC2Cloud.PROVISION); AWSCredentialsProvider credentialsProvider = EC2Cloud.createCredentialsProvider(useInstanceProfileForCredentials, credentialsId, roleArn, roleSessionName, region); AmazonEC2 ec2; if (region != null) { ec2 = AmazonEC2Factory.getInstance().connect(credentialsProvider, AmazonEC2Cloud.getEc2EndpointUrl(region)); } else { ec2 = AmazonEC2Factory.getInstance().connect(credentialsProvider, new URL(ec2endpoint)); } try { Image img = CloudHelper.getAmiImage(ec2, ami); if (img == null) { return FormValidation.error("No such AMI, or not usable with this accessId: " + ami); } String ownerAlias = img.getImageOwnerAlias(); return FormValidation.ok(img.getImageLocation() + (ownerAlias != null ? 
" by " + ownerAlias : "")); } catch (AmazonClientException e) { return FormValidation.error(e.getMessage()); } } private void checkPermission(Permission p) { final EC2Cloud ancestorObject = Stapler.getCurrentRequest().findAncestorObject(EC2Cloud.class); if (ancestorObject != null) { ancestorObject.checkPermission(p); } else { Jenkins.get().checkPermission(p); } } public FormValidation doCheckLabelString(@QueryParameter String value, @QueryParameter Node.Mode mode) { if (mode == Node.Mode.EXCLUSIVE && (value == null || value.trim().isEmpty())) { return FormValidation.warning("You may want to assign labels to this node;" + " it's marked to only run jobs that are exclusively tied to itself or a label."); } return FormValidation.ok(); } public FormValidation doCheckIdleTerminationMinutes(@QueryParameter String value) { if (value == null || value.trim().isEmpty()) return FormValidation.ok(); try { int val = Integer.parseInt(value); if (val >= -59) return FormValidation.ok(); } catch (NumberFormatException nfe) { } return FormValidation.error("Idle Termination time must be a greater than -59 (or null)"); } public FormValidation doCheckMaxTotalUses(@QueryParameter String value) { try { int val = Integer.parseInt(value); if (val >= -1) return FormValidation.ok(); } catch (NumberFormatException nfe) { } return FormValidation.error("Maximum Total Uses must be greater or equal to -1"); } public FormValidation doCheckMinimumNumberOfInstances(@QueryParameter String value, @QueryParameter String instanceCapStr) { if (value == null || value.trim().isEmpty()) return FormValidation.ok(); try { int val = Integer.parseInt(value); if (val >= 0) { int instanceCap; try { instanceCap = Integer.parseInt(instanceCapStr); } catch (NumberFormatException ignore) { instanceCap = Integer.MAX_VALUE; } if (val > instanceCap) { return FormValidation .error("Minimum number of instances must not be larger than AMI Instance Cap %d", instanceCap); } return FormValidation.ok(); } } catch 
(NumberFormatException ignore) { } return FormValidation.error("Minimum number of instances must be a non-negative integer (or null)"); } public FormValidation doCheckMinimumNoInstancesActiveTimeRangeFrom(@QueryParameter String value) { try { MinimumNumberOfInstancesTimeRangeConfig.validateLocalTimeString(value); return FormValidation.ok(); } catch (IllegalArgumentException e) { return FormValidation.error("Please enter value in format 'h:mm a' or 'HH:mm'"); } } public FormValidation doCheckMinimumNoInstancesActiveTimeRangeTo(@QueryParameter String value) { try { MinimumNumberOfInstancesTimeRangeConfig.validateLocalTimeString(value); return FormValidation.ok(); } catch (IllegalArgumentException e) { return FormValidation.error("Please enter value in format 'h:mm a' or 'HH:mm'"); } } // For some reason, all days will validate against this method so no need to repeat for each day. public FormValidation doCheckMonday(@QueryParameter boolean monday, @QueryParameter boolean tuesday, @QueryParameter boolean wednesday, @QueryParameter boolean thursday, @QueryParameter boolean friday, @QueryParameter boolean saturday, @QueryParameter boolean sunday) { if (!(monday || tuesday || wednesday || thursday || friday || saturday || sunday)) { return FormValidation.warning("At least one day should be checked or minimum number of instances won't be active"); } return FormValidation.ok(); } public FormValidation doCheckMinimumNumberOfSpareInstances(@QueryParameter String value, @QueryParameter String instanceCapStr) { if (value == null || value.trim().isEmpty()) return FormValidation.ok(); try { int val = Integer.parseInt(value); if (val >= 0) { int instanceCap; try { instanceCap = Integer.parseInt(instanceCapStr); } catch (NumberFormatException ignore) { instanceCap = Integer.MAX_VALUE; } if (val > instanceCap) { return FormValidation .error("Minimum number of spare instances must not be larger than AMI Instance Cap %d", instanceCap); } return FormValidation.ok(); } } catch 
(NumberFormatException ignore) { } return FormValidation.error("Minimum number of spare instances must be a non-negative integer (or null)"); } public FormValidation doCheckInstanceCapStr(@QueryParameter String value) { if (value == null || value.trim().isEmpty()) return FormValidation.ok(); try { int val = Integer.parseInt(value); if (val > 0) return FormValidation.ok(); } catch (NumberFormatException nfe) { } return FormValidation.error("InstanceCap must be a non-negative integer (or null)"); } /* * Validate the Spot Block Duration to be between 0 & 6 hours as specified in the AWS API */ public FormValidation doCheckSpotBlockReservationDurationStr(@QueryParameter String value) { if (value == null || value.trim().isEmpty()) return FormValidation.ok(); try { int val = Integer.parseInt(value); if (val >= 0 && val <= 6) return FormValidation.ok(); } catch (NumberFormatException nfe) { } return FormValidation.error("Spot Block Reservation Duration must be an integer between 0 & 6"); } public FormValidation doCheckLaunchTimeoutStr(@QueryParameter String value) { if (value == null || value.trim().isEmpty()) return FormValidation.ok(); try { int val = Integer.parseInt(value); if (val >= 0) return FormValidation.ok(); } catch (NumberFormatException nfe) { } return FormValidation.error("Launch Timeout must be a non-negative integer (or null)"); } @RequirePOST public ListBoxModel doFillZoneItems(@QueryParameter boolean useInstanceProfileForCredentials, @QueryParameter String credentialsId, @QueryParameter String region, @QueryParameter String roleArn, @QueryParameter String roleSessionName) throws IOException, ServletException { checkPermission(EC2Cloud.PROVISION); AWSCredentialsProvider credentialsProvider = EC2Cloud.createCredentialsProvider(useInstanceProfileForCredentials, credentialsId, roleArn, roleSessionName, region); return EC2AbstractSlave.fillZoneItems(credentialsProvider, region); } public String getDefaultTenancy() { // new templates default to the most secure 
strategy return Tenancy.Default.name(); } /* * Validate the Spot Max Bid Price to ensure that it is a floating point number >= .001 */ public FormValidation doCheckSpotMaxBidPrice(@QueryParameter String spotMaxBidPrice) { if (SpotConfiguration.normalizeBid(spotMaxBidPrice) != null) { return FormValidation.ok(); } return FormValidation.error("Not a correct bid price"); } public String getDefaultConnectionStrategy() { return ConnectionStrategy.PRIVATE_IP.name(); } public List<NodePropertyDescriptor> getNodePropertyDescriptors() { return NodePropertyDescriptor.for_(NodeProperty.all(), EC2AbstractSlave.class); } public ListBoxModel doFillConnectionStrategyItems(@QueryParameter String connectionStrategy) { return Stream.of(ConnectionStrategy.values()) .map(v -> { if (v.toString().equals(connectionStrategy)) { return new ListBoxModel.Option(v.toString(), v.name(), true); } else { return new ListBoxModel.Option(v.toString(), v.name(), false); } }) .collect(Collectors.toCollection(ListBoxModel::new)); } public FormValidation doCheckConnectionStrategy(@QueryParameter String connectionStrategy) { return Stream.of(ConnectionStrategy.values()) .filter(v -> v.name().equals(connectionStrategy)) .findFirst() .map(s -> FormValidation.ok()) .orElse(FormValidation.error("Could not find selected connection strategy")); } public String getDefaultHostKeyVerificationStrategy() { // new templates default to the most secure strategy return HostKeyVerificationStrategyEnum.CHECK_NEW_HARD.name(); } public ListBoxModel doFillHostKeyVerificationStrategyItems(@QueryParameter String hostKeyVerificationStrategy) { return Stream.of(HostKeyVerificationStrategyEnum.values()) .map(v -> { if (v.name().equals(hostKeyVerificationStrategy)) { return new ListBoxModel.Option(v.getDisplayText(), v.name(), true); } else { return new ListBoxModel.Option(v.getDisplayText(), v.name(), false); } }) .collect(Collectors.toCollection(ListBoxModel::new)); } public FormValidation 
doCheckHostKeyVerificationStrategy(@QueryParameter String hostKeyVerificationStrategy) { Stream<HostKeyVerificationStrategyEnum> stream = Stream.of(HostKeyVerificationStrategyEnum.values()); Stream<HostKeyVerificationStrategyEnum> filteredStream = stream.filter(v -> v.name().equals(hostKeyVerificationStrategy)); Optional<HostKeyVerificationStrategyEnum> matched = filteredStream.findFirst(); Optional<FormValidation> okResult = matched.map(s -> FormValidation.ok()); return okResult.orElse(FormValidation.error(String.format("Could not find selected host key verification (%s)", hostKeyVerificationStrategy))); } public ListBoxModel doFillTenancyItems(@QueryParameter String tenancy) { return Stream.of(Tenancy.values()) .map(v -> { if (v.name().equals(tenancy)) { return new ListBoxModel.Option(v.name(), v.name(), true); } else { return new ListBoxModel.Option(v.name(), v.name(), false); } }) .collect(Collectors.toCollection(ListBoxModel::new)); } public String getDefaultEbsEncryptRootVolume() { return EbsEncryptRootVolume.DEFAULT.getDisplayText(); } public ListBoxModel doFillEbsEncryptRootVolumeItems(@QueryParameter String ebsEncryptRootVolume ) { return Stream.of(EbsEncryptRootVolume.values()) .map(v -> { if (v.name().equals(ebsEncryptRootVolume)) { return new ListBoxModel.Option(v.getDisplayText(), v.name(), true); } else { return new ListBoxModel.Option(v.getDisplayText(), v.name(), false); } }) .collect(Collectors.toCollection(ListBoxModel::new)); } public FormValidation doEbsEncryptRootVolume(@QueryParameter String ebsEncryptRootVolume) { Stream<EbsEncryptRootVolume> stream = Stream.of(EbsEncryptRootVolume.values()); Stream<EbsEncryptRootVolume> filteredStream = stream.filter(v -> v.name().equals(ebsEncryptRootVolume)); Optional<EbsEncryptRootVolume> matched = filteredStream.findFirst(); Optional<FormValidation> okResult = matched.map(s -> FormValidation.ok()); return okResult.orElse(FormValidation.error(String.format("Could not find selected option (%s)", 
ebsEncryptRootVolume))); } } }
Fix connection fill
src/main/java/hudson/plugins/ec2/SlaveTemplate.java
Fix connection fill
<ide><path>rc/main/java/hudson/plugins/ec2/SlaveTemplate.java <ide> public ListBoxModel doFillConnectionStrategyItems(@QueryParameter String connectionStrategy) { <ide> return Stream.of(ConnectionStrategy.values()) <ide> .map(v -> { <del> if (v.toString().equals(connectionStrategy)) { <add> if (v.name().equals(connectionStrategy)) { <ide> return new ListBoxModel.Option(v.toString(), v.name(), true); <ide> } else { <ide> return new ListBoxModel.Option(v.toString(), v.name(), false);
Java
apache-2.0
9ad22f2f0d486df79920d79e2bcb8637c0b6b3cc
0
ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma,ppavlidis/Gemma
/* * The Gemma project. * * Copyright (c) 2006-2007 University of British Columbia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ubic.gemma.model.analysis.expression.diff; import java.math.BigInteger; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.time.StopWatch; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hibernate.Hibernate; import org.hibernate.LockOptions; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.criterion.DetachedCriteria; import org.hibernate.criterion.Restrictions; import org.hibernate.type.DoubleType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.orm.hibernate3.HibernateTemplate; import org.springframework.stereotype.Repository; import ubic.gemma.model.expression.designElement.CompositeSequence; import ubic.gemma.model.expression.experiment.BioAssaySet; import ubic.gemma.model.expression.experiment.ExperimentalFactor; import ubic.gemma.model.expression.experiment.ExpressionExperiment; import ubic.gemma.model.expression.experiment.FactorValue; import ubic.gemma.model.genome.Gene; /** * @author keshav * @version $Id$ * @see ubic.gemma.model.expression.analysis.DifferentialExpressionAnalysisResult */ @Repository 
public class DifferentialExpressionResultDaoImpl extends ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDaoBase { private Log log = LogFactory.getLog( this.getClass() ); private static final String fetchResultsByGeneAndExperimentsQuery = "select distinct e, r" + " from DifferentialExpressionAnalysisImpl a, BioSequence2GeneProductImpl bs2gp" + " inner join a.experimentAnalyzed e " + " inner join a.resultSets rs inner join rs.results r inner join fetch r.probe p " + "inner join p.biologicalCharacteristic bs inner join bs2gp.geneProduct gp inner join gp.gene g" + " where bs2gp.bioSequence=bs and g=:gene and e in (:experimentsAnalyzed)"; // no order by clause, we add // it later private static final String fetchResultsByGene = "select distinct e, r" + " from DifferentialExpressionAnalysisImpl a, BioSequence2GeneProductImpl bs2gp" + " inner join a.experimentAnalyzed e " + " inner join a.resultSets rs inner join rs.results r inner join fetch r.probe p " + "inner join p.biologicalCharacteristic bs inner join bs2gp.geneProduct gp inner join gp.gene g" + " where bs2gp.bioSequence=bs and g=:gene"; // no order by clause, we add it later private static final String fetchResultsByExperimentsQuery = "select distinct e, r" + " from DifferentialExpressionAnalysisImpl a, BioSequence2GeneProductImpl bs2gp" + " inner join a.experimentAnalyzed e " + " inner join a.resultSets rs inner join rs.results r inner join fetch r.probe p " + "left join p.biologicalCharacteristic bs left join bs2gp.geneProduct gp left join gp.gene g" + " where bs2gp.bioSequence=bs and e in (:experimentsAnalyzed) and r.correctedPvalue < :threshold order by r.correctedPvalue"; /** * No constraint on gene */ private static final String fetchResultsByResultSetQuery = "select distinct rs, r " + " from DifferentialExpressionAnalysisImpl a " + " inner join a.experimentAnalyzed e " + " inner join a.resultSets rs inner join rs.results r inner join fetch r.probe p " + " where rs in 
(:resultsAnalyzed)"; // no order by clause, we add it later; 'e' is not used in this query. private static final String fetchResultsByResultSetAndGeneQuery = "select dear.CORRECTED_PVALUE " + " from DIFFERENTIAL_EXPRESSION_ANALYSIS_RESULT dear, GENE2CS g2s FORCE KEY(GENE), PROBE_ANALYSIS_RESULT par " + " where g2s.CS = par.PROBE_FK and par.ID = dear.ID and " + " dear.EXPRESSION_ANALYSIS_RESULT_SET_FK = :rs_id and g2s.GENE = :gene_id " + " order by dear.CORRECTED_P_VALUE_BIN DESC"; // private static final String fetchBatchProbeAnalysisResultsByResultSetsAndGeneQuery = // "SELECT SQL_NO_CACHE dear.EXPRESSION_ANALYSIS_RESULT_SET_FK, dear.CORRECTED_P_VALUE_BIN, dear.ID" // + " from DIFFERENTIAL_EXPRESSION_ANALYSIS_RESULT dear, GENE2CS g2s , PROBE_ANALYSIS_RESULT par" //FORCE KEY(GENE) // + " where par.ID = dear.ID and g2s.CS = par.PROBE_FK and " // + " dear.EXPRESSION_ANALYSIS_RESULT_SET_FK IN (:rs_ids) and " // + " g2s.AD in (:ad_ids) and " // + // " g2s.GENE IN (:gene_ids) GROUP BY dear.EXPRESSION_ANALYSIS_RESULT_SET_FK, dear.CORRECTED_P_VALUE_BIN ORDER BY dear.CORRECTED_P_VALUE_BIN DESC"; private static final String fetchBatchProbeAnalysisResultsByResultSetsAndGeneQuery = "SELECT g2s.GENE, dear.CORRECTED_P_VALUE_BIN, dear.ID" + " from DIFFERENTIAL_EXPRESSION_ANALYSIS_RESULT dear, GENE2CS g2s FORCE KEY(GENE), PROBE_ANALYSIS_RESULT par" + " where par.ID = dear.ID and g2s.CS = par.PROBE_FK and " + " dear.EXPRESSION_ANALYSIS_RESULT_SET_FK = :rs_id and " + " g2s.AD in (:ad_ids) and " + " g2s.GENE IN (:gene_ids) ";//GROUP BY g2s.GENE, dear.CORRECTED_P_VALUE_BIN ORDER BY dear.CORRECTED_P_VALUE_BIN DESC"; @Autowired public DifferentialExpressionResultDaoImpl( SessionFactory sessionFactory ) { super.setSessionFactory( sessionFactory ); } /* * (non-Javadoc) * * @see * ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDao#countNumberOfDifferentiallyExpressedProbes * (long, double) */ public Integer countNumberOfDifferentiallyExpressedProbes( long 
resultSetId, double threshold ) { DetachedCriteria criteria = DetachedCriteria.forClass( HitListSize.class ); criteria.add( Restrictions.eq( "id", resultSetId ) ); criteria.add( Restrictions.eq( "thresholdQValue", threshold ) ); List<?> results = this.getHibernateTemplate().findByCriteria( criteria ); Object result = null; if ( results != null ) { if ( results.size() > 1 ) { throw new org.springframework.dao.InvalidDataAccessResourceUsageException( "More than one instance of '" + HitListSize.class.getName() + "' was found when executing query" ); } else if ( results.size() == 1 ) { result = results.iterator().next(); return ( ( HitListSize ) result ).getNumberOfProbes(); } return 0; } return 0; } /* * (non-Javadoc) * * @see * ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisDao#findResultsForGeneInExperiments(ubic * .gemma.model.genome.Gene ) */ public Map<BioAssaySet, List<ProbeAnalysisResult>> find( Gene gene ) { StopWatch timer = new StopWatch(); timer.start(); Map<BioAssaySet, List<ProbeAnalysisResult>> results = new HashMap<BioAssaySet, List<ProbeAnalysisResult>>(); if ( gene == null ) return results; HibernateTemplate tpl = new HibernateTemplate( this.getSessionFactory() ); tpl.setCacheQueries( true ); List<?> qresult = tpl.findByNamedParam( fetchResultsByGene, "gene", gene ); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; BioAssaySet ee = ( BioAssaySet ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } /* * (non-Javadoc) * * @see * ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisDao#findResultsForGeneInExperiments(ubic * .gemma.model.genome.Gene, java.util.Collection) */ public Map<BioAssaySet, List<ProbeAnalysisResult>> 
find( Gene gene, Collection<BioAssaySet> experimentsAnalyzed ) { Map<BioAssaySet, List<ProbeAnalysisResult>> results = new HashMap<BioAssaySet, List<ProbeAnalysisResult>>(); if ( experimentsAnalyzed.size() == 0 ) { return results; } StopWatch timer = new StopWatch(); timer.start(); String[] paramNames = { "gene", "experimentsAnalyzed" }; Object[] objectValues = { gene, experimentsAnalyzed }; List<?> qresult = this.getHibernateTemplate().findByNamedParam( fetchResultsByGeneAndExperimentsQuery, paramNames, objectValues ); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; ExpressionExperiment ee = ( ExpressionExperiment ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } /* * (non-Javadoc) * * @see ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDao#find(java.util.Collection, double, * java.lang.Integer) */ public java.util.Map<ubic.gemma.model.expression.experiment.BioAssaySet, java.util.List<ProbeAnalysisResult>> find( java.util.Collection<ubic.gemma.model.expression.experiment.BioAssaySet> experiments, double qvalueThreshold, Integer limit ) { Map<BioAssaySet, List<ProbeAnalysisResult>> results = new HashMap<BioAssaySet, List<ProbeAnalysisResult>>(); if ( experiments.size() == 0 ) { return results; } StopWatch timer = new StopWatch(); timer.start(); HibernateTemplate tpl = new HibernateTemplate( this.getSessionFactory() ); tpl.setQueryCacheRegion( "diffExResult" ); tpl.setCacheQueries( true ); if ( limit != null ) { tpl.setMaxResults( limit ); } String[] paramNames = { "experimentsAnalyzed", "threshold" }; Object[] objectValues = { experiments, qvalueThreshold }; List<?> qresult = tpl.findByNamedParam( fetchResultsByExperimentsQuery, paramNames, objectValues 
); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; BioAssaySet ee = ( BioAssaySet ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } /* * (non-Javadoc) * * @seeubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisDao# * findResultsForGeneInExperimentsMetThreshold(ubic.gemma.model.genome.Gene, double, integer) */ public java.util.Map<ubic.gemma.model.expression.experiment.BioAssaySet, java.util.List<ProbeAnalysisResult>> find( ubic.gemma.model.genome.Gene gene, double threshold, Integer limit ) { StopWatch timer = new StopWatch(); timer.start(); String qs = fetchResultsByGene; if ( threshold > 0 ) qs = qs + " and r.correctedPvalue < :threshold"; HibernateTemplate tpl = new HibernateTemplate( this.getSessionFactory() ); tpl.setQueryCacheRegion( "diffExResult" ); tpl.setCacheQueries( true ); if ( limit != null ) { tpl.setMaxResults( limit ); qs += " order by r.correctedPvalue"; } Map<BioAssaySet, List<ProbeAnalysisResult>> results = new HashMap<BioAssaySet, List<ProbeAnalysisResult>>(); String[] paramNames = { "gene", "threshold" }; Object[] objectValues = { gene, threshold }; List<?> qresult = tpl.findByNamedParam( qs, paramNames, objectValues ); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; BioAssaySet ee = ( BioAssaySet ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } log.debug( "Num experiments with probe analysis results (with limit = " + limit + ") : " + results.size() + ". 
Number of probes returned in total: " + qresult.size() ); timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } /* * (non-Javadoc) * * @seeubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisDao# * findResultsForGeneInExperimentsMetThreshold(ubic.gemma.model.genome.Gene, java.util.Collection, double, Integer) */ public java.util.Map<ubic.gemma.model.expression.experiment.BioAssaySet, java.util.List<ProbeAnalysisResult>> find( ubic.gemma.model.genome.Gene gene, java.util.Collection<ubic.gemma.model.expression.experiment.BioAssaySet> experimentsAnalyzed, double threshold, Integer limit ) { StopWatch timer = new StopWatch(); timer.start(); String qs = fetchResultsByGeneAndExperimentsQuery + " and r.correctedPvalue < :threshold"; HibernateTemplate tpl = new HibernateTemplate( this.getSessionFactory() ); tpl.setQueryCacheRegion( "diffExResult" ); tpl.setCacheQueries( true ); if ( limit != null ) { tpl.setMaxResults( limit ); qs += " order by r.correctedPvalue"; } Map<BioAssaySet, List<ProbeAnalysisResult>> results = new HashMap<BioAssaySet, List<ProbeAnalysisResult>>(); if ( experimentsAnalyzed.size() == 0 ) { return results; } String[] paramNames = { "gene", "experimentsAnalyzed", "threshold" }; Object[] objectValues = { gene, experimentsAnalyzed, threshold }; List<?> qresult = tpl.findByNamedParam( qs, paramNames, objectValues ); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; BioAssaySet ee = ( BioAssaySet ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } log.warn( "Num experiments with probe analysis results (with limit = " + limit + ") : " + results.size() + ". 
Number of probes returned in total: " + qresult.size() ); timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } public List<Double> findGeneInResultSets( Gene gene, ExpressionAnalysisResultSet resultSet, Collection<Long> arrayDesignIds, Integer limit ) { StopWatch timer = new StopWatch(); timer.start(); List<Double> results = null; try { Session session = super.getSession(); org.hibernate.SQLQuery queryObject = session.createSQLQuery( fetchResultsByResultSetAndGeneQuery ); queryObject.setLong( "gene_id", gene.getId() ); queryObject.setLong( "rs_id", resultSet.getId() ); // queryObject.setParameterList( "array_ids", arrayDesignIds ); // queryObject.setLong( "array_ids", arrayDesignIds.iterator().next() ); if ( limit != null ) { queryObject.setMaxResults( limit ); } queryObject.addScalar( "CORRECTED_PVALUE", new DoubleType() ); results = queryObject.list(); } catch ( org.hibernate.HibernateException ex ) { throw super.convertHibernateAccessException( ex ); } timer.stop(); if ( log.isDebugEnabled() ) log.debug( "Fetching probeResults from resultSet " + resultSet.getId() + " for gene " + gene.getId() + "and " + arrayDesignIds.size() + "arrays took : " + timer.getTime() + " ms" ); return results; } /** * Given a list of result sets finds the results that met the given threshold * * @param resultsAnalyzed * @param threshold * @param limit - max number of results to return. 
* @return */ public java.util.Map<ExpressionAnalysisResultSet, List<ProbeAnalysisResult>> findInResultSets( java.util.Collection<ExpressionAnalysisResultSet> resultsAnalyzed, double threshold, Integer limit ) { Map<ExpressionAnalysisResultSet, List<ProbeAnalysisResult>> results = new HashMap<ExpressionAnalysisResultSet, List<ProbeAnalysisResult>>(); if ( resultsAnalyzed.size() == 0 ) { return results; } // Integer bin = Math.log10(threshold); StopWatch timer = new StopWatch(); timer.start(); String qs = fetchResultsByResultSetQuery + " and r.correctedPvalue < :threshold order by r.correctedPvalue"; HibernateTemplate tpl = new HibernateTemplate( this.getSessionFactory() ); if ( limit != null ) { tpl.setMaxResults( limit ); } String[] paramNames = { "resultsAnalyzed", "threshold" }; Object[] objectValues = { resultsAnalyzed, threshold }; List<?> qresult = tpl.findByNamedParam( qs, paramNames, objectValues ); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; ExpressionAnalysisResultSet ee = ( ExpressionAnalysisResultSet ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } public static class DiffExprGeneSearchResult { private long probeAnalysisResultId; private int numberOfProbes = 0; private int numberOfProbesDiffExpressed = 0; public int getNumberOfProbesDiffExpressed() { return numberOfProbesDiffExpressed; } public void setNumberOfProbesDiffExpressed( int numberOfProbesDiffExpressed ) { this.numberOfProbesDiffExpressed = numberOfProbesDiffExpressed; } public long getProbeAnalysisResultId() { return probeAnalysisResultId; } public void setProbeAnalysisResultId( long probeAnalysisResultId ) { this.probeAnalysisResultId = probeAnalysisResultId; } public int getNumberOfProbes() { 
return numberOfProbes; } public void setNumberOfProbes( int numberOfProbes ) { this.numberOfProbes = numberOfProbes; } } public Map<Long, DiffExprGeneSearchResult> findProbeAnalysisResultIdsInResultSet( Long resultSetId, Collection<Long> geneIds, Collection<Long> adUsed ) { StopWatch timer = new StopWatch(); timer.start(); Map<Long,DiffExprGeneSearchResult> results = new HashMap<Long,DiffExprGeneSearchResult>(); Map<Long, Integer> best_p_value = new HashMap<Long, Integer>(); Session session = super.getSession(); try { org.hibernate.SQLQuery queryObject = session .createSQLQuery( fetchBatchProbeAnalysisResultsByResultSetsAndGeneQuery ); queryObject.setLong( "rs_id", resultSetId ); queryObject.setParameterList( "gene_ids", geneIds ); queryObject.setParameterList( "ad_ids", adUsed ); List<?> queryResult = queryObject.list(); log.info( "Got " + queryResult.size() + " results" ); if ( queryResult.isEmpty() ) return results; // Get probe result with the best pValue. for ( Object o : queryResult ) { Object[] row = ( Object[] ) o; BigInteger geneId = ( BigInteger ) row[0]; Integer p_value_bin = ( Integer ) row[1]; BigInteger probe_analysis_id = ( BigInteger ) row[2]; // Count diff expressed probes per gene. 
if (results.get( geneId.longValue() ) != null) { DiffExprGeneSearchResult r = results.get( geneId.longValue() ); r.setNumberOfProbes( r.getNumberOfProbes() + 1 ); if (p_value_bin != null && p_value_bin > 0) { r.setNumberOfProbesDiffExpressed( r.getNumberOfProbesDiffExpressed() + 1 ); } } if ( best_p_value.get( geneId.longValue() ) == null ) { // first encounter best_p_value.put( geneId.longValue(), p_value_bin ); DiffExprGeneSearchResult r = new DiffExprGeneSearchResult(); r.setProbeAnalysisResultId( probe_analysis_id.longValue() ); r.setNumberOfProbes( r.getNumberOfProbes() + 1 ); if (p_value_bin != null && p_value_bin > 0) { r.setNumberOfProbesDiffExpressed( r.getNumberOfProbesDiffExpressed() + 1 ); } results.put( geneId.longValue(), r ); } else { if ( p_value_bin != null && best_p_value.get( geneId.longValue() ) < p_value_bin) { // replace best_p_value.put( geneId.longValue(), p_value_bin ); DiffExprGeneSearchResult r = results.get( geneId.longValue() ); r.setProbeAnalysisResultId( probe_analysis_id.longValue() ); //results.put( geneId.longValue(),r ); } } } } catch ( org.hibernate.HibernateException ex ) { throw super.convertHibernateAccessException( ex ); } finally { super.releaseSession( session ); } timer.stop(); // if ( log.isDebugEnabled() ) log.info( "Fetching ProbeResults for geneIds " + StringUtils.join( geneIds, "," ) + " and result set " + resultSetId + " ad used " + StringUtils.join( adUsed, "," ) + " took : " + timer.getTime() + " ms" ); return results; } /* * (non-Javadoc) * * @see * ubic.gemma.model.expression.analysis.DifferentialExpressionAnalysisResultDaoBase#handleGetExperimentalFactors * (java.util.Collection) */ @Override protected Map<ProbeAnalysisResult, Collection<ExperimentalFactor>> handleGetExperimentalFactors( Collection<ProbeAnalysisResult> differentialExpressionAnalysisResults ) throws Exception { StopWatch timer = new StopWatch(); timer.start(); Map<ProbeAnalysisResult, Collection<ExperimentalFactor>> factorsByResult = new 
HashMap<ProbeAnalysisResult, Collection<ExperimentalFactor>>(); if ( differentialExpressionAnalysisResults.size() == 0 ) { return factorsByResult; } final String queryString = "select ef, r from ExpressionAnalysisResultSetImpl rs" + " inner join rs.results r inner join rs.experimentalFactors ef where r in (:differentialExpressionAnalysisResults)"; String[] paramNames = { "differentialExpressionAnalysisResults" }; Object[] objectValues = { differentialExpressionAnalysisResults }; List<?> qr = this.getHibernateTemplate().findByNamedParam( queryString, paramNames, objectValues ); if ( qr == null || qr.isEmpty() ) return factorsByResult; for ( Object o : qr ) { Object[] ar = ( Object[] ) o; ExperimentalFactor f = ( ExperimentalFactor ) ar[0]; ProbeAnalysisResult res = ( ProbeAnalysisResult ) ar[1]; if ( !factorsByResult.containsKey( res ) ) { factorsByResult.put( res, new HashSet<ExperimentalFactor>() ); } factorsByResult.get( res ).add( f ); if ( log.isDebugEnabled() ) log.debug( res ); } timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "factors by results: " + timer.getTime() + " ms" ); } return factorsByResult; } /* * (non-Javadoc) * * @see * ubic.gemma.model.expression.analysis.DifferentialExpressionAnalysisResultDaoBase#handleGetExperimentalFactors * (ubic.gemma.model.expression.analysis.DifferentialExpressionAnalysisResult) */ @Override protected Collection<ExperimentalFactor> handleGetExperimentalFactors( ProbeAnalysisResult differentialExpressionAnalysisResult ) throws Exception { final String queryString = "select ef from ExpressionAnalysisResultSetImpl rs" + " inner join rs.results r inner join rs.experimentalFactors ef where r=:differentialExpressionAnalysisResult"; String[] paramNames = { "differentialExpressionAnalysisResult" }; Object[] objectValues = { differentialExpressionAnalysisResult }; return this.getHibernateTemplate().findByNamedParam( queryString, paramNames, objectValues ); } /* * (non-Javadoc) * * @see 
ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDaoBase#load(java.lang.Long) */ public ProbeAnalysisResult load( Long id ) { return this.getHibernateTemplate().get( ProbeAnalysisResultImpl.class, id ); } public Collection<ProbeAnalysisResult> loadAll() { throw new UnsupportedOperationException( "Sorry, that would be nuts" ); } /* * (non-Javadoc) * * @see ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDao#thaw(java.util.Collection) */ public void thaw( final Collection<ProbeAnalysisResult> results ) { HibernateTemplate templ = this.getHibernateTemplate(); templ.execute( new org.springframework.orm.hibernate3.HibernateCallback<Object>() { public Object doInHibernate( org.hibernate.Session session ) throws org.hibernate.HibernateException { for ( ProbeAnalysisResult result : results ) { session.buildLockRequest( LockOptions.NONE ).lock( result ); Hibernate.initialize( result ); CompositeSequence cs = result.getProbe(); Hibernate.initialize( cs ); } return null; } } ); } /* * (non-Javadoc) * * @see * ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDao#thaw(ubic.gemma.model.analysis.expression * .diff.ProbeAnalysisResult) */ public void thaw( final ProbeAnalysisResult result ) { HibernateTemplate templ = this.getHibernateTemplate(); templ.execute( new org.springframework.orm.hibernate3.HibernateCallback<Object>() { public Object doInHibernate( org.hibernate.Session session ) throws org.hibernate.HibernateException { session.buildLockRequest( LockOptions.NONE ).lock( result ); Hibernate.initialize( result ); CompositeSequence cs = result.getProbe(); Hibernate.initialize( cs ); Collection<ContrastResult> contrasts = result.getContrasts(); for ( ContrastResult contrast : contrasts ) { FactorValue f = contrast.getFactorValue(); Hibernate.initialize( f ); f.getIsBaseline(); } return null; } } ); } public Map<Long, DifferentialExpressionAnalysisResult> loadMultiple( Collection<Long> ids ) { final String 
queryString = "select dea from DifferentialExpressionAnalysisResultImpl dea where dea.id in (:ids)"; Map<Long, DifferentialExpressionAnalysisResult> probeResults = new HashMap<Long, DifferentialExpressionAnalysisResult>(); if ( ids.size() == 0 ) { return probeResults; } int BATCH_SIZE = 100; Collection<Long> batch = new HashSet<Long>(); for ( Long probeResultId : ids ) { batch.add( probeResultId ); if ( batch.size() == BATCH_SIZE ) { Collection<DifferentialExpressionAnalysisResult> batchResults = getHibernateTemplate() .findByNamedParam( queryString, "ids", batch ); for ( DifferentialExpressionAnalysisResult par : batchResults ) { probeResults.put( par.getId(), par ); } batch.clear(); } } if ( batch.size() > 0 ) { Collection<DifferentialExpressionAnalysisResult> batchResults = getHibernateTemplate().findByNamedParam( queryString, "ids", batch ); for ( DifferentialExpressionAnalysisResult par : batchResults ) { probeResults.put( par.getId(), par ); } } return probeResults; } }
gemma-mda/src/main/java/ubic/gemma/model/analysis/expression/diff/DifferentialExpressionResultDaoImpl.java
/* * The Gemma project. * * Copyright (c) 2006-2007 University of British Columbia * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package ubic.gemma.model.analysis.expression.diff; import java.math.BigInteger; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.time.StopWatch; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.hibernate.Hibernate; import org.hibernate.LockOptions; import org.hibernate.Session; import org.hibernate.SessionFactory; import org.hibernate.criterion.DetachedCriteria; import org.hibernate.criterion.Restrictions; import org.hibernate.type.DoubleType; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.orm.hibernate3.HibernateTemplate; import org.springframework.stereotype.Repository; import ubic.gemma.model.expression.designElement.CompositeSequence; import ubic.gemma.model.expression.experiment.BioAssaySet; import ubic.gemma.model.expression.experiment.ExperimentalFactor; import ubic.gemma.model.expression.experiment.ExpressionExperiment; import ubic.gemma.model.expression.experiment.FactorValue; import ubic.gemma.model.genome.Gene; /** * @author keshav * @version $Id$ * @see ubic.gemma.model.expression.analysis.DifferentialExpressionAnalysisResult */ @Repository 
public class DifferentialExpressionResultDaoImpl extends ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDaoBase { private Log log = LogFactory.getLog( this.getClass() ); private static final String fetchResultsByGeneAndExperimentsQuery = "select distinct e, r" + " from DifferentialExpressionAnalysisImpl a, BioSequence2GeneProductImpl bs2gp" + " inner join a.experimentAnalyzed e " + " inner join a.resultSets rs inner join rs.results r inner join fetch r.probe p " + "inner join p.biologicalCharacteristic bs inner join bs2gp.geneProduct gp inner join gp.gene g" + " where bs2gp.bioSequence=bs and g=:gene and e in (:experimentsAnalyzed)"; // no order by clause, we add // it later private static final String fetchResultsByGene = "select distinct e, r" + " from DifferentialExpressionAnalysisImpl a, BioSequence2GeneProductImpl bs2gp" + " inner join a.experimentAnalyzed e " + " inner join a.resultSets rs inner join rs.results r inner join fetch r.probe p " + "inner join p.biologicalCharacteristic bs inner join bs2gp.geneProduct gp inner join gp.gene g" + " where bs2gp.bioSequence=bs and g=:gene"; // no order by clause, we add it later private static final String fetchResultsByExperimentsQuery = "select distinct e, r" + " from DifferentialExpressionAnalysisImpl a, BioSequence2GeneProductImpl bs2gp" + " inner join a.experimentAnalyzed e " + " inner join a.resultSets rs inner join rs.results r inner join fetch r.probe p " + "left join p.biologicalCharacteristic bs left join bs2gp.geneProduct gp left join gp.gene g" + " where bs2gp.bioSequence=bs and e in (:experimentsAnalyzed) and r.correctedPvalue < :threshold order by r.correctedPvalue"; /** * No constraint on gene */ private static final String fetchResultsByResultSetQuery = "select distinct rs, r " + " from DifferentialExpressionAnalysisImpl a " + " inner join a.experimentAnalyzed e " + " inner join a.resultSets rs inner join rs.results r inner join fetch r.probe p " + " where rs in 
(:resultsAnalyzed)"; // no order by clause, we add it later; 'e' is not used in this query. private static final String fetchResultsByResultSetAndGeneQuery = "select dear.CORRECTED_PVALUE " + " from DIFFERENTIAL_EXPRESSION_ANALYSIS_RESULT dear, GENE2CS g2s FORCE KEY(GENE), PROBE_ANALYSIS_RESULT par " + " where g2s.CS = par.PROBE_FK and par.ID = dear.ID and " + " dear.EXPRESSION_ANALYSIS_RESULT_SET_FK = :rs_id and g2s.GENE = :gene_id " + " order by dear.CORRECTED_P_VALUE_BIN DESC"; // private static final String fetchBatchProbeAnalysisResultsByResultSetsAndGeneQuery = // "SELECT SQL_NO_CACHE dear.EXPRESSION_ANALYSIS_RESULT_SET_FK, dear.CORRECTED_P_VALUE_BIN, dear.ID" // + " from DIFFERENTIAL_EXPRESSION_ANALYSIS_RESULT dear, GENE2CS g2s , PROBE_ANALYSIS_RESULT par" //FORCE KEY(GENE) // + " where par.ID = dear.ID and g2s.CS = par.PROBE_FK and " // + " dear.EXPRESSION_ANALYSIS_RESULT_SET_FK IN (:rs_ids) and " // + " g2s.AD in (:ad_ids) and " // + // " g2s.GENE IN (:gene_ids) GROUP BY dear.EXPRESSION_ANALYSIS_RESULT_SET_FK, dear.CORRECTED_P_VALUE_BIN ORDER BY dear.CORRECTED_P_VALUE_BIN DESC"; private static final String fetchBatchProbeAnalysisResultsByResultSetsAndGeneQuery = "SELECT g2s.GENE, dear.CORRECTED_P_VALUE_BIN, dear.ID" + " from DIFFERENTIAL_EXPRESSION_ANALYSIS_RESULT dear, GENE2CS g2s FORCE KEY(GENE), PROBE_ANALYSIS_RESULT par" + " where par.ID = dear.ID and g2s.CS = par.PROBE_FK and " + " dear.EXPRESSION_ANALYSIS_RESULT_SET_FK = :rs_id and " + " g2s.AD in (:ad_ids) and " + " g2s.GENE IN (:gene_ids) ";//GROUP BY g2s.GENE, dear.CORRECTED_P_VALUE_BIN ORDER BY dear.CORRECTED_P_VALUE_BIN DESC"; @Autowired public DifferentialExpressionResultDaoImpl( SessionFactory sessionFactory ) { super.setSessionFactory( sessionFactory ); } /* * (non-Javadoc) * * @see * ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDao#countNumberOfDifferentiallyExpressedProbes * (long, double) */ public Integer countNumberOfDifferentiallyExpressedProbes( long 
resultSetId, double threshold ) { DetachedCriteria criteria = DetachedCriteria.forClass( HitListSize.class ); criteria.add( Restrictions.eq( "id", resultSetId ) ); criteria.add( Restrictions.eq( "thresholdQValue", threshold ) ); List<?> results = this.getHibernateTemplate().findByCriteria( criteria ); Object result = null; if ( results != null ) { if ( results.size() > 1 ) { throw new org.springframework.dao.InvalidDataAccessResourceUsageException( "More than one instance of '" + HitListSize.class.getName() + "' was found when executing query" ); } else if ( results.size() == 1 ) { result = results.iterator().next(); return ( ( HitListSize ) result ).getNumberOfProbes(); } return 0; } return 0; } /* * (non-Javadoc) * * @see * ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisDao#findResultsForGeneInExperiments(ubic * .gemma.model.genome.Gene ) */ public Map<BioAssaySet, List<ProbeAnalysisResult>> find( Gene gene ) { StopWatch timer = new StopWatch(); timer.start(); Map<BioAssaySet, List<ProbeAnalysisResult>> results = new HashMap<BioAssaySet, List<ProbeAnalysisResult>>(); if ( gene == null ) return results; HibernateTemplate tpl = new HibernateTemplate( this.getSessionFactory() ); tpl.setCacheQueries( true ); List<?> qresult = tpl.findByNamedParam( fetchResultsByGene, "gene", gene ); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; BioAssaySet ee = ( BioAssaySet ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } /* * (non-Javadoc) * * @see * ubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisDao#findResultsForGeneInExperiments(ubic * .gemma.model.genome.Gene, java.util.Collection) */ public Map<BioAssaySet, List<ProbeAnalysisResult>> 
find( Gene gene, Collection<BioAssaySet> experimentsAnalyzed ) { Map<BioAssaySet, List<ProbeAnalysisResult>> results = new HashMap<BioAssaySet, List<ProbeAnalysisResult>>(); if ( experimentsAnalyzed.size() == 0 ) { return results; } StopWatch timer = new StopWatch(); timer.start(); String[] paramNames = { "gene", "experimentsAnalyzed" }; Object[] objectValues = { gene, experimentsAnalyzed }; List<?> qresult = this.getHibernateTemplate().findByNamedParam( fetchResultsByGeneAndExperimentsQuery, paramNames, objectValues ); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; ExpressionExperiment ee = ( ExpressionExperiment ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } /* * (non-Javadoc) * * @see ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDao#find(java.util.Collection, double, * java.lang.Integer) */ public java.util.Map<ubic.gemma.model.expression.experiment.BioAssaySet, java.util.List<ProbeAnalysisResult>> find( java.util.Collection<ubic.gemma.model.expression.experiment.BioAssaySet> experiments, double qvalueThreshold, Integer limit ) { Map<BioAssaySet, List<ProbeAnalysisResult>> results = new HashMap<BioAssaySet, List<ProbeAnalysisResult>>(); if ( experiments.size() == 0 ) { return results; } StopWatch timer = new StopWatch(); timer.start(); HibernateTemplate tpl = new HibernateTemplate( this.getSessionFactory() ); tpl.setQueryCacheRegion( "diffExResult" ); tpl.setCacheQueries( true ); if ( limit != null ) { tpl.setMaxResults( limit ); } String[] paramNames = { "experimentsAnalyzed", "threshold" }; Object[] objectValues = { experiments, qvalueThreshold }; List<?> qresult = tpl.findByNamedParam( fetchResultsByExperimentsQuery, paramNames, objectValues 
); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; BioAssaySet ee = ( BioAssaySet ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } /* * (non-Javadoc) * * @seeubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisDao# * findResultsForGeneInExperimentsMetThreshold(ubic.gemma.model.genome.Gene, double, integer) */ public java.util.Map<ubic.gemma.model.expression.experiment.BioAssaySet, java.util.List<ProbeAnalysisResult>> find( ubic.gemma.model.genome.Gene gene, double threshold, Integer limit ) { StopWatch timer = new StopWatch(); timer.start(); String qs = fetchResultsByGene; if ( threshold > 0 ) qs = qs + " and r.correctedPvalue < :threshold"; HibernateTemplate tpl = new HibernateTemplate( this.getSessionFactory() ); tpl.setQueryCacheRegion( "diffExResult" ); tpl.setCacheQueries( true ); if ( limit != null ) { tpl.setMaxResults( limit ); qs += " order by r.correctedPvalue"; } Map<BioAssaySet, List<ProbeAnalysisResult>> results = new HashMap<BioAssaySet, List<ProbeAnalysisResult>>(); String[] paramNames = { "gene", "threshold" }; Object[] objectValues = { gene, threshold }; List<?> qresult = tpl.findByNamedParam( qs, paramNames, objectValues ); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; BioAssaySet ee = ( BioAssaySet ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } log.debug( "Num experiments with probe analysis results (with limit = " + limit + ") : " + results.size() + ". 
Number of probes returned in total: " + qresult.size() ); timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } /* * (non-Javadoc) * * @seeubic.gemma.model.analysis.expression.diff.DifferentialExpressionAnalysisDao# * findResultsForGeneInExperimentsMetThreshold(ubic.gemma.model.genome.Gene, java.util.Collection, double, Integer) */ public java.util.Map<ubic.gemma.model.expression.experiment.BioAssaySet, java.util.List<ProbeAnalysisResult>> find( ubic.gemma.model.genome.Gene gene, java.util.Collection<ubic.gemma.model.expression.experiment.BioAssaySet> experimentsAnalyzed, double threshold, Integer limit ) { StopWatch timer = new StopWatch(); timer.start(); String qs = fetchResultsByGeneAndExperimentsQuery + " and r.correctedPvalue < :threshold"; HibernateTemplate tpl = new HibernateTemplate( this.getSessionFactory() ); tpl.setQueryCacheRegion( "diffExResult" ); tpl.setCacheQueries( true ); if ( limit != null ) { tpl.setMaxResults( limit ); qs += " order by r.correctedPvalue"; } Map<BioAssaySet, List<ProbeAnalysisResult>> results = new HashMap<BioAssaySet, List<ProbeAnalysisResult>>(); if ( experimentsAnalyzed.size() == 0 ) { return results; } String[] paramNames = { "gene", "experimentsAnalyzed", "threshold" }; Object[] objectValues = { gene, experimentsAnalyzed, threshold }; List<?> qresult = tpl.findByNamedParam( qs, paramNames, objectValues ); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; BioAssaySet ee = ( BioAssaySet ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } log.warn( "Num experiments with probe analysis results (with limit = " + limit + ") : " + results.size() + ". 
Number of probes returned in total: " + qresult.size() ); timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } public List<Double> findGeneInResultSets( Gene gene, ExpressionAnalysisResultSet resultSet, Collection<Long> arrayDesignIds, Integer limit ) { StopWatch timer = new StopWatch(); timer.start(); List<Double> results = null; try { Session session = super.getSession(); org.hibernate.SQLQuery queryObject = session.createSQLQuery( fetchResultsByResultSetAndGeneQuery ); queryObject.setLong( "gene_id", gene.getId() ); queryObject.setLong( "rs_id", resultSet.getId() ); // queryObject.setParameterList( "array_ids", arrayDesignIds ); // queryObject.setLong( "array_ids", arrayDesignIds.iterator().next() ); if ( limit != null ) { queryObject.setMaxResults( limit ); } queryObject.addScalar( "CORRECTED_PVALUE", new DoubleType() ); results = queryObject.list(); } catch ( org.hibernate.HibernateException ex ) { throw super.convertHibernateAccessException( ex ); } timer.stop(); if ( log.isDebugEnabled() ) log.debug( "Fetching probeResults from resultSet " + resultSet.getId() + " for gene " + gene.getId() + "and " + arrayDesignIds.size() + "arrays took : " + timer.getTime() + " ms" ); return results; } /** * Given a list of result sets finds the results that met the given threshold * * @param resultsAnalyzed * @param threshold * @param limit - max number of results to return. 
* @return */ public java.util.Map<ExpressionAnalysisResultSet, List<ProbeAnalysisResult>> findInResultSets( java.util.Collection<ExpressionAnalysisResultSet> resultsAnalyzed, double threshold, Integer limit ) { Map<ExpressionAnalysisResultSet, List<ProbeAnalysisResult>> results = new HashMap<ExpressionAnalysisResultSet, List<ProbeAnalysisResult>>(); if ( resultsAnalyzed.size() == 0 ) { return results; } // Integer bin = Math.log10(threshold); StopWatch timer = new StopWatch(); timer.start(); String qs = fetchResultsByResultSetQuery + " and r.correctedPvalue < :threshold order by r.correctedPvalue"; HibernateTemplate tpl = new HibernateTemplate( this.getSessionFactory() ); if ( limit != null ) { tpl.setMaxResults( limit ); } String[] paramNames = { "resultsAnalyzed", "threshold" }; Object[] objectValues = { resultsAnalyzed, threshold }; List<?> qresult = tpl.findByNamedParam( qs, paramNames, objectValues ); for ( Object o : qresult ) { Object[] oa = ( Object[] ) o; ExpressionAnalysisResultSet ee = ( ExpressionAnalysisResultSet ) oa[0]; ProbeAnalysisResult probeResult = ( ProbeAnalysisResult ) oa[1]; if ( !results.containsKey( ee ) ) { results.put( ee, new ArrayList<ProbeAnalysisResult>() ); } results.get( ee ).add( probeResult ); } timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "Diff ex results: " + timer.getTime() + " ms" ); } return results; } public static class DiffExprGeneSearchResult { private long probeAnalysisResultId; private int numberOfProbes = 0; private int numberOfProbesDiffExpressed = 0; public int getNumberOfProbesDiffExpressed() { return numberOfProbesDiffExpressed; } public void setNumberOfProbesDiffExpressed( int numberOfProbesDiffExpressed ) { this.numberOfProbesDiffExpressed = numberOfProbesDiffExpressed; } public long getProbeAnalysisResultId() { return probeAnalysisResultId; } public void setProbeAnalysisResultId( long probeAnalysisResultId ) { this.probeAnalysisResultId = probeAnalysisResultId; } public int getNumberOfProbes() { 
return numberOfProbes; } public void setNumberOfProbes( int numberOfProbes ) { this.numberOfProbes = numberOfProbes; } } public Map<Long, DiffExprGeneSearchResult> findProbeAnalysisResultIdsInResultSet( Long resultSetId, Collection<Long> geneIds, Collection<Long> adUsed ) { StopWatch timer = new StopWatch(); timer.start(); Map<Long,DiffExprGeneSearchResult> results = new HashMap<Long,DiffExprGeneSearchResult>(); Map<Long, Integer> best_p_value = new HashMap<Long, Integer>(); Session session = super.getSession(); try { org.hibernate.SQLQuery queryObject = session .createSQLQuery( fetchBatchProbeAnalysisResultsByResultSetsAndGeneQuery ); queryObject.setLong( "rs_id", resultSetId ); queryObject.setParameterList( "gene_ids", geneIds ); queryObject.setParameterList( "ad_ids", adUsed ); List<?> queryResult = queryObject.list(); log.warn( "Got " + queryResult.size() + " results" ); if ( queryResult.isEmpty() ) return results; // Get probe result with the best pValue. for ( Object o : queryResult ) { Object[] row = ( Object[] ) o; BigInteger geneId = ( BigInteger ) row[0]; Integer p_value_bin = ( Integer ) row[1]; BigInteger probe_analysis_id = ( BigInteger ) row[2]; // Count diff expressed probes per gene. 
if (results.get( geneId.longValue() ) != null) { DiffExprGeneSearchResult r = results.get( geneId.longValue() ); r.setNumberOfProbes( r.getNumberOfProbes() + 1 ); if (p_value_bin != null && p_value_bin > 0) { r.setNumberOfProbesDiffExpressed( r.getNumberOfProbesDiffExpressed() + 1 ); } } if ( best_p_value.get( geneId.longValue() ) == null ) { // first encounter best_p_value.put( geneId.longValue(), p_value_bin ); DiffExprGeneSearchResult r = new DiffExprGeneSearchResult(); r.setProbeAnalysisResultId( probe_analysis_id.longValue() ); r.setNumberOfProbes( r.getNumberOfProbes() + 1 ); if (p_value_bin != null && p_value_bin > 0) { r.setNumberOfProbesDiffExpressed( r.getNumberOfProbesDiffExpressed() + 1 ); } results.put( geneId.longValue(), r ); } else { if ( p_value_bin != null && best_p_value.get( geneId.longValue() ) < p_value_bin) { // replace best_p_value.put( geneId.longValue(), p_value_bin ); DiffExprGeneSearchResult r = results.get( geneId.longValue() ); r.setProbeAnalysisResultId( probe_analysis_id.longValue() ); //results.put( geneId.longValue(),r ); } } } } catch ( org.hibernate.HibernateException ex ) { throw super.convertHibernateAccessException( ex ); } finally { super.releaseSession( session ); } timer.stop(); // if ( log.isDebugEnabled() ) log.info( "Fetching ProbeResults for geneIds " + StringUtils.join( geneIds, "," ) + " and result set " + resultSetId + " ad used " + StringUtils.join( adUsed, "," ) + " took : " + timer.getTime() + " ms" ); return results; } /* * (non-Javadoc) * * @see * ubic.gemma.model.expression.analysis.DifferentialExpressionAnalysisResultDaoBase#handleGetExperimentalFactors * (java.util.Collection) */ @Override protected Map<ProbeAnalysisResult, Collection<ExperimentalFactor>> handleGetExperimentalFactors( Collection<ProbeAnalysisResult> differentialExpressionAnalysisResults ) throws Exception { StopWatch timer = new StopWatch(); timer.start(); Map<ProbeAnalysisResult, Collection<ExperimentalFactor>> factorsByResult = new 
HashMap<ProbeAnalysisResult, Collection<ExperimentalFactor>>(); if ( differentialExpressionAnalysisResults.size() == 0 ) { return factorsByResult; } final String queryString = "select ef, r from ExpressionAnalysisResultSetImpl rs" + " inner join rs.results r inner join rs.experimentalFactors ef where r in (:differentialExpressionAnalysisResults)"; String[] paramNames = { "differentialExpressionAnalysisResults" }; Object[] objectValues = { differentialExpressionAnalysisResults }; List<?> qr = this.getHibernateTemplate().findByNamedParam( queryString, paramNames, objectValues ); if ( qr == null || qr.isEmpty() ) return factorsByResult; for ( Object o : qr ) { Object[] ar = ( Object[] ) o; ExperimentalFactor f = ( ExperimentalFactor ) ar[0]; ProbeAnalysisResult res = ( ProbeAnalysisResult ) ar[1]; if ( !factorsByResult.containsKey( res ) ) { factorsByResult.put( res, new HashSet<ExperimentalFactor>() ); } factorsByResult.get( res ).add( f ); if ( log.isDebugEnabled() ) log.debug( res ); } timer.stop(); if ( timer.getTime() > 1000 ) { log.info( "factors by results: " + timer.getTime() + " ms" ); } return factorsByResult; } /* * (non-Javadoc) * * @see * ubic.gemma.model.expression.analysis.DifferentialExpressionAnalysisResultDaoBase#handleGetExperimentalFactors * (ubic.gemma.model.expression.analysis.DifferentialExpressionAnalysisResult) */ @Override protected Collection<ExperimentalFactor> handleGetExperimentalFactors( ProbeAnalysisResult differentialExpressionAnalysisResult ) throws Exception { final String queryString = "select ef from ExpressionAnalysisResultSetImpl rs" + " inner join rs.results r inner join rs.experimentalFactors ef where r=:differentialExpressionAnalysisResult"; String[] paramNames = { "differentialExpressionAnalysisResult" }; Object[] objectValues = { differentialExpressionAnalysisResult }; return this.getHibernateTemplate().findByNamedParam( queryString, paramNames, objectValues ); } /* * (non-Javadoc) * * @see 
ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDaoBase#load(java.lang.Long) */ public ProbeAnalysisResult load( Long id ) { return this.getHibernateTemplate().get( ProbeAnalysisResultImpl.class, id ); } public Collection<ProbeAnalysisResult> loadAll() { throw new UnsupportedOperationException( "Sorry, that would be nuts" ); } /* * (non-Javadoc) * * @see ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDao#thaw(java.util.Collection) */ public void thaw( final Collection<ProbeAnalysisResult> results ) { HibernateTemplate templ = this.getHibernateTemplate(); templ.execute( new org.springframework.orm.hibernate3.HibernateCallback<Object>() { public Object doInHibernate( org.hibernate.Session session ) throws org.hibernate.HibernateException { for ( ProbeAnalysisResult result : results ) { session.buildLockRequest( LockOptions.NONE ).lock( result ); Hibernate.initialize( result ); CompositeSequence cs = result.getProbe(); Hibernate.initialize( cs ); } return null; } } ); } /* * (non-Javadoc) * * @see * ubic.gemma.model.analysis.expression.diff.DifferentialExpressionResultDao#thaw(ubic.gemma.model.analysis.expression * .diff.ProbeAnalysisResult) */ public void thaw( final ProbeAnalysisResult result ) { HibernateTemplate templ = this.getHibernateTemplate(); templ.execute( new org.springframework.orm.hibernate3.HibernateCallback<Object>() { public Object doInHibernate( org.hibernate.Session session ) throws org.hibernate.HibernateException { session.buildLockRequest( LockOptions.NONE ).lock( result ); Hibernate.initialize( result ); CompositeSequence cs = result.getProbe(); Hibernate.initialize( cs ); Collection<ContrastResult> contrasts = result.getContrasts(); for ( ContrastResult contrast : contrasts ) { FactorValue f = contrast.getFactorValue(); Hibernate.initialize( f ); f.getIsBaseline(); } return null; } } ); } public Map<Long, DifferentialExpressionAnalysisResult> loadMultiple( Collection<Long> ids ) { final String 
queryString = "select dea from DifferentialExpressionAnalysisResultImpl dea where dea.id in (:ids)"; Map<Long, DifferentialExpressionAnalysisResult> probeResults = new HashMap<Long, DifferentialExpressionAnalysisResult>(); if ( ids.size() == 0 ) { return probeResults; } int BATCH_SIZE = 100; Collection<Long> batch = new HashSet<Long>(); for ( Long probeResultId : ids ) { batch.add( probeResultId ); if ( batch.size() == BATCH_SIZE ) { Collection<DifferentialExpressionAnalysisResult> batchResults = getHibernateTemplate() .findByNamedParam( queryString, "ids", batch ); for ( DifferentialExpressionAnalysisResult par : batchResults ) { probeResults.put( par.getId(), par ); } batch.clear(); } } if ( batch.size() > 0 ) { Collection<DifferentialExpressionAnalysisResult> batchResults = getHibernateTemplate().findByNamedParam( queryString, "ids", batch ); for ( DifferentialExpressionAnalysisResult par : batchResults ) { probeResults.put( par.getId(), par ); } } return probeResults; } }
Minor. Changed log level.
gemma-mda/src/main/java/ubic/gemma/model/analysis/expression/diff/DifferentialExpressionResultDaoImpl.java
Minor. Changed log level.
<ide><path>emma-mda/src/main/java/ubic/gemma/model/analysis/expression/diff/DifferentialExpressionResultDaoImpl.java <ide> <ide> List<?> queryResult = queryObject.list(); <ide> <del> log.warn( "Got " + queryResult.size() + " results" ); <add> log.info( "Got " + queryResult.size() + " results" ); <ide> <ide> if ( queryResult.isEmpty() ) return results; <ide>
Java
lgpl-2.1
8e80518566f4face6f0abf5a57438b3ee6b93483
0
MaxGaming63/BornToBeaHero-1.11.2
package com.emyxam.btbh.item.armors.marvel; import com.emyxam.btbh.BornToBeaHero; import com.emyxam.btbh.item.ItemModelProvider; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.inventory.EntityEquipmentSlot; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.potion.Potion; import net.minecraft.potion.PotionEffect; import net.minecraft.world.World; import net.minecraftforge.event.entity.living.LivingEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; import java.util.ArrayList; import java.util.List; public class ItemCaptainArmor extends net.minecraft.item.ItemArmor implements ItemModelProvider { private String name; public ItemCaptainArmor(ArmorMaterial material, EntityEquipmentSlot slot, String name) { super(material, 0, slot); setRegistryName(name); setUnlocalizedName(name); this.name = name; setCreativeTab(BornToBeaHero.creativeTab); this.setMaxStackSize(1); } @Override public void registerItemModel(Item item) { BornToBeaHero.proxy.registerItemRenderer(this, 0, name); } @SideOnly(Side.CLIENT) public void addInformation(ItemStack stack, EntityPlayer player, List tooltip, boolean advanced) { int damage = stack.getMaxDamage() - stack.getItemDamage(); tooltip.add("Durability: \u00A7c" + damage); } public void onArmorTick(World world, EntityPlayer entity, ItemStack itemStack) { ItemStack head = entity.getItemStackFromSlot(EntityEquipmentSlot.HEAD); ItemStack chest = entity.getItemStackFromSlot(EntityEquipmentSlot.CHEST); ItemStack legs = entity.getItemStackFromSlot(EntityEquipmentSlot.LEGS); ItemStack feet = entity.getItemStackFromSlot(EntityEquipmentSlot.FEET); if (head != null && head.getItem() instanceof ItemCaptainArmor && chest != null && chest.getItem() instanceof ItemCaptainArmor && legs != null && legs.getItem() instanceof ItemCaptainArmor && feet != null && feet.getItem() 
instanceof ItemCaptainArmor || entity.capabilities.isCreativeMode || entity.isSpectator()) { entity.fallDistance = 0.0F; } } @Override public int getItemEnchantability() { return 0; } public static class abilityHandler { public static List<String> playersWithSet = new ArrayList<String>(); private boolean hasSet; public static String playerKey(EntityPlayer player) { return player.getGameProfile().getName() + ":" + player.world.isRemote; } public static boolean playerHasSet(EntityPlayer entity) { ItemStack head = entity.getItemStackFromSlot(EntityEquipmentSlot.HEAD); ItemStack chest = entity.getItemStackFromSlot(EntityEquipmentSlot.CHEST); ItemStack legs = entity.getItemStackFromSlot(EntityEquipmentSlot.LEGS); ItemStack feet = entity.getItemStackFromSlot(EntityEquipmentSlot.FEET); return head != null && head.getItem() instanceof ItemCaptainArmor && chest != null && chest.getItem() instanceof ItemCaptainArmor && legs != null && legs.getItem() instanceof ItemCaptainArmor && feet != null && feet.getItem() instanceof ItemCaptainArmor; } @SubscribeEvent public void updatePlayerAbilityStatus(LivingEvent.LivingUpdateEvent event) { if(event.getEntityLiving() instanceof EntityPlayer) { EntityPlayer player = (EntityPlayer) event.getEntityLiving(); String key = playerKey(player); Boolean hasSet = playerHasSet(player); if(playersWithSet.contains(key)) { if(hasSet) { player.addPotionEffect(new PotionEffect(Potion.getPotionById(5), 10, 0)); //Strength player.addPotionEffect(new PotionEffect(Potion.getPotionById(11), 10, 0)); //Resistance player.addPotionEffect(new PotionEffect(Potion.getPotionById(1), 10, 0)); //Speed player.addPotionEffect(new PotionEffect(Potion.getPotionById(8), 10, 0)); //Jump Boost player.capabilities.allowFlying = true; } else { player.stepHeight = 0.5F; if(!player.capabilities.isCreativeMode && !player.isSpectator()) { player.capabilities.allowFlying = false; player.capabilities.isFlying = false; } playersWithSet.remove(key); } } else if (hasSet) { 
playersWithSet.add(key); } } } } }
src/main/java/com/emyxam/btbh/item/armors/marvel/ItemCaptainArmor.java
package com.emyxam.btbh.item.armors.marvel; import com.emyxam.btbh.BornToBeaHero; import com.emyxam.btbh.item.ItemModelProvider; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.inventory.EntityEquipmentSlot; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraft.potion.Potion; import net.minecraft.potion.PotionEffect; import net.minecraft.world.World; import net.minecraftforge.event.entity.living.LivingEvent; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; import net.minecraftforge.fml.relauncher.Side; import net.minecraftforge.fml.relauncher.SideOnly; import java.util.ArrayList; import java.util.List; /** * Created by Emyxam on 01/07/2017. */ public class ItemCaptainArmor extends net.minecraft.item.ItemArmor implements ItemModelProvider { private String name; public ItemCaptainArmor(ArmorMaterial material, EntityEquipmentSlot slot, String name) { super(material, 0, slot); setRegistryName(name); setUnlocalizedName(name); this.name = name; setCreativeTab(BornToBeaHero.creativeTab); this.setMaxStackSize(1); } @Override public void registerItemModel(Item item) { BornToBeaHero.proxy.registerItemRenderer(this, 0, name); } @SideOnly(Side.CLIENT) public void addInformation(ItemStack stack, EntityPlayer player, List tooltip, boolean advanced) { int damage = stack.getMaxDamage() - stack.getItemDamage(); tooltip.add("Durability: \u00A7c" + damage); } public void onArmorTick(World world, EntityPlayer entity, ItemStack itemStack) { ItemStack head = entity.getItemStackFromSlot(EntityEquipmentSlot.HEAD); ItemStack chest = entity.getItemStackFromSlot(EntityEquipmentSlot.CHEST); ItemStack legs = entity.getItemStackFromSlot(EntityEquipmentSlot.LEGS); ItemStack feet = entity.getItemStackFromSlot(EntityEquipmentSlot.FEET); if (head != null && head.getItem() instanceof ItemCaptainArmor && chest != null && chest.getItem() instanceof ItemCaptainArmor && legs != null && legs.getItem() instanceof ItemCaptainArmor 
&& feet != null && feet.getItem() instanceof ItemCaptainArmor || entity.capabilities.isCreativeMode || entity.isSpectator()) { entity.fallDistance = 0.0F; } } @Override public int getItemEnchantability() { return 0; } public static class abilityHandler { public static List<String> playersWithSet = new ArrayList<String>(); private boolean hasSet; public static String playerKey(EntityPlayer player) { return player.getGameProfile().getName() + ":" + player.world.isRemote; } public static boolean playerHasSet(EntityPlayer entity) { ItemStack head = entity.getItemStackFromSlot(EntityEquipmentSlot.HEAD); ItemStack chest = entity.getItemStackFromSlot(EntityEquipmentSlot.CHEST); ItemStack legs = entity.getItemStackFromSlot(EntityEquipmentSlot.LEGS); ItemStack feet = entity.getItemStackFromSlot(EntityEquipmentSlot.FEET); return head != null && head.getItem() instanceof ItemCaptainArmor && chest != null && chest.getItem() instanceof ItemCaptainArmor && legs != null && legs.getItem() instanceof ItemCaptainArmor && feet != null && feet.getItem() instanceof ItemCaptainArmor; } @SubscribeEvent public void updatePlayerAbilityStatus(LivingEvent.LivingUpdateEvent event) { if (event.getEntityLiving() instanceof EntityPlayer) { EntityPlayer player = (EntityPlayer) event.getEntityLiving(); String key = playerKey(player); Boolean hasSet = playerHasSet(player); if (playersWithSet.contains(key)) { if (hasSet) { player.addPotionEffect(new PotionEffect(Potion.getPotionById(5), 10, 0)); //Strength player.addPotionEffect(new PotionEffect(Potion.getPotionById(11), 10, 0)); //Resistance player.addPotionEffect(new PotionEffect(Potion.getPotionById(1), 10, 0)); //Speed player.addPotionEffect(new PotionEffect(Potion.getPotionById(8), 10, 0)); //Jump Boost player.capabilities.allowFlying = true; } else { player.stepHeight = 0.5F; if (!player.capabilities.isCreativeMode && !player.isSpectator()) { player.capabilities.allowFlying = false; player.capabilities.isFlying = false; } 
playersWithSet.remove(key); } } else if (hasSet) { playersWithSet.add(key); } } } } }
remove
src/main/java/com/emyxam/btbh/item/armors/marvel/ItemCaptainArmor.java
remove
<ide><path>rc/main/java/com/emyxam/btbh/item/armors/marvel/ItemCaptainArmor.java <ide> <ide> import java.util.ArrayList; <ide> import java.util.List; <del> <del>/** <del> * Created by Emyxam on 01/07/2017. <del> */ <ide> <ide> public class ItemCaptainArmor extends net.minecraft.item.ItemArmor implements ItemModelProvider { <ide> <ide> <ide> @SubscribeEvent <ide> public void updatePlayerAbilityStatus(LivingEvent.LivingUpdateEvent event) { <del> if (event.getEntityLiving() instanceof EntityPlayer) { <add> if(event.getEntityLiving() instanceof EntityPlayer) { <ide> EntityPlayer player = (EntityPlayer) event.getEntityLiving(); <ide> String key = playerKey(player); <ide> <ide> Boolean hasSet = playerHasSet(player); <ide> <del> if (playersWithSet.contains(key)) { <del> if (hasSet) { <add> if(playersWithSet.contains(key)) { <add> if(hasSet) { <ide> player.addPotionEffect(new PotionEffect(Potion.getPotionById(5), 10, 0)); //Strength <ide> player.addPotionEffect(new PotionEffect(Potion.getPotionById(11), 10, 0)); //Resistance <ide> player.addPotionEffect(new PotionEffect(Potion.getPotionById(1), 10, 0)); //Speed <ide> player.capabilities.allowFlying = true; <ide> } else { <ide> player.stepHeight = 0.5F; <del> if (!player.capabilities.isCreativeMode && !player.isSpectator()) { <add> if(!player.capabilities.isCreativeMode && !player.isSpectator()) { <ide> player.capabilities.allowFlying = false; <ide> player.capabilities.isFlying = false; <ide> }
Java
apache-2.0
a9ce81a8120298b822923775bdbd1a20f08fd330
0
consulo/hub.consulo.io
package consulo.webService.plugins.ui; import java.text.DateFormatSymbols; import java.util.*; import java.util.stream.Collectors; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import com.google.common.collect.SortedSetMultimap; import com.google.common.collect.TreeMultimap; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.text.DateFormatUtilRt; import com.intellij.util.text.VersionComparatorUtil; import com.vaadin.server.Page; import com.vaadin.ui.Component; import com.vaadin.ui.HorizontalLayout; import com.vaadin.ui.HorizontalSplitPanel; import com.vaadin.ui.ListSelect; import com.vaadin.ui.TextArea; import com.vaadin.ui.Tree; import com.vaadin.ui.VerticalLayout; import com.vaadin.ui.components.calendar.event.BasicEvent; import com.vaadin.ui.themes.ValoTheme; import consulo.webService.UserConfigurationService; import consulo.webService.plugins.PluginChannel; import consulo.webService.plugins.PluginChannelService; import consulo.webService.plugins.PluginNode; import consulo.webService.plugins.PluginStatisticsService; import consulo.webService.plugins.mongo.MongoDownloadStat; import consulo.webService.ui.RepositoryUI; import consulo.webService.ui.util.TidyComponents; import consulo.webService.ui.util.VaadinUIUtil; /** * @author VISTALL * @since 04-Jan-17 */ public class RepositoryChannelUI extends HorizontalLayout { private final UserConfigurationService myUserConfigurationService; private final PluginStatisticsService myPluginStatisticsService; private final PluginChannel myPluginChannel; private final HorizontalLayout myRightLayout; public RepositoryChannelUI(Page page, @NotNull PluginChannel pluginChannel, @NotNull UserConfigurationService userConfigurationService, @NotNull PluginStatisticsService pluginStatisticsService, @Nullable String selectedPluginId) { myPluginChannel = pluginChannel; 
myUserConfigurationService = userConfigurationService; myPluginStatisticsService = pluginStatisticsService; setSizeFull(); ListSelect listSelect = new ListSelect(); listSelect.setNullSelectionAllowed(false); listSelect.setSizeFull(); addComponent(listSelect); myRightLayout = new HorizontalLayout(); myRightLayout.setSizeFull(); addComponent(myRightLayout); setExpandRatio(listSelect, .3f); setExpandRatio(myRightLayout, 1f); HorizontalSplitPanel panel = new HorizontalSplitPanel(); panel.setSplitPosition(70, Unit.PERCENTAGE); panel.setSizeFull(); myRightLayout.addComponent(panel); PluginChannelService repositoryByChannel = myUserConfigurationService.getRepositoryByChannel(pluginChannel); Multimap<String, PluginNode> multimap = ArrayListMultimap.create(); repositoryByChannel.iteratePluginNodes(pluginNode -> multimap.put(pluginNode.id, pluginNode)); // name -> id Map<PluginNode, String> map = new TreeMap<>((o1, o2) -> { int i = o1.name.compareTo(o2.name); if(PluginChannelService.isPlatformNode(o1.id)) { return -1; } else if(PluginChannelService.isPlatformNode(o2.id)) { return 1; } return i; }); for(Map.Entry<String, Collection<PluginNode>> entry : multimap.asMap().entrySet()) { map.put(entry.getValue().iterator().next(), entry.getKey()); } for(Map.Entry<PluginNode, String> entry : map.entrySet()) { listSelect.addItem(entry.getValue()); listSelect.setItemCaption(entry.getValue(), getPluginNodeName(entry.getKey())); } listSelect.addValueChangeListener(event -> { String pluginId = (String) event.getProperty().getValue(); page.setUriFragment(RepositoryUI.getUrlFragment(pluginChannel, pluginId)); // all plugin nodes Collection<PluginNode> pluginNodes = multimap.get(pluginId); // version -> nodes Comparator<PluginNode> pluginNodeComparator = (o1, o2) -> VersionComparatorUtil.compare(o2.version, o1.version); SortedSetMultimap<String, PluginNode> sortByVersion = TreeMultimap.create(Collections.reverseOrder(StringUtil::naturalCompare), pluginNodeComparator); for(PluginNode 
pluginNode : pluginNodes) { sortByVersion.put(pluginNode.platformVersion, pluginNode); } PluginNode lastPluginNode = null; Map<String, Collection<PluginNode>> sorted = sortByVersion.asMap(); Tree tree = new Tree("Versions"); for(Map.Entry<String, Collection<PluginNode>> entry : sorted.entrySet()) { tree.addItem(entry.getKey()); tree.setItemCaption(entry.getKey(), "Consulo #" + entry.getKey()); for(PluginNode node : entry.getValue()) { if(lastPluginNode == null) { lastPluginNode = node; } UUID uuid = UUID.randomUUID(); tree.addItem(uuid); Calendar calendar = Calendar.getInstance(); calendar.setTimeInMillis(node.date); tree.setItemCaption(uuid, "build #" + node.version + " at " + DateFormatUtilRt.formatBuildDate(calendar)); tree.setParent(uuid, entry.getKey()); tree.setChildrenAllowed(uuid, false); } } assert lastPluginNode != null; panel.setFirstComponent(buildInfo(lastPluginNode)); panel.setSecondComponent(tree); }); if(selectedPluginId != null) { listSelect.setValue(selectedPluginId); } } @NotNull private Component buildInfo(@NotNull PluginNode pluginNode) { VerticalLayout verticalLayout = new VerticalLayout(); verticalLayout.setMargin(true); verticalLayout.setSpacing(true); verticalLayout.addComponent(VaadinUIUtil.labeled("ID: ", TidyComponents.newLabel(pluginNode.id))); verticalLayout.addComponent(VaadinUIUtil.labeled("Name: ", TidyComponents.newLabel(getPluginNodeName(pluginNode)))); verticalLayout.addComponent(VaadinUIUtil.labeled("Category: ", TidyComponents.newLabel(pluginNode.category))); if(!StringUtil.isEmpty(pluginNode.vendor)) { verticalLayout.addComponent(VaadinUIUtil.labeled("Vendor: ", TidyComponents.newLabel(pluginNode.vendor))); } List<MongoDownloadStat> allDownloadStat = myPluginStatisticsService.getDownloadStat(pluginNode.id); List<MongoDownloadStat> channelDownloadStat = allDownloadStat.stream().filter(it -> it.getChannel().equals(myPluginChannel.name())).collect(Collectors.toList()); verticalLayout.addComponent(VaadinUIUtil.labeled("Downloads: 
", TidyComponents.newLabel(channelDownloadStat.size() + " (all: " + allDownloadStat.size() + ")"))); com.vaadin.ui.Calendar calendar = new com.vaadin.ui.Calendar() { protected String[] getDayNamesShort() { DateFormatSymbols s = new DateFormatSymbols(getLocale()); return Arrays.copyOfRange(s.getShortWeekdays(), 1, 8); } }; calendar.setWidth(25, Unit.EM); calendar.setHeight(18, Unit.EM); for(MongoDownloadStat mongoDownloadStat : channelDownloadStat) { calendar.addEvent(new BasicEvent("download", "", new Date(mongoDownloadStat.getTime()))); } HorizontalLayout calendarControl = new HorizontalLayout(); calendarControl.setSpacing(true); calendarControl.addComponent(TidyComponents.newLabel("Download statistics")); calendarControl.addComponent(TidyComponents.newButton("Month view", event -> switchToMonthView(calendar))); verticalLayout.addComponent(calendarControl); switchToMonthView(calendar); verticalLayout.addComponent(calendar); if(!StringUtil.isEmpty(pluginNode.description)) { TextArea area = new TextArea(); area.setValue(pluginNode.description); area.setReadOnly(true); area.setWidth(100, Unit.PERCENTAGE); area.addStyleName(ValoTheme.TEXTAREA_SMALL); area.addStyleName(ValoTheme.TEXTAREA_BORDERLESS); verticalLayout.addComponent(area); } return verticalLayout; } private void switchToMonthView(com.vaadin.ui.Calendar calendarComponent) { Calendar calendar = Calendar.getInstance(); int rollAmount = calendar.get(GregorianCalendar.DAY_OF_MONTH) - 1; calendar.add(GregorianCalendar.DAY_OF_MONTH, -rollAmount); calendarComponent.setStartDate(calendar.getTime()); calendar.add(GregorianCalendar.MONTH, 1); calendar.add(GregorianCalendar.DATE, -1); calendarComponent.setEndDate(calendar.getTime()); } private static String getPluginNodeName(PluginNode pluginNode) { if(PluginChannelService.isPlatformNode(pluginNode.id)) { switch(pluginNode.id) { // windows case "consulo-win-no-jre": return "Platform (Windows, without JRE)"; case "consulo-win": return "Platform (Windows, with JRE x32)"; 
case "consulo-win64": return "Platform (Windows, with JRE x64)"; case "consulo-win-no-jre-zip": return "Platform (Windows, without JRE, zip archive)"; case "consulo-win-zip": return "Platform (Windows, with JRE x32, zip archive)"; case "consulo-win64-zip": return "Platform (Windows, with JRE x64, zip archive)"; // linux case "consulo-linux-no-jre": return "Platform (Linux, without JRE)"; case "consulo-linux": return "Platform (Linux, with JRE x32)"; case "consulo-linux64": return "Platform (Linux, with JRE x64)"; // mac case "consulo-mac-no-jre": return "Platform (macOS, without JRE)"; case "consulo-mac64": return "Platform (macOS, with JRE x64)"; default: return pluginNode.id; } } return pluginNode.name; } }
frontend/src/main/java/consulo/webService/plugins/ui/RepositoryChannelUI.java
package consulo.webService.plugins.ui; import java.text.DateFormatSymbols; import java.util.*; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.Multimap; import com.google.common.collect.SortedSetMultimap; import com.google.common.collect.TreeMultimap; import com.intellij.openapi.util.text.StringUtil; import com.intellij.util.text.DateFormatUtilRt; import com.intellij.util.text.VersionComparatorUtil; import com.vaadin.server.Page; import com.vaadin.ui.Component; import com.vaadin.ui.HorizontalLayout; import com.vaadin.ui.HorizontalSplitPanel; import com.vaadin.ui.ListSelect; import com.vaadin.ui.TextArea; import com.vaadin.ui.Tree; import com.vaadin.ui.VerticalLayout; import com.vaadin.ui.components.calendar.event.BasicEvent; import com.vaadin.ui.themes.ValoTheme; import consulo.webService.UserConfigurationService; import consulo.webService.plugins.PluginChannel; import consulo.webService.plugins.PluginChannelService; import consulo.webService.plugins.PluginNode; import consulo.webService.plugins.PluginStatisticsService; import consulo.webService.plugins.mongo.MongoDownloadStat; import consulo.webService.ui.RepositoryUI; import consulo.webService.ui.util.TidyComponents; import consulo.webService.ui.util.VaadinUIUtil; /** * @author VISTALL * @since 04-Jan-17 */ public class RepositoryChannelUI extends HorizontalLayout { private final UserConfigurationService myUserConfigurationService; private final PluginStatisticsService myPluginStatisticsService; private final HorizontalLayout myRightLayout; public RepositoryChannelUI(Page page, @NotNull PluginChannel pluginChannel, @NotNull UserConfigurationService userConfigurationService, @NotNull PluginStatisticsService pluginStatisticsService, @Nullable String selectedPluginId) { myUserConfigurationService = userConfigurationService; myPluginStatisticsService = pluginStatisticsService; setSizeFull(); 
ListSelect listSelect = new ListSelect(); listSelect.setNullSelectionAllowed(false); listSelect.setSizeFull(); addComponent(listSelect); myRightLayout = new HorizontalLayout(); myRightLayout.setSizeFull(); addComponent(myRightLayout); setExpandRatio(listSelect, .3f); setExpandRatio(myRightLayout, 1f); HorizontalSplitPanel panel = new HorizontalSplitPanel(); panel.setSplitPosition(70, Unit.PERCENTAGE); panel.setSizeFull(); myRightLayout.addComponent(panel); PluginChannelService repositoryByChannel = myUserConfigurationService.getRepositoryByChannel(pluginChannel); Multimap<String, PluginNode> multimap = ArrayListMultimap.create(); repositoryByChannel.iteratePluginNodes(pluginNode -> multimap.put(pluginNode.id, pluginNode)); // name -> id Map<PluginNode, String> map = new TreeMap<>((o1, o2) -> { int i = o1.name.compareTo(o2.name); if(PluginChannelService.isPlatformNode(o1.id)) { return -1; } else if(PluginChannelService.isPlatformNode(o2.id)) { return 1; } return i; }); for(Map.Entry<String, Collection<PluginNode>> entry : multimap.asMap().entrySet()) { map.put(entry.getValue().iterator().next(), entry.getKey()); } for(Map.Entry<PluginNode, String> entry : map.entrySet()) { listSelect.addItem(entry.getValue()); listSelect.setItemCaption(entry.getValue(), getPluginNodeName(entry.getKey())); } listSelect.addValueChangeListener(event -> { String pluginId = (String) event.getProperty().getValue(); page.setUriFragment(RepositoryUI.getUrlFragment(pluginChannel, pluginId)); // all plugin nodes Collection<PluginNode> pluginNodes = multimap.get(pluginId); // version -> nodes Comparator<PluginNode> pluginNodeComparator = (o1, o2) -> VersionComparatorUtil.compare(o2.version, o1.version); SortedSetMultimap<String, PluginNode> sortByVersion = TreeMultimap.create(Collections.reverseOrder(StringUtil::naturalCompare), pluginNodeComparator); for(PluginNode pluginNode : pluginNodes) { sortByVersion.put(pluginNode.platformVersion, pluginNode); } PluginNode lastPluginNode = null; 
Map<String, Collection<PluginNode>> sorted = sortByVersion.asMap(); Tree tree = new Tree("Versions"); for(Map.Entry<String, Collection<PluginNode>> entry : sorted.entrySet()) { tree.addItem(entry.getKey()); tree.setItemCaption(entry.getKey(), "Consulo #" + entry.getKey()); for(PluginNode node : entry.getValue()) { if(lastPluginNode == null) { lastPluginNode = node; } UUID uuid = UUID.randomUUID(); tree.addItem(uuid); Calendar calendar = Calendar.getInstance(); calendar.setTimeInMillis(node.date); tree.setItemCaption(uuid, "build #" + node.version + " at " + DateFormatUtilRt.formatBuildDate(calendar)); tree.setParent(uuid, entry.getKey()); tree.setChildrenAllowed(uuid, false); } } assert lastPluginNode != null; panel.setFirstComponent(buildInfo(lastPluginNode)); panel.setSecondComponent(tree); }); if(selectedPluginId != null) { listSelect.setValue(selectedPluginId); } } @NotNull private Component buildInfo(@NotNull PluginNode pluginNode) { VerticalLayout verticalLayout = new VerticalLayout(); verticalLayout.setMargin(true); verticalLayout.setSpacing(true); verticalLayout.addComponent(VaadinUIUtil.labeled("ID: ", TidyComponents.newLabel(pluginNode.id))); verticalLayout.addComponent(VaadinUIUtil.labeled("Name: ", TidyComponents.newLabel(getPluginNodeName(pluginNode)))); verticalLayout.addComponent(VaadinUIUtil.labeled("Category: ", TidyComponents.newLabel(pluginNode.category))); if(!StringUtil.isEmpty(pluginNode.vendor)) { verticalLayout.addComponent(VaadinUIUtil.labeled("Vendor: ", TidyComponents.newLabel(pluginNode.vendor))); } List<MongoDownloadStat> downloadStat = myPluginStatisticsService.getDownloadStat(pluginNode.id); verticalLayout.addComponent(VaadinUIUtil.labeled("Downloads: ", TidyComponents.newLabel(String.valueOf(downloadStat.size())))); com.vaadin.ui.Calendar calendar = new com.vaadin.ui.Calendar() { protected String[] getDayNamesShort() { DateFormatSymbols s = new DateFormatSymbols(getLocale()); return Arrays.copyOfRange(s.getShortWeekdays(), 1, 8); } 
}; calendar.setWidth(25, Unit.EM); calendar.setHeight(18, Unit.EM); for(MongoDownloadStat mongoDownloadStat : downloadStat) { calendar.addEvent(new BasicEvent("download", "", new Date(mongoDownloadStat.getTime()))); } HorizontalLayout calendarControl = new HorizontalLayout(); calendarControl.setSpacing(true); calendarControl.addComponent(TidyComponents.newLabel("Download statistics")); calendarControl.addComponent(TidyComponents.newButton("Month view", event -> switchToMonthView(calendar))); verticalLayout.addComponent(calendarControl); switchToMonthView(calendar); verticalLayout.addComponent(calendar); if(!StringUtil.isEmpty(pluginNode.description)) { TextArea area = new TextArea(); area.setValue(pluginNode.description); area.setReadOnly(true); area.setWidth(100, Unit.PERCENTAGE); area.addStyleName(ValoTheme.TEXTAREA_SMALL); area.addStyleName(ValoTheme.TEXTAREA_BORDERLESS); verticalLayout.addComponent(area); } return verticalLayout; } private void switchToMonthView(com.vaadin.ui.Calendar calendarComponent) { Calendar calendar = Calendar.getInstance(); int rollAmount = calendar.get(GregorianCalendar.DAY_OF_MONTH) - 1; calendar.add(GregorianCalendar.DAY_OF_MONTH, -rollAmount); calendarComponent.setStartDate(calendar.getTime()); calendar.add(GregorianCalendar.MONTH, 1); calendar.add(GregorianCalendar.DATE, -1); calendarComponent.setEndDate(calendar.getTime()); } private static String getPluginNodeName(PluginNode pluginNode) { if(PluginChannelService.isPlatformNode(pluginNode.id)) { switch(pluginNode.id) { // windows case "consulo-win-no-jre": return "Platform (Windows, without JRE)"; case "consulo-win": return "Platform (Windows, with JRE x32)"; case "consulo-win64": return "Platform (Windows, with JRE x64)"; case "consulo-win-no-jre-zip": return "Platform (Windows, without JRE, zip archive)"; case "consulo-win-zip": return "Platform (Windows, with JRE x32, zip archive)"; case "consulo-win64-zip": return "Platform (Windows, with JRE x64, zip archive)"; // linux case 
"consulo-linux-no-jre": return "Platform (Linux, without JRE)"; case "consulo-linux": return "Platform (Linux, with JRE x32)"; case "consulo-linux64": return "Platform (Linux, with JRE x64)"; // mac case "consulo-mac-no-jre": return "Platform (macOS, without JRE)"; case "consulo-mac64": return "Platform (macOS, with JRE x64)"; default: return pluginNode.id; } } return pluginNode.name; } }
show download statistics only for current channel not all
frontend/src/main/java/consulo/webService/plugins/ui/RepositoryChannelUI.java
show download statistics only for current channel not all
<ide><path>rontend/src/main/java/consulo/webService/plugins/ui/RepositoryChannelUI.java <ide> <ide> import java.text.DateFormatSymbols; <ide> import java.util.*; <add>import java.util.stream.Collectors; <ide> <ide> import org.jetbrains.annotations.NotNull; <ide> import org.jetbrains.annotations.Nullable; <ide> { <ide> private final UserConfigurationService myUserConfigurationService; <ide> private final PluginStatisticsService myPluginStatisticsService; <add> private final PluginChannel myPluginChannel; <ide> <ide> private final HorizontalLayout myRightLayout; <ide> <ide> @NotNull PluginStatisticsService pluginStatisticsService, <ide> @Nullable String selectedPluginId) <ide> { <add> myPluginChannel = pluginChannel; <ide> myUserConfigurationService = userConfigurationService; <ide> myPluginStatisticsService = pluginStatisticsService; <ide> <ide> verticalLayout.addComponent(VaadinUIUtil.labeled("Vendor: ", TidyComponents.newLabel(pluginNode.vendor))); <ide> } <ide> <del> List<MongoDownloadStat> downloadStat = myPluginStatisticsService.getDownloadStat(pluginNode.id); <del> <del> verticalLayout.addComponent(VaadinUIUtil.labeled("Downloads: ", TidyComponents.newLabel(String.valueOf(downloadStat.size())))); <add> List<MongoDownloadStat> allDownloadStat = myPluginStatisticsService.getDownloadStat(pluginNode.id); <add> List<MongoDownloadStat> channelDownloadStat = allDownloadStat.stream().filter(it -> it.getChannel().equals(myPluginChannel.name())).collect(Collectors.toList()); <add> <add> verticalLayout.addComponent(VaadinUIUtil.labeled("Downloads: ", TidyComponents.newLabel(channelDownloadStat.size() + " (all: " + allDownloadStat.size() + ")"))); <ide> <ide> com.vaadin.ui.Calendar calendar = new com.vaadin.ui.Calendar() <ide> { <ide> calendar.setWidth(25, Unit.EM); <ide> calendar.setHeight(18, Unit.EM); <ide> <del> for(MongoDownloadStat mongoDownloadStat : downloadStat) <add> for(MongoDownloadStat mongoDownloadStat : channelDownloadStat) <ide> { <ide> 
calendar.addEvent(new BasicEvent("download", "", new Date(mongoDownloadStat.getTime()))); <ide> }
Java
lgpl-2.1
063e4d8434f7d517355387c3cebb86837aa21286
0
CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine,CloverETL/CloverETL-Engine
/* * jETeL/Clover - Java based ETL application framework. * Copyright (C) 2002-04 David Pavlis <[email protected]> * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * */ // FILE: c:/projects/jetel/org/jetel/graph/TransformationGraph.java package org.jetel.graph; import java.io.PrintStream; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.Stack; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jetel.enums.EdgeTypeEnum; import org.jetel.enums.EnabledEnum; import org.jetel.exception.GraphConfigurationException; /* * import org.apache.log4j.Logger; * import org.apache.log4j.BasicConfigurator; */ /** * A class that analyzes relations between Nodes and Edges of the Transformation Graph * * @author D.Pavlis * @since April 2, 2002 * @revision $Revision$ * @see OtherClasses */ public class TransformationGraphAnalyzer { static Log logger = LogFactory.getLog(TransformationGraphAnalyzer.class); static PrintStream log = System.out;// default info messages to stdout /** * Returns list (precisely array) of all Nodes. The order of Nodes listed is such that * any parent Node is guaranteed to be listed befor child Node. 
* The circular references between nodes should be detected. * * @param nodes Description of the Parameter * @return Description of the Returned Value * @exception GraphConfigurationException Description of the Exception * @since July 29, 2002 */ public static List<Node> analyzeGraphTopology(List<Node> nodes) throws GraphConfigurationException { Set<Node> set1 = new HashSet<Node>(); Set<Node> set2 = new HashSet<Node>(); Set<Node> actualSet; Set<Node> enumerationOfNodes = new LinkedHashSet<Node>(nodes.size()); Stack<AnalyzedNode> nodesStack = new Stack<AnalyzedNode>(); List<Node> rootNodes; Node node; Iterator iterator; // initial populating of set1 - with root Nodes only iterator = nodes.iterator(); while (iterator.hasNext()) { node = (Node) iterator.next(); if (node.isRoot()) { set1.add(node); } } if (set1.isEmpty()) { logger.error("No root Nodes detected! There must be at least one root node defined." + " (Root node is node with output ports defined only.)"); throw new GraphConfigurationException("No root node!"); } // we need root nodes to traverse graph rootNodes = new LinkedList<Node>(set1); // DETECTING CIRCULAR REFERENCES IN GRAPH iterator = rootNodes.iterator(); while (iterator.hasNext()) { nodesStack.clear(); nodesStack.push(new AnalyzedNode((Node) iterator.next())); if (!inspectCircularReference(nodesStack)) { throw new GraphConfigurationException("Circular reference found in graph !"); } } // enumerate all nodes actualSet = set1; // initialize - actualSet is set1 for the very first run while (!actualSet.isEmpty()) { // add individual nodes from set enumerationOfNodes.addAll(actualSet); // find successors , switch actualSet if (actualSet == set1) { findNodesSuccessors(set1, set2); actualSet = set2; } else { findNodesSuccessors(set2, set1); actualSet = set1; } } // returning nodes ordered by their appearance in the graph -> not really guratanteed that it // works for all configurations, but should be sufficient return 
Arrays.asList(enumerationOfNodes.toArray(new Node[enumerationOfNodes.size()])); } /** * Method which analyzes the need of forcing buffered edge in case * when one component feeds through multiple output ports other components * and dead-lock could occure. See inspectMultipleFeeds() method. * * @param nodes */ public static void analyzeMultipleFeeds(List nodes){ Stack nodesStack = new Stack(); List nodesToAnalyze = new LinkedList(); Node node; Iterator iterator; // set up initial list of nodes to analyze // ontly those with 2 or more input ports need inspection iterator = nodes.iterator(); while (iterator.hasNext()) { node = (Node) iterator.next(); if (node.getInPorts().size()>1 ) { nodesToAnalyze.add(node); } } // DETECTING buffering needs iterator = nodesToAnalyze.iterator(); while (iterator.hasNext()) { nodesStack.clear(); nodesStack.push(new AnalyzedNode((Node) iterator.next())); inspectMultipleFeeds(nodesStack); } } /** * Tests whether there is no loop/cycle in path from root node to leaf node * This test must be run for each root note to ensure that the whole graph is free of cycles * It assumes that the IDs of individual nodes are unique -> it is constraint imposed by design * * @param nodesStack Stack with one elemen - root node from which to start analyzing * @return true if path has no loops, otherwise false */ private static boolean inspectCircularReference(Stack nodesStack) { OutputPort port; Node nextNode; Set nodesEncountered = new HashSet(); while (!nodesStack.empty()) { port = ((AnalyzedNode) nodesStack.peek()).getNextOutPort(); if (port == null) { // this node has no more ports (offsprings) // we have to remove it from already visited nodes nodesEncountered.remove(((AnalyzedNode) nodesStack.pop()).getNode().getId()); } else { nextNode = port.getReader(); //DEBUG ! System.out.println("-"+nextNode.getID()); if (nextNode != null) { // have we seen this node before ? 
if yes, then it is a loop if (!nodesEncountered.add(nextNode.getId())) { dumpNodesReferences(nodesStack.iterator(), nextNode); return false; } nodesStack.push(new AnalyzedNode(nextNode));// put this node on top } } } return true; } /** * Method which checks components which concentrate more than one input for potential deadlocks.<br> * If, for example, join component merges data from two flows which both originate at the same * node (e.g. data reader) then deadlock situation can occure when the join waits for data reader to send next * record on one port and the reader waits for join to consume record on the other port.<br> * If such situation is found, all input ports (Edges) of join has to be buffered. * * @param nodesStack * @return */ private static void inspectMultipleFeeds(Stack nodesStack) { InputPort port; Node prevNode; Set nodesEncountered = new HashSet(); Node startNode=((AnalyzedNode) nodesStack.peek()).getNode(); while (!nodesStack.empty()) { port = ((AnalyzedNode) nodesStack.peek()).getNextInPort(); if (port == null) { // this node has no more input ports // we have to remove it from already visited nodes as the is the end of road. nodesStack.pop(); } else { prevNode = port.getWriter(); if (prevNode != null) { // have we seen this node before ? 
if yes, then we need to buffer start node (its // input ports if (!nodesEncountered.add(prevNode.getId())) { for (int i=0;i<startNode.getInPorts().size();i++){ //TODO: potential problem if port is not backed by EDGE - this should not happen Object edge=startNode.getInputPort(i); //assert edge instanceof Edge : "Port not backed by Edge object !"; if(((Edge)edge).getEdgeType() == EdgeTypeEnum.DIRECT || ((Edge)edge).getEdgeType() == EdgeTypeEnum.DIRECT_FAST_PROPAGATE) { ((Edge)edge).setEdgeType(EdgeTypeEnum.BUFFERED); // DEBUG //System.out.println(((Edge)edge).getID()+" edge should be set to TYPE_BUFFERED."); logger.debug(((Edge)edge).getId()+" edge has been set to TYPE_BUFFERED."); } } } nodesStack.push(new AnalyzedNode(prevNode));// put this node on top } } } } /** * Finds all the successors of Nodes from source Set * * @param source Set of source Nodes * @param destination Set of all immediate successors of Nodes from <source> set * @exception GraphConfigurationException Description of the Exception * @since April 18, 2002 */ protected static void findNodesSuccessors(Set source, Set destination) throws GraphConfigurationException { Iterator nodeIterator = source.iterator(); Iterator portIterator; OutputPort outPort; Node currentNode; Node nextNode; // remove all previous items from dest. destination.clear(); // iterate through all source nodes while (nodeIterator.hasNext()) { currentNode = ((Node) nodeIterator.next()); portIterator = currentNode.getOutPorts().iterator(); // iterate through all output ports // some other node is perhaps connected to these ports while (portIterator.hasNext()) { outPort = (OutputPort) portIterator.next(); // is some Node reading data produced by our source node ? 
nextNode = outPort.getReader(); if (nextNode != null) { if (currentNode.getPhase().getPhaseNum() > nextNode.getPhase().getPhaseNum()) { logger.error("Wrong phase order between components: " + currentNode.getId() + " phase: " + currentNode.getPhase() + " and " + nextNode.getId() + " phase: " + nextNode.getPhase()); throw new GraphConfigurationException("Wrong phase order !"); } destination.add(nextNode); } } } } /** * This is only for reporting problems * * @param iterator Description of the Parameter * @param problemNode Description of the Parameter */ protected static void dumpNodesReferences(Iterator iterator, Node problemNode) { logger.debug("Dump of references between nodes:"); logger.debug("Detected loop when encountered node " + problemNode.getId()); logger.debug("Chain of references:"); StringBuffer buffer=new StringBuffer(64); while (iterator.hasNext()) { buffer.append(((AnalyzedNode) iterator.next()).getNode().getId()); buffer.append(" -> "); } buffer.append(problemNode.getId()); logger.debug(buffer.toString()); } /** * This method puts Nodes of the graph into appropriate Phase objects (Edges too). * Phases are run one by one and when finished, all Nodes&Edges in phase are * destroyed (memory is freed and resources reclaimed).<br> * Then next phase is started. 
* * @param nodes Description of the Parameter * @param edges Description of the Parameter * @param phases Description of the Parameter * @throws GraphConfigurationException */ public static void analyzeEdges(List edges) throws GraphConfigurationException { Phase readerPhase; Phase writerPhase; Edge edge; // analyze edges (whether they need to be buffered and put them into proper phases // edges connecting nodes from two different phases has to be put into both phases for (Iterator iterator=edges.iterator();iterator.hasNext();) { edge = (Edge) iterator.next(); readerPhase = edge.getReader().getPhase(); writerPhase = edge.getWriter().getPhase(); writerPhase.addEdge(edge); if (readerPhase != writerPhase ) { // edge connecting two nodes belonging to different phases // has to be buffered edge.setEdgeType(EdgeTypeEnum.PHASE_CONNECTION); } } } /** * Description of the Class * * @author dpavlis * @since 12. �nor 2004 * @revision $Revision$ */ private static class AnalyzedNode { Node node; int analyzedOutPort; int analyzedInPort; /** *Constructor for the AnalyzedNode object * * @param node Description of the Parameter */ AnalyzedNode(Node node) { this.node = node; analyzedOutPort = 0; analyzedInPort = 0; } /** * Gets the nextPort attribute of the AnalyzedNode object * * @return The nextPort value */ OutputPort getNextOutPort() { if (analyzedOutPort >= node.getOutPorts().size()) { return null; } else { return node.getOutputPort(analyzedOutPort++); } } InputPort getNextInPort() { if (analyzedInPort >= node.getInPorts().size()) { return null; } else { return node.getInputPort(analyzedInPort++); } } /** * Gets the node attribute of the AnalyzedNode object * * @return The node value */ Node getNode() { return node; } } /** * Apply disabled property of node to graph. Called in graph initial phase. 
* @throws GraphConfigurationException */ public static void disableNodesInPhases(TransformationGraph graph) throws GraphConfigurationException { Set<Node> nodesToRemove = new HashSet<Node>(); Phase[] phases = graph.getPhases(); for (int i = 0; i < phases.length; i++) { nodesToRemove.clear(); for (Node node : phases[i].getNodes().values()) { if (node.getEnabled() == EnabledEnum.DISABLED) { nodesToRemove.add(node); disconnectAllEdges(node); } else if (node.getEnabled() == EnabledEnum.PASS_THROUGH) { nodesToRemove.add(node); final Edge inEdge = (Edge) node.getInputPort(node .getPassThroughInputPort()); final Edge outEdge = (Edge) node.getOutputPort(node .getPassThroughOutputPort()); if (inEdge == null || outEdge == null) { disconnectAllEdges(node); continue; } final Node sourceNode = inEdge.getWriter(); final Node targetNode = outEdge.getReader(); final int sourceIdx = inEdge.getOutputPortNumber(); final int targetIdx = outEdge.getInputPortNumber(); disconnectAllEdges(node); sourceNode.addOutputPort(sourceIdx, inEdge); targetNode.addInputPort(targetIdx, inEdge); try { node.getGraph().addEdge(inEdge); } catch (GraphConfigurationException e) { logger.error("Unexpected error: " + e.getMessage()); e.printStackTrace(); } } } for(Node node : nodesToRemove) { phases[i].deleteNode(node); } } } /** * Disconnect all edges connected to the given node. 
* @param node * @throws GraphConfigurationException */ private static void disconnectAllEdges(Node node) throws GraphConfigurationException { for(Iterator it1 = node.getInPorts().iterator(); it1.hasNext();) { final Edge edge = (Edge) it1.next(); Node writer = edge.getWriter(); if(writer != null) writer.removeOutputPort(edge); node.getGraph().deleteEdge(edge); } for(Iterator it1 = node.getOutPorts().iterator(); it1.hasNext();) { final Edge edge = (Edge) it1.next(); final Node reader = edge.getReader(); if(reader != null) reader.removeInputPort(edge); node.getGraph().deleteEdge(edge); } } } /* * end class TransformationGraphAnalyzer */
cloveretl.engine/src/org/jetel/graph/TransformationGraphAnalyzer.java
/* * jETeL/Clover - Java based ETL application framework. * Copyright (C) 2002-04 David Pavlis <[email protected]> * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA * */ // FILE: c:/projects/jetel/org/jetel/graph/TransformationGraph.java package org.jetel.graph; import java.io.PrintStream; import java.util.Arrays; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; import java.util.Stack; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jetel.enums.EdgeTypeEnum; import org.jetel.enums.EnabledEnum; import org.jetel.exception.GraphConfigurationException; /* * import org.apache.log4j.Logger; * import org.apache.log4j.BasicConfigurator; */ /** * A class that analyzes relations between Nodes and Edges of the Transformation Graph * * @author D.Pavlis * @since April 2, 2002 * @revision $Revision$ * @see OtherClasses */ public class TransformationGraphAnalyzer { static Log logger = LogFactory.getLog(TransformationGraphAnalyzer.class); static PrintStream log = System.out;// default info messages to stdout /** * Returns list (precisely array) of all Nodes. The order of Nodes listed is such that * any parent Node is guaranteed to be listed befor child Node. 
* The circular references between nodes should be detected. * * @param nodes Description of the Parameter * @return Description of the Returned Value * @exception GraphConfigurationException Description of the Exception * @since July 29, 2002 */ public static List<Node> analyzeGraphTopology(List<Node> nodes) throws GraphConfigurationException { Set<Node> set1 = new HashSet<Node>(); Set<Node> set2 = new HashSet<Node>(); Set<Node> actualSet; Set<Node> enumerationOfNodes = new LinkedHashSet<Node>(nodes.size()); Stack<AnalyzedNode> nodesStack = new Stack<AnalyzedNode>(); List<Node> rootNodes; Node node; Iterator iterator; // initial populating of set1 - with root Nodes only iterator = nodes.iterator(); while (iterator.hasNext()) { node = (Node) iterator.next(); if (node.isRoot()) { set1.add(node); } } if (set1.isEmpty()) { logger.error("No root Nodes detected! There must be at least one root node defined." + " (Root node is node with output ports defined only.)"); throw new GraphConfigurationException("No root node!"); } // we need root nodes to traverse graph rootNodes = new LinkedList<Node>(set1); // DETECTING CIRCULAR REFERENCES IN GRAPH iterator = rootNodes.iterator(); while (iterator.hasNext()) { nodesStack.clear(); nodesStack.push(new AnalyzedNode((Node) iterator.next())); if (!inspectCircularReference(nodesStack)) { throw new GraphConfigurationException("Circular reference found in graph !"); } } // enumerate all nodes actualSet = set1; // initialize - actualSet is set1 for the very first run while (!actualSet.isEmpty()) { // add individual nodes from set enumerationOfNodes.addAll(actualSet); // find successors , switch actualSet if (actualSet == set1) { findNodesSuccessors(set1, set2); actualSet = set2; } else { findNodesSuccessors(set2, set1); actualSet = set1; } } // returning nodes ordered by their appearance in the graph -> not really guratanteed that it // works for all configurations, but should be sufficient return 
Arrays.asList(enumerationOfNodes.toArray(new Node[enumerationOfNodes.size()])); } /** * Method which analyzes the need of forcing buffered edge in case * when one component feeds through multiple output ports other components * and dead-lock could occure. See inspectMultipleFeeds() method. * * @param nodes */ public static void analyzeMultipleFeeds(List nodes){ Stack nodesStack = new Stack(); List nodesToAnalyze = new LinkedList(); Node node; Iterator iterator; // set up initial list of nodes to analyze // ontly those with 2 or more input ports need inspection iterator = nodes.iterator(); while (iterator.hasNext()) { node = (Node) iterator.next(); if (node.getInPorts().size()>1 ) { nodesToAnalyze.add(node); } } // DETECTING buffering needs iterator = nodesToAnalyze.iterator(); while (iterator.hasNext()) { nodesStack.clear(); nodesStack.push(new AnalyzedNode((Node) iterator.next())); inspectMultipleFeeds(nodesStack); } } /** * Tests whether there is no loop/cycle in path from root node to leaf node * This test must be run for each root note to ensure that the whole graph is free of cycles * It assumes that the IDs of individual nodes are unique -> it is constraint imposed by design * * @param nodesStack Stack with one elemen - root node from which to start analyzing * @return true if path has no loops, otherwise false */ private static boolean inspectCircularReference(Stack nodesStack) { OutputPort port; Node nextNode; Set nodesEncountered = new HashSet(); while (!nodesStack.empty()) { port = ((AnalyzedNode) nodesStack.peek()).getNextOutPort(); if (port == null) { // this node has no more ports (offsprings) // we have to remove it from already visited nodes nodesEncountered.remove(((AnalyzedNode) nodesStack.pop()).getNode().getId()); } else { nextNode = port.getReader(); //DEBUG ! System.out.println("-"+nextNode.getID()); if (nextNode != null) { // have we seen this node before ? 
if yes, then it is a loop if (!nodesEncountered.add(nextNode.getId())) { dumpNodesReferences(nodesStack.iterator(), nextNode); return false; } nodesStack.push(new AnalyzedNode(nextNode));// put this node on top } } } return true; } /** * Method which checks components which concentrate more than one input for potential deadlocks.<br> * If, for example, join component merges data from two flows which both originate at the same * node (e.g. data reader) then deadlock situation can occure when the join waits for data reader to send next * record on one port and the reader waits for join to consume record on the other port.<br> * If such situation is found, all input ports (Edges) of join has to be buffered. * * @param nodesStack * @return */ private static void inspectMultipleFeeds(Stack nodesStack) { InputPort port; Node prevNode; Set nodesEncountered = new HashSet(); Node startNode=((AnalyzedNode) nodesStack.peek()).getNode(); while (!nodesStack.empty()) { port = ((AnalyzedNode) nodesStack.peek()).getNextInPort(); if (port == null) { // this node has no more input ports // we have to remove it from already visited nodes as the is the end of road. nodesStack.pop(); } else { prevNode = port.getWriter(); if (prevNode != null) { // have we seen this node before ? 
if yes, then we need to buffer start node (its // input ports if (!nodesEncountered.add(prevNode.getId())) { for (int i=0;i<startNode.getInPorts().size();i++){ //TODO: potential problem if port is not backed by EDGE - this should not happen Object edge=startNode.getInputPort(i); //assert edge instanceof Edge : "Port not backed by Edge object !"; if(((Edge)edge).getEdgeType() == EdgeTypeEnum.DIRECT || ((Edge)edge).getEdgeType() == EdgeTypeEnum.DIRECT_FAST_PROPAGATE) { ((Edge)edge).setEdgeType(EdgeTypeEnum.BUFFERED); // DEBUG //System.out.println(((Edge)edge).getID()+" edge should be set to TYPE_BUFFERED."); logger.debug(((Edge)edge).getId()+" edge has been set to TYPE_BUFFERED."); } } } nodesStack.push(new AnalyzedNode(prevNode));// put this node on top } } } } /** * Finds all the successors of Nodes from source Set * * @param source Set of source Nodes * @param destination Set of all immediate successors of Nodes from <source> set * @exception GraphConfigurationException Description of the Exception * @since April 18, 2002 */ protected static void findNodesSuccessors(Set source, Set destination) throws GraphConfigurationException { Iterator nodeIterator = source.iterator(); Iterator portIterator; OutputPort outPort; Node currentNode; Node nextNode; // remove all previous items from dest. destination.clear(); // iterate through all source nodes while (nodeIterator.hasNext()) { currentNode = ((Node) nodeIterator.next()); portIterator = currentNode.getOutPorts().iterator(); // iterate through all output ports // some other node is perhaps connected to these ports while (portIterator.hasNext()) { outPort = (OutputPort) portIterator.next(); // is some Node reading data produced by our source node ? 
nextNode = outPort.getReader(); if (nextNode != null) { if (currentNode.getPhase().getPhaseNum() > nextNode.getPhase().getPhaseNum()) { logger.error("Wrong phase order between components: " + currentNode.getId() + " phase: " + currentNode.getPhase() + " and " + nextNode.getId() + " phase: " + nextNode.getPhase()); throw new GraphConfigurationException("Wrong phase order !"); } destination.add(nextNode); } } } } /** * This is only for reporting problems * * @param iterator Description of the Parameter * @param problemNode Description of the Parameter */ protected static void dumpNodesReferences(Iterator iterator, Node problemNode) { logger.debug("Dump of references between nodes:"); logger.debug("Detected loop when encountered node " + problemNode.getId()); logger.debug("Chain of references:"); StringBuffer buffer=new StringBuffer(64); while (iterator.hasNext()) { buffer.append(((AnalyzedNode) iterator.next()).getNode().getId()); buffer.append(" -> "); } buffer.append(problemNode.getId()); logger.debug(buffer.toString()); } /** * This method puts Nodes of the graph into appropriate Phase objects (Edges too). * Phases are run one by one and when finished, all Nodes&Edges in phase are * destroyed (memory is freed and resources reclaimed).<br> * Then next phase is started. 
* * @param nodes Description of the Parameter * @param edges Description of the Parameter * @param phases Description of the Parameter * @throws GraphConfigurationException */ public static void analyzeEdges(List edges) throws GraphConfigurationException { Phase readerPhase; Phase writerPhase; Edge edge; // analyze edges (whether they need to be buffered and put them into proper phases // edges connecting nodes from two different phases has to be put into both phases for (Iterator iterator=edges.iterator();iterator.hasNext();) { edge = (Edge) iterator.next(); readerPhase = edge.getReader().getPhase(); writerPhase = edge.getWriter().getPhase(); writerPhase.addEdge(edge); if (readerPhase != writerPhase ) { // edge connecting two nodes belonging to different phases // has to be buffered edge.setEdgeType(EdgeTypeEnum.PHASE_CONNECTION); } } } /** * Description of the Class * * @author dpavlis * @since 12. �nor 2004 * @revision $Revision$ */ private static class AnalyzedNode { Node node; int analyzedOutPort; int analyzedInPort; /** *Constructor for the AnalyzedNode object * * @param node Description of the Parameter */ AnalyzedNode(Node node) { this.node = node; analyzedOutPort = 0; analyzedInPort = 0; } /** * Gets the nextPort attribute of the AnalyzedNode object * * @return The nextPort value */ OutputPort getNextOutPort() { if (analyzedOutPort >= node.getOutPorts().size()) { return null; } else { return node.getOutputPort(analyzedOutPort++); } } InputPort getNextInPort() { if (analyzedInPort >= node.getInPorts().size()) { return null; } else { return node.getInputPort(analyzedInPort++); } } /** * Gets the node attribute of the AnalyzedNode object * * @return The node value */ Node getNode() { return node; } } /** * Apply disabled property of node to graph. Called in graph initial phase. 
* @throws GraphConfigurationException */ public static void disableNodesInPhases(TransformationGraph graph) throws GraphConfigurationException { Set<Node> nodesToRemove = new HashSet<Node>(); Phase[] phases = graph.getPhases(); for (int i = 0; i < phases.length; i++) { nodesToRemove.clear(); for (Node node : phases[i].getNodes().values()) { if (node.getEnabled() == EnabledEnum.DISABLED) { nodesToRemove.add(node); disconnectAllEdges(node); } else if (node.getEnabled() == EnabledEnum.PASS_THROUGH) { nodesToRemove.add(node); final Edge inEdge = (Edge) node.getInputPort(node .getPassThroughInputPort()); final Edge outEdge = (Edge) node.getOutputPort(node .getPassThroughOutputPort()); if (inEdge == null || outEdge == null) { disconnectAllEdges(node); continue; } final Node sourceNode = inEdge.getWriter(); final Node targetNode = outEdge.getReader(); final int sourceIdx = inEdge.getOutputPortNumber(); final int targetIdx = outEdge.getInputPortNumber(); disconnectAllEdges(node); sourceNode.addOutputPort(sourceIdx, inEdge); targetNode.addInputPort(targetIdx, inEdge); try { node.getGraph().addEdge(inEdge); } catch (GraphConfigurationException e) { logger.error("Unexpected error: " + e.getMessage()); e.printStackTrace(); } } } for(Node node : nodesToRemove) { phases[i].deleteNode(node); } if (phases[i].getNodes().isEmpty()) { graph.removePhase(phases[i]); } } } /** * Disconnect all edges connected to the given node. 
* @param node * @throws GraphConfigurationException */ private static void disconnectAllEdges(Node node) throws GraphConfigurationException { for(Iterator it1 = node.getInPorts().iterator(); it1.hasNext();) { final Edge edge = (Edge) it1.next(); Node writer = edge.getWriter(); if(writer != null) writer.removeOutputPort(edge); node.getGraph().deleteEdge(edge); } for(Iterator it1 = node.getOutPorts().iterator(); it1.hasNext();) { final Edge edge = (Edge) it1.next(); final Node reader = edge.getReader(); if(reader != null) reader.removeInputPort(edge); node.getGraph().deleteEdge(edge); } } } /* * end class TransformationGraphAnalyzer */
UPDATE: empty phase has to be still part of the graph - for server phases synchronization. git-svn-id: 7003860f782148507aa0d02fa3b12992383fb6a5@7655 a09ad3ba-1a0f-0410-b1b9-c67202f10d70
cloveretl.engine/src/org/jetel/graph/TransformationGraphAnalyzer.java
UPDATE: empty phase has to be still part of the graph - for server phases synchronization.
<ide><path>loveretl.engine/src/org/jetel/graph/TransformationGraphAnalyzer.java <ide> for(Node node : nodesToRemove) { <ide> phases[i].deleteNode(node); <ide> } <del> <del> if (phases[i].getNodes().isEmpty()) { <del> graph.removePhase(phases[i]); <del> } <ide> } <ide> } <ide>
Java
apache-2.0
error: pathspec 'src/test/java/io/rocketscience/java/lang/CauseTest.java' did not match any file(s) known to git
8e28a5486e7dc57f6772e3526aafcc4735f2635e
1
amygithub/vavr,ummels/vavr,amygithub/vavr,ummels/vavr,dx-pbuckley/vavr,dx-pbuckley/vavr
package io.rocketscience.java.lang; import static org.fest.assertions.api.Assertions.assertThat; import org.junit.Test; public class CauseTest { @Test public void testName() throws Exception { final Cause cause = Cause.of(new OutOfMemoryError()); assertThat(cause.isFatal()).isTrue(); } }
src/test/java/io/rocketscience/java/lang/CauseTest.java
unit tests
src/test/java/io/rocketscience/java/lang/CauseTest.java
unit tests
<ide><path>rc/test/java/io/rocketscience/java/lang/CauseTest.java <add>package io.rocketscience.java.lang; <add> <add>import static org.fest.assertions.api.Assertions.assertThat; <add> <add>import org.junit.Test; <add> <add>public class CauseTest { <add> <add> @Test <add> public void testName() throws Exception { <add> final Cause cause = Cause.of(new OutOfMemoryError()); <add> assertThat(cause.isFatal()).isTrue(); <add> <add> } <add> <add>}
JavaScript
mit
286175530dbc2e16893b7c12821ca382ec6756c5
0
casal033/UMM3601ursamajor,casal033/UMM3601ursamajor,casal033/UMM3601ursamajor
'use strict'; angular.module('umm3601ursamajorApp') .controller('StatuseditorCtrl', function ($scope, $http, Auth, $location, User, Modal, socket) { if(Auth.isAdmin() || Auth.isChair()) { } else { $location.path('/'); } $scope.users = User.query(); $scope.isAdmin = Auth.isAdmin; $scope.isChair = Auth.isChair; $scope.statusArray = []; $scope.submissions = []; $http.get('/api/statuss').success(function(statusArray) { $scope.statusArray = statusArray; $scope.statusArray.sort(function(a, b){return b.priority- a.priority}); }); $http.get('/api/submissions').success(function(submissions) { $scope.submissions = submissions; socket.syncUpdates('submission', $scope.submissions); }); $scope.getStatuses = function(){ $http.get('/api/statuss').success(function(statusArray) { $scope.statusArray = statusArray; $scope.statusArray.sort(function(a, b){return b.priority- a.priority}); }); }; $scope.statusEditorColor = function(status){ return {'border-left': '4px solid rgb(' + status.color.red + ',' + status.color.green + ',' + status.color.blue + ')'}; }; $scope.deleteSubmissionConfirm = function(item){ Modal.confirm.delete($scope.deleteSubmission)(item.strict, item); }; $scope.deleteStatus = function(item){ var r = confirm("Are you sure you want to delete this status? 
All statuses with this status will need to be changed.") if(r == true) { $http.delete('/api/statuss/' + item._id).success(function () { $scope.statusArray.splice($scope.statusArray.indexOf(item), 1); }); var threshold = item.priority; for (var j = 0; j < $scope.statusArray.length; j++) { if ($scope.statusArray[j].priority != 15 && $scope.statusArray[j].priority != -15) { if ($scope.statusArray[j].priority > threshold) { $scope.statusArray[j].priority--; $http.patch('/api/statuss/' + $scope.statusArray[j]._id, {priority: $scope.statusArray[j].priority}) } } } } }; $scope.findEmptyPriority = function(status){ var count = 2; for(var j = 0; j < status.length; j++) { for (var i = 0; i < status.length; i++) { if (status[i].priority == count) { count++; } } } return count; }; $scope.addStatus = function() { var r = confirm("Are you sure you want to add a status?") if(r == true) { $http.post('/api/statuss/', { strict: "Default Status", color: {red: 0, green: 0, blue: 0, alpha: 1}, emailSubject: "", emailBody: "", priority: $scope.findEmptyPriority($scope.statusArray), required: false }).success(function () { console.log("Successfully added new status") $scope.getStatuses(); }); } }; $scope.requiredStatus = function(status){ return(status.required); }; // $scope.submitChanges = function(status) { // var r = confirm("Are you sure you want to edit this status?"); // var strict = ""; // var conflict = false; // var priorityOne = false; // var x = $scope.statusArray.indexOf(status); // if (r) { // for (var i = 0; i < $scope.statusArray.length; i++) { // if ($scope.statusArray[i].priority == status.priority) { // if ($scope.statusArray[i]._id != status._id) { // conflict = true; // } // // } // if (status.priority == 1) { // priorityOne = true; // } // } // if (!conflict && !priorityOne) { // $http.patch('/api/statuss/' + $scope.statusArray[x]._id, // { // strict: $scope.statusArray[x].strict, // color: $scope.statusArray[x].color, // emailSubject: 
$scope.statusArray[x].emailSubject, // emailBody: $scope.statusArray[x].emailBody, // priority: $scope.statusArray[x].priority // } // ).success(function () { // $location.path('/admin'); // for (var j = 0; j < $scope.submissions.length; j++) { // if ($scope.submissions[j].status.strict == strict) { // console.log("things were detected to be different"); // $scope.submissions[j].status.strict = $scope.statusArray[x].strict; // $http.patch('/api/submissions/' + $scope.submissions[j]._id, { // status: {strict: $scope.statusArray[x].strict, text: $scope.submissions[j].status.text} // }) // // // } else { // alert("There already exists a status with this priority.") // } // } // }) // } // // } // } $scope.submitChanges = function(status) { var r = confirm("Are you sure you want to edit this status?"); var strict = ""; var problem = false; var x = $scope.statusArray.indexOf(status); if (r) { for (var i = 0; i < $scope.statusArray.length; i++) { if ($scope.statusArray[i].priority == status.priority) { if ($scope.statusArray[i]._id != status._id) { problem = true; } } if ((status.priority <= 1 || status.priority >= 15) && (status.required == false)) { problem = true; } } if (!problem) { $http.get('/api/statuss/' + $scope.statusArray[x]._id).success(function (oldStatus) { strict = oldStatus.strict; $http.patch('/api/statuss/' + $scope.statusArray[x]._id, { strict: $scope.statusArray[x].strict, color: $scope.statusArray[x].color, emailSubject: $scope.statusArray[x].emailSubject, emailBody: $scope.statusArray[x].emailBody, priority: $scope.statusArray[x].priority } ).success(function () { $location.path('/admin'); for (var j = 0; j < $scope.submissions.length; j++) { if ($scope.submissions[j].status.strict == strict) { console.log("things were detected to be different"); $scope.submissions[j].status.strict = $scope.statusArray[x].strict; $http.patch('/api/submissions/' + $scope.submissions[j]._id, { status: {strict: $scope.statusArray[x].strict, text: 
$scope.submissions[j].status.text} }) } } }) }) } else { //alert("There already exists a status with this priority.") alert("There is a problem using this priority (priority is less than 2, greater than 14, or shares a priority with another status). Please, pick a new one.") } } } });
client/app/statuseditor/statuseditor.controller.js
'use strict'; angular.module('umm3601ursamajorApp') .controller('StatuseditorCtrl', function ($scope, $http, Auth, $location, User, Modal, socket) { if(Auth.isAdmin() || Auth.isChair()) { } else { $location.path('/'); } $scope.users = User.query(); $scope.isAdmin = Auth.isAdmin; $scope.isChair = Auth.isChair; $scope.statusArray = []; $scope.submissions = []; $http.get('/api/statuss').success(function(statusArray) { $scope.statusArray = statusArray; }); $http.get('/api/submissions').success(function(submissions) { $scope.submissions = submissions; socket.syncUpdates('submission', $scope.submissions); }); $scope.getStatuses = function(){ $http.get('/api/statuss').success(function(statusArray) { $scope.statusArray = statusArray; }); }; $scope.statusEditorColor = function(status){ return {'border-left': '4px solid rgb(' + status.color.red + ',' + status.color.green + ',' + status.color.blue + ')'}; }; $scope.deleteSubmissionConfirm = function(item){ Modal.confirm.delete($scope.deleteSubmission)(item.strict, item); }; $scope.deleteStatus = function(item){ var r = confirm("Are you sure you want to delete this status? 
All statuses with this status will need to be changed.") if(r == true) { $http.delete('/api/statuss/' + item._id).success(function () { $scope.statusArray.splice($scope.statusArray.indexOf(item), 1); }); var threshold = item.priority; for (var j = 0; j < $scope.statusArray.length; j++) { if ($scope.statusArray[j].priority != 15 && $scope.statusArray[j].priority != -15) { if ($scope.statusArray[j].priority > threshold) { $scope.statusArray[j].priority--; $http.patch('/api/statuss/' + $scope.statusArray[j]._id, {priority: $scope.statusArray[j].priority}) } } } } }; $scope.findEmptyPriority = function(status){ var count = 2; for(var j = 0; j < status.length; j++) { for (var i = 0; i < status.length; i++) { if (status[i].priority == count) { count++; } } } return count; }; $scope.addStatus = function() { var r = confirm("Are you sure you want to add a status?") if(r == true) { $http.post('/api/statuss/', { strict: "Default Status", color: {red: 0, green: 0, blue: 0, alpha: 1}, emailSubject: "", emailBody: "", priority: $scope.findEmptyPriority($scope.statusArray), required: false }).success(function () { console.log("Successfully added new status") $scope.getStatuses(); }); } }; $scope.requiredStatus = function(status){ return(status.required); }; // $scope.submitChanges = function(status) { // var r = confirm("Are you sure you want to edit this status?"); // var strict = ""; // var conflict = false; // var priorityOne = false; // var x = $scope.statusArray.indexOf(status); // if (r) { // for (var i = 0; i < $scope.statusArray.length; i++) { // if ($scope.statusArray[i].priority == status.priority) { // if ($scope.statusArray[i]._id != status._id) { // conflict = true; // } // // } // if (status.priority == 1) { // priorityOne = true; // } // } // if (!conflict && !priorityOne) { // $http.patch('/api/statuss/' + $scope.statusArray[x]._id, // { // strict: $scope.statusArray[x].strict, // color: $scope.statusArray[x].color, // emailSubject: 
$scope.statusArray[x].emailSubject, // emailBody: $scope.statusArray[x].emailBody, // priority: $scope.statusArray[x].priority // } // ).success(function () { // $location.path('/admin'); // for (var j = 0; j < $scope.submissions.length; j++) { // if ($scope.submissions[j].status.strict == strict) { // console.log("things were detected to be different"); // $scope.submissions[j].status.strict = $scope.statusArray[x].strict; // $http.patch('/api/submissions/' + $scope.submissions[j]._id, { // status: {strict: $scope.statusArray[x].strict, text: $scope.submissions[j].status.text} // }) // // // } else { // alert("There already exists a status with this priority.") // } // } // }) // } // // } // } $scope.submitChanges = function(status) { var r = confirm("Are you sure you want to edit this status?"); var strict = ""; var problem = false; var x = $scope.statusArray.indexOf(status); if (r) { for (var i = 0; i < $scope.statusArray.length; i++) { if ($scope.statusArray[i].priority == status.priority) { if ($scope.statusArray[i]._id != status._id) { problem = true; } } if ((status.priority <= 1 || status.priority >= 15) && (status.required == false)) { problem = true; } } if (!problem) { $http.get('/api/statuss/' + $scope.statusArray[x]._id).success(function (oldStatus) { strict = oldStatus.strict; $http.patch('/api/statuss/' + $scope.statusArray[x]._id, { strict: $scope.statusArray[x].strict, color: $scope.statusArray[x].color, emailSubject: $scope.statusArray[x].emailSubject, emailBody: $scope.statusArray[x].emailBody, priority: $scope.statusArray[x].priority } ).success(function () { $location.path('/admin'); for (var j = 0; j < $scope.submissions.length; j++) { if ($scope.submissions[j].status.strict == strict) { console.log("things were detected to be different"); $scope.submissions[j].status.strict = $scope.statusArray[x].strict; $http.patch('/api/submissions/' + $scope.submissions[j]._id, { status: {strict: $scope.statusArray[x].strict, text: 
$scope.submissions[j].status.text} }) } } }) }) } else { //alert("There already exists a status with this priority.") alert("There is a problem using this priority (priority is less than 2, greater than 14, or shares a priority with another status). Please, pick a new one.") } } } });
Ordered statuses in editor
client/app/statuseditor/statuseditor.controller.js
Ordered statuses in editor
<ide><path>lient/app/statuseditor/statuseditor.controller.js <ide> <ide> $http.get('/api/statuss').success(function(statusArray) { <ide> $scope.statusArray = statusArray; <add> $scope.statusArray.sort(function(a, b){return b.priority- a.priority}); <ide> }); <ide> <ide> $http.get('/api/submissions').success(function(submissions) { <ide> $scope.getStatuses = function(){ <ide> $http.get('/api/statuss').success(function(statusArray) { <ide> $scope.statusArray = statusArray; <add> $scope.statusArray.sort(function(a, b){return b.priority- a.priority}); <ide> }); <ide> }; <ide>
Java
agpl-3.0
ab6ad24312c9e6df0b52556a821be4609f1c64a8
0
ngaut/sql-layer,qiuyesuifeng/sql-layer,wfxiang08/sql-layer-1,relateiq/sql-layer,relateiq/sql-layer,ngaut/sql-layer,jaytaylor/sql-layer,wfxiang08/sql-layer-1,wfxiang08/sql-layer-1,relateiq/sql-layer,shunwang/sql-layer-1,shunwang/sql-layer-1,relateiq/sql-layer,ngaut/sql-layer,wfxiang08/sql-layer-1,shunwang/sql-layer-1,jaytaylor/sql-layer,qiuyesuifeng/sql-layer,jaytaylor/sql-layer,shunwang/sql-layer-1,qiuyesuifeng/sql-layer,jaytaylor/sql-layer,qiuyesuifeng/sql-layer,ngaut/sql-layer
package com.akiban.cserver.service.memcache; import java.io.ByteArrayOutputStream; import java.util.Map; import java.util.Set; import com.akiban.cserver.service.session.Session; import com.akiban.cserver.service.session.SessionImpl; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelHandler; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.ExceptionEvent; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.group.DefaultChannelGroup; import com.akiban.cserver.api.HapiProcessor; import com.thimbleware.jmemcached.Cache; import com.thimbleware.jmemcached.CacheElement; import com.thimbleware.jmemcached.LocalCacheElement; import com.thimbleware.jmemcached.MemCacheDaemon; import com.thimbleware.jmemcached.protocol.Command; import com.thimbleware.jmemcached.protocol.CommandMessage; import com.thimbleware.jmemcached.protocol.ResponseMessage; import com.thimbleware.jmemcached.protocol.exceptions.UnknownCommandException; /** * Processes CommandMessage and generate ResponseMessage, shared among all channels. * * Inspried by: com.thimbleware.jmemcached.protocol.MemcachedCommandHandler */ @ChannelHandler.Sharable final class AkibanCommandHandler extends SimpleChannelUpstreamHandler { interface FormatGetter { HapiProcessor.Outputter getFormat(); } private final ThreadLocal<Session> session = new ThreadLocal<Session>() { @Override protected Session initialValue() { return new SessionImpl(); } }; /** * State variables that are universal for entire service. * The handler *must* be declared with a ChannelPipelineCoverage of "all". 
*/ private final HapiProcessor hapiProcessor; private final DefaultChannelGroup channelGroup; private static final Log LOG = LogFactory.getLog(MemcacheService.class); private final FormatGetter formatGetter; public AkibanCommandHandler(HapiProcessor hapiProcessor, DefaultChannelGroup channelGroup, FormatGetter formatGetter) { this.hapiProcessor = hapiProcessor; this.channelGroup = channelGroup; this.formatGetter = formatGetter; } /** * On open we manage some statistics, and add this connection to the channel group. */ @Override public void channelOpen(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { channelGroup.add(context.getChannel()); } /** * Track stats and then remove from channel group */ @Override public void channelClosed(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { channelGroup.remove(context.getChannel()); } /** * Eat the exception, probably an improperly closed client. */ @Override public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception { LOG.error("Command handler caught exception: " + e, e.getCause()); } /** * Turn CommandMessages into executions against the CS and then pass on downstream message */ @Override @SuppressWarnings("unchecked") public void messageReceived(ChannelHandlerContext context, MessageEvent event) throws Exception { if (!(event.getMessage() instanceof CommandMessage)) { // Ignore what this encoder can't encode. 
context.sendUpstream(event); return; } CommandMessage<CacheElement> command = (CommandMessage<CacheElement>) event.getMessage(); Command cmdOp = command.cmd; if(LOG.isDebugEnabled()) { StringBuilder msg = new StringBuilder(); msg.append(command.cmd); if(command.element != null) { msg.append(" ").append(command.element.getKeystring()); } for(int i = 0; i < command.keys.size(); ++i) { msg.append(" ").append(command.keys.get(i)); } LOG.debug(msg.toString()); } Channel channel = event.getChannel(); switch(cmdOp) { case GET: case GETS: handleGets(context, command, channel); break; case SET: handleSet(context, command, channel); break; case CAS: handleCas(context, command, channel); break; case ADD: handleAdd(context, command, channel); break; case REPLACE: handleReplace(context, command, channel); break; case APPEND: handleAppend(context, command, channel); break; case PREPEND: handlePrepend(context, command, channel); break; case INCR: handleIncr(context, command, channel); break; case DECR: handleDecr(context, command, channel); break; case DELETE: handleDelete(context, command, channel); break; case STATS: handleStats(context, command, channel); break; case VERSION: handleVersion(context, command, channel); break; case QUIT: handleQuit(channel); break; case FLUSH_ALL: handleFlush(context, command, channel); break; default: if(cmdOp == null) { handleNoOp(context, command); } else { throw new UnknownCommandException("unknown command:" + cmdOp); } } } protected void handleNoOp(ChannelHandlerContext context, CommandMessage<CacheElement> command) { Channels.fireMessageReceived(context, new ResponseMessage(command)); } protected void handleFlush(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { boolean flushSuccess = false; // flushSuccess = cache.flush_all(command.time) Channels.fireMessageReceived(context, new ResponseMessage(command).withFlushResponse(flushSuccess), channel.getRemoteAddress()); } protected void handleQuit(Channel 
channel) { channel.disconnect(); } protected void handleVersion(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { ResponseMessage responseMessage = new ResponseMessage(command); responseMessage.version = MemCacheDaemon.memcachedVersion; Channels.fireMessageReceived(context, responseMessage, channel.getRemoteAddress()); } protected void handleStats(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { // String option = ""; // if(command.keys.size() > 0) { // option = new String(command.keys.get(0)); // } Map<String, Set<String>> statResponse = null; // statResponse = cache.stat(option) Channels.fireMessageReceived(context, new ResponseMessage(command).withStatResponse(statResponse), channel.getRemoteAddress()); } protected void handleDelete(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.DeleteResponse dr = null; //dr = cache.delete(command.keys.get(0), command.time); Channels.fireMessageReceived(context, new ResponseMessage(command).withDeleteResponse(dr), channel.getRemoteAddress()); } protected void handleDecr(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Integer incrDecrResp = null; //incDecrResp = cache.get_add(command.keys.get(0), -1 * command.incrAmount); Channels.fireMessageReceived(context, new ResponseMessage(command).withIncrDecrResponse(incrDecrResp), channel.getRemoteAddress()); } protected void handleIncr(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Integer incrDecrResp = null; //incRecrResp = cache.get_add(command.keys.get(0), command.incrAmount); // TODO support default value and expiry!! 
Channels.fireMessageReceived(context, new ResponseMessage(command).withIncrDecrResponse(incrDecrResp), channel.getRemoteAddress()); } protected void handlePrepend(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.prepend(command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void handleAppend(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.append(command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void handleReplace(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.replace(command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void handleAdd(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.add(command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void handleCas(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.cas(command.cas_key, command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void handleSet(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.set(command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void 
handleGets(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { String[] keys = new String[command.keys.size()]; keys = command.keys.toArray(keys); byte[] key = keys[0].getBytes(); String request = new String(key); byte[] result_bytes; try { AkibanByteOutputStream outputStream = new AkibanByteOutputStream(1024); hapiProcessor.processRequest(session.get(), request, formatGetter.getFormat(), outputStream); result_bytes = outputStream.getBytesNoCopy(); } catch (Exception e) { result_bytes = ("error: " + e.getMessage()).getBytes(); } CacheElement[] results = null; if(result_bytes != null) { LocalCacheElement element = new LocalCacheElement(keys[0]); element.setData(result_bytes); results = new CacheElement[] { element }; } ResponseMessage<CacheElement> resp = new ResponseMessage<CacheElement>(command).withElements(results); Channels.fireMessageReceived(context, resp, channel.getRemoteAddress()); } private static class AkibanByteOutputStream extends ByteArrayOutputStream { private AkibanByteOutputStream(int size) { super(size); } public byte[] getBytesNoCopy() { return buf; } } }
src/main/java/com/akiban/cserver/service/memcache/AkibanCommandHandler.java
package com.akiban.cserver.service.memcache; import java.io.ByteArrayOutputStream; import java.nio.ByteBuffer; import java.util.Map; import java.util.Set; import com.akiban.cserver.service.session.Session; import com.akiban.cserver.service.session.SessionImpl; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelHandler; import org.jboss.netty.channel.ChannelHandlerContext; import org.jboss.netty.channel.ChannelStateEvent; import org.jboss.netty.channel.Channels; import org.jboss.netty.channel.ExceptionEvent; import org.jboss.netty.channel.MessageEvent; import org.jboss.netty.channel.SimpleChannelUpstreamHandler; import org.jboss.netty.channel.group.DefaultChannelGroup; import com.akiban.cserver.api.HapiProcessor; import com.thimbleware.jmemcached.Cache; import com.thimbleware.jmemcached.CacheElement; import com.thimbleware.jmemcached.LocalCacheElement; import com.thimbleware.jmemcached.MemCacheDaemon; import com.thimbleware.jmemcached.protocol.Command; import com.thimbleware.jmemcached.protocol.CommandMessage; import com.thimbleware.jmemcached.protocol.ResponseMessage; import com.thimbleware.jmemcached.protocol.exceptions.UnknownCommandException; /** * Processes CommandMessage and generate ResponseMessage, shared among all channels. * * Inspried by: com.thimbleware.jmemcached.protocol.MemcachedCommandHandler */ @ChannelHandler.Sharable final class AkibanCommandHandler extends SimpleChannelUpstreamHandler { interface FormatGetter { HapiProcessor.Outputter getFormat(); } private final ThreadLocal<Session> session = new ThreadLocal<Session>() { @Override protected Session initialValue() { return new SessionImpl(); } }; /** * State variables that are universal for entire service. * The handler *must* be declared with a ChannelPipelineCoverage of "all". 
*/ private final HapiProcessor hapiProcessor; private final DefaultChannelGroup channelGroup; private static final Log LOG = LogFactory.getLog(MemcacheService.class); private final FormatGetter formatGetter; public AkibanCommandHandler(HapiProcessor hapiProcessor, DefaultChannelGroup channelGroup, FormatGetter formatGetter) { this.hapiProcessor = hapiProcessor; this.channelGroup = channelGroup; this.formatGetter = formatGetter; } /** * On open we manage some statistics, and add this connection to the channel group. */ @Override public void channelOpen(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { ByteBuffer payload = ByteBuffer.allocate(65536); context.setAttachment(payload); channelGroup.add(context.getChannel()); } /** * Track stats and then remove from channel group */ @Override public void channelClosed(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { context.setAttachment(null); channelGroup.remove(context.getChannel()); } /** * Eat the exception, probably an improperly closed client. */ @Override public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception { LOG.error("Command handler caught exception: " + e, e.getCause()); } /** * Turn CommandMessages into executions against the CS and then pass on downstream message */ @Override @SuppressWarnings("unchecked") public void messageReceived(ChannelHandlerContext context, MessageEvent event) throws Exception { if (!(event.getMessage() instanceof CommandMessage)) { // Ignore what this encoder can't encode. 
context.sendUpstream(event); return; } CommandMessage<CacheElement> command = (CommandMessage<CacheElement>) event.getMessage(); Command cmdOp = command.cmd; if(LOG.isDebugEnabled()) { StringBuilder msg = new StringBuilder(); msg.append(command.cmd); if(command.element != null) { msg.append(" ").append(command.element.getKeystring()); } for(int i = 0; i < command.keys.size(); ++i) { msg.append(" ").append(command.keys.get(i)); } LOG.debug(msg.toString()); } Channel channel = event.getChannel(); switch(cmdOp) { case GET: case GETS: handleGets(context, command, channel); break; case SET: handleSet(context, command, channel); break; case CAS: handleCas(context, command, channel); break; case ADD: handleAdd(context, command, channel); break; case REPLACE: handleReplace(context, command, channel); break; case APPEND: handleAppend(context, command, channel); break; case PREPEND: handlePrepend(context, command, channel); break; case INCR: handleIncr(context, command, channel); break; case DECR: handleDecr(context, command, channel); break; case DELETE: handleDelete(context, command, channel); break; case STATS: handleStats(context, command, channel); break; case VERSION: handleVersion(context, command, channel); break; case QUIT: handleQuit(channel); break; case FLUSH_ALL: handleFlush(context, command, channel); break; default: if(cmdOp == null) { handleNoOp(context, command); } else { throw new UnknownCommandException("unknown command:" + cmdOp); } } } protected void handleNoOp(ChannelHandlerContext context, CommandMessage<CacheElement> command) { Channels.fireMessageReceived(context, new ResponseMessage(command)); } protected void handleFlush(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { boolean flushSuccess = false; // flushSuccess = cache.flush_all(command.time) Channels.fireMessageReceived(context, new ResponseMessage(command).withFlushResponse(flushSuccess), channel.getRemoteAddress()); } protected void handleQuit(Channel 
channel) { channel.disconnect(); } protected void handleVersion(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { ResponseMessage responseMessage = new ResponseMessage(command); responseMessage.version = MemCacheDaemon.memcachedVersion; Channels.fireMessageReceived(context, responseMessage, channel.getRemoteAddress()); } protected void handleStats(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { // String option = ""; // if(command.keys.size() > 0) { // option = new String(command.keys.get(0)); // } Map<String, Set<String>> statResponse = null; // statResponse = cache.stat(option) Channels.fireMessageReceived(context, new ResponseMessage(command).withStatResponse(statResponse), channel.getRemoteAddress()); } protected void handleDelete(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.DeleteResponse dr = null; //dr = cache.delete(command.keys.get(0), command.time); Channels.fireMessageReceived(context, new ResponseMessage(command).withDeleteResponse(dr), channel.getRemoteAddress()); } protected void handleDecr(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Integer incrDecrResp = null; //incDecrResp = cache.get_add(command.keys.get(0), -1 * command.incrAmount); Channels.fireMessageReceived(context, new ResponseMessage(command).withIncrDecrResponse(incrDecrResp), channel.getRemoteAddress()); } protected void handleIncr(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Integer incrDecrResp = null; //incRecrResp = cache.get_add(command.keys.get(0), command.incrAmount); // TODO support default value and expiry!! 
Channels.fireMessageReceived(context, new ResponseMessage(command).withIncrDecrResponse(incrDecrResp), channel.getRemoteAddress()); } protected void handlePrepend(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.prepend(command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void handleAppend(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.append(command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void handleReplace(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.replace(command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void handleAdd(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.add(command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void handleCas(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.cas(command.cas_key, command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void handleSet(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { Cache.StoreResponse ret = null; //ret = cache.set(command.element); Channels.fireMessageReceived(context, new ResponseMessage(command).withResponse(ret), channel.getRemoteAddress()); } protected void 
handleGets(ChannelHandlerContext context, CommandMessage<CacheElement> command, Channel channel) { String[] keys = new String[command.keys.size()]; keys = command.keys.toArray(keys); byte[] key = keys[0].getBytes(); String request = new String(key); byte[] result_bytes; try { AkibanByteOutputStream outputStream = new AkibanByteOutputStream(1024); hapiProcessor.processRequest(session.get(), request, formatGetter.getFormat(), outputStream); result_bytes = outputStream.getBytesNoCopy(); } catch (Exception e) { result_bytes = ("error: " + e.getMessage()).getBytes(); } CacheElement[] results = null; if(result_bytes != null) { LocalCacheElement element = new LocalCacheElement(keys[0]); element.setData(result_bytes); results = new CacheElement[] { element }; } ResponseMessage<CacheElement> resp = new ResponseMessage<CacheElement>(command).withElements(results); Channels.fireMessageReceived(context, resp, channel.getRemoteAddress()); } private static class AkibanByteOutputStream extends ByteArrayOutputStream { private AkibanByteOutputStream(int size) { super(size); } public byte[] getBytesNoCopy() { return buf; } } }
Removing an unneeded (and expensive!) ByteBuffer allocation
src/main/java/com/akiban/cserver/service/memcache/AkibanCommandHandler.java
Removing an unneeded (and expensive!) ByteBuffer allocation
<ide><path>rc/main/java/com/akiban/cserver/service/memcache/AkibanCommandHandler.java <ide> package com.akiban.cserver.service.memcache; <ide> <ide> import java.io.ByteArrayOutputStream; <del>import java.nio.ByteBuffer; <ide> import java.util.Map; <ide> import java.util.Set; <ide> <ide> */ <ide> @Override <ide> public void channelOpen(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { <del> ByteBuffer payload = ByteBuffer.allocate(65536); <del> context.setAttachment(payload); <ide> channelGroup.add(context.getChannel()); <ide> } <ide> <ide> */ <ide> @Override <ide> public void channelClosed(ChannelHandlerContext context, ChannelStateEvent event) throws Exception { <del> context.setAttachment(null); <ide> channelGroup.remove(context.getChannel()); <ide> } <ide>
JavaScript
mit
05c5fa057dcd885191cb504db72cff10fef80ee5
0
syzoj/syzoj,syzoj/syzoj
let Problem = syzoj.model('problem'); let JudgeState = syzoj.model('judge_state'); let FormattedCode = syzoj.model('formatted_code'); let CustomTest = syzoj.model('custom_test'); let WaitingJudge = syzoj.model('waiting_judge'); let Contest = syzoj.model('contest'); let ProblemTag = syzoj.model('problem_tag'); let ProblemTagMap = syzoj.model('problem_tag_map'); let Article = syzoj.model('article'); const Sequelize = require('sequelize'); let Judger = syzoj.lib('judger'); let CodeFormatter = syzoj.lib('code_formatter'); app.get('/problems', async (req, res) => { try { const sort = req.query.sort || syzoj.config.sorting.problem.field; const order = req.query.order || syzoj.config.sorting.problem.order; if (!['id', 'title', 'rating', 'ac_num', 'submit_num', 'ac_rate', 'publicize_time'].includes(sort) || !['asc', 'desc'].includes(order)) { throw new ErrorMessage('错误的排序参数。'); } let sortVal = sort; if (sort === 'ac_rate') { sortVal = { raw: 'ac_num / submit_num' }; } let where = {}; if (!res.locals.user || !await res.locals.user.hasPrivilege('manage_problem')) { if (res.locals.user) { where = { $or: { is_public: 1, user_id: res.locals.user.id } }; } else { where = { is_public: 1 }; } } let paginate = syzoj.utils.paginate(await Problem.count(where), req.query.page, syzoj.config.page.problem); let problems = await Problem.query(paginate, where, [[sortVal, order]]); await problems.forEachAsync(async problem => { problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user); problem.judge_state = await problem.getJudgeState(res.locals.user, true); problem.tags = await problem.getTags(); }); res.render('problems', { allowedManageTag: res.locals.user && await res.locals.user.hasPrivilege('manage_problem_tag'), problems: problems, paginate: paginate, curSort: sort, curOrder: order === 'asc' }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problems/search', async (req, res) => { try { let id = parseInt(req.query.keyword) || 0; const sort = 
req.query.sort || syzoj.config.sorting.problem.field; const order = req.query.order || syzoj.config.sorting.problem.order; if (!['id', 'title', 'rating', 'ac_num', 'submit_num', 'ac_rate'].includes(sort) || !['asc', 'desc'].includes(order)) { throw new ErrorMessage('错误的排序参数。'); } let where = { $or: { title: { $like: `%${req.query.keyword}%` }, id: id } }; if (!res.locals.user || !await res.locals.user.hasPrivilege('manage_problem')) { if (res.locals.user) { where = { $and: [ where, { $or: { is_public: 1, user_id: res.locals.user.id } } ] }; } else { where = { $and: [ where, { is_public: 1 } ] }; } } let sortVal = sort; if (sort === 'ac_rate') { sortVal = { raw: 'ac_num / submit_num' }; } let paginate = syzoj.utils.paginate(await Problem.count(where), req.query.page, syzoj.config.page.problem); let problems = await Problem.query(paginate, where, [syzoj.db.literal('`id` = ' + id + ' DESC'), [sortVal, order]]); await problems.forEachAsync(async problem => { problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user); problem.judge_state = await problem.getJudgeState(res.locals.user, true); problem.tags = await problem.getTags(); }); res.render('problems', { allowedManageTag: res.locals.user && await res.locals.user.hasPrivilege('manage_problem_tag'), problems: problems, paginate: paginate, curSort: sort, curOrder: order === 'asc' }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problems/tag/:tagIDs', async (req, res) => { try { let tagIDs = Array.from(new Set(req.params.tagIDs.split(',').map(x => parseInt(x)))); let tags = await tagIDs.mapAsync(async tagID => ProblemTag.fromID(tagID)); const sort = req.query.sort || syzoj.config.sorting.problem.field; const order = req.query.order || syzoj.config.sorting.problem.order; if (!['id', 'title', 'rating', 'ac_num', 'submit_num', 'ac_rate'].includes(sort) || !['asc', 'desc'].includes(order)) { throw new ErrorMessage('错误的排序参数。'); } let sortVal; if (sort === 'ac_rate') { sortVal = 
'`problem`.`ac_num` / `problem`.`submit_num`'; } else { sortVal = '`problem`.`' + sort + '`'; } // Validate the tagIDs for (let tag of tags) { if (!tag) { return res.redirect(syzoj.utils.makeUrl(['problems'])); } } let sql = 'SELECT * FROM `problem` WHERE\n'; for (let tagID of tagIDs) { if (tagID !== tagIDs[0]) { sql += 'AND\n'; } sql += '`problem`.`id` IN (SELECT `problem_id` FROM `problem_tag_map` WHERE `tag_id` = ' + tagID + ')'; } if (!res.locals.user || !await res.locals.user.hasPrivilege('manage_problem')) { if (res.locals.user) { sql += 'AND (`problem`.`is_public` = 1 OR `problem`.`user_id` = ' + res.locals.user.id + ')'; } else { sql += 'AND (`problem`.`is_public` = 1)'; } } let paginate = syzoj.utils.paginate(await Problem.count(sql), req.query.page, syzoj.config.page.problem); let problems = await Problem.query(sql + ` ORDER BY ${sortVal} ${order} ` + paginate.toSQL()); await problems.forEachAsync(async problem => { problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user); problem.judge_state = await problem.getJudgeState(res.locals.user, true); problem.tags = await problem.getTags(); }); res.render('problems', { allowedManageTag: res.locals.user && await res.locals.user.hasPrivilege('manage_problem_tag'), problems: problems, tags: tags, paginate: paginate, curSort: sort, curOrder: order === 'asc' }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problem/:id', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedUseBy(res.locals.user)) { throw new ErrorMessage('您没有权限进行此操作。'); } problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user); problem.allowedManage = await problem.isAllowedManageBy(res.locals.user); if (problem.is_public || problem.allowedEdit) { await syzoj.utils.markdown(problem, ['description', 'input_format', 'output_format', 'example', 'limit_and_hint']); } else { 
throw new ErrorMessage('您没有权限进行此操作。'); } let state = await problem.getJudgeState(res.locals.user, false); problem.tags = await problem.getTags(); await problem.loadRelationships(); let testcases = await syzoj.utils.parseTestdata(problem.getTestdataPath(), problem.type === 'submit-answer'); let discussionCount = await Article.count({ problem_id: id }); res.render('problem', { problem: problem, state: state, lastLanguage: res.locals.user ? await res.locals.user.getLastSubmitLanguage() : null, testcases: testcases, discussionCount: discussionCount }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problem/:id/export', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem || !problem.is_public) throw new ErrorMessage('无此题目。'); let obj = { title: problem.title, description: problem.description, input_format: problem.input_format, output_format: problem.output_format, example: problem.example, limit_and_hint: problem.limit_and_hint, time_limit: problem.time_limit, memory_limit: problem.memory_limit, have_additional_file: problem.additional_file_id != null, file_io: problem.file_io, file_io_input_name: problem.file_io_input_name, file_io_output_name: problem.file_io_output_name, type: problem.type, tags: [] }; let tags = await problem.getTags(); obj.tags = tags.map(tag => tag.name); res.send({ success: true, obj: obj }); } catch (e) { syzoj.log(e); res.send({ success: false, error: e }); } }); app.get('/problem/:id/edit', async (req, res) => { try { let id = parseInt(req.params.id) || 0; let problem = await Problem.fromID(id); if (!problem) { if (!res.locals.user) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': req.originalUrl }) }); problem = await Problem.create(); problem.id = id; problem.allowedEdit = true; problem.tags = []; problem.new = true; } else { if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); 
problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user); problem.tags = await problem.getTags(); } problem.allowedManage = await problem.isAllowedManageBy(res.locals.user); res.render('problem_edit', { problem: problem }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.post('/problem/:id/edit', async (req, res) => { try { let id = parseInt(req.params.id) || 0; let problem = await Problem.fromID(id); if (!problem) { if (!res.locals.user) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': req.originalUrl }) }); problem = await Problem.create(); if (await res.locals.user.hasPrivilege('manage_problem')) { let customID = parseInt(req.body.id); if (customID) { if (await Problem.fromID(customID)) throw new ErrorMessage('ID 已被使用。'); problem.id = customID; } else if (id) problem.id = id; } problem.user_id = res.locals.user.id; problem.publicizer_id = res.locals.user.id; } else { if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (await res.locals.user.hasPrivilege('manage_problem')) { let customID = parseInt(req.body.id); if (customID && customID !== id) { if (await Problem.fromID(customID)) throw new ErrorMessage('ID 已被使用。'); await problem.changeID(customID); } } } if (!req.body.title.trim()) throw new ErrorMessage('题目名不能为空。'); problem.title = req.body.title; problem.description = req.body.description; problem.input_format = req.body.input_format; problem.output_format = req.body.output_format; problem.example = req.body.example; problem.limit_and_hint = req.body.limit_and_hint; problem.is_anonymous = (req.body.is_anonymous === 'on'); // Save the problem first, to have the `id` allocated await problem.save(); if (!req.body.tags) { req.body.tags = []; } else if (!Array.isArray(req.body.tags)) { req.body.tags = [req.body.tags]; } let newTagIDs = await req.body.tags.map(x => 
parseInt(x)).filterAsync(async x => ProblemTag.fromID(x)); await problem.setTags(newTagIDs); res.redirect(syzoj.utils.makeUrl(['problem', problem.id])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problem/:id/import', async (req, res) => { try { let id = parseInt(req.params.id) || 0; let problem = await Problem.fromID(id); if (!problem) { if (!res.locals.user) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': req.originalUrl }) }); problem = await Problem.create(); problem.id = id; problem.new = true; problem.user_id = res.locals.user.id; problem.publicizer_id = res.locals.user.id; } else { if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); } problem.allowedManage = await problem.isAllowedManageBy(res.locals.user); res.render('problem_import', { problem: problem }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.post('/problem/:id/import', async (req, res) => { try { let id = parseInt(req.params.id) || 0; let problem = await Problem.fromID(id); if (!problem) { if (!res.locals.user) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': req.originalUrl }) }); problem = await Problem.create(); if (await res.locals.user.hasPrivilege('manage_problem')) { let customID = parseInt(req.body.id); if (customID) { if (await Problem.fromID(customID)) throw new ErrorMessage('ID 已被使用。'); problem.id = customID; } else if (id) problem.id = id; } problem.user_id = res.locals.user.id; problem.publicizer_id = res.locals.user.id; } else { if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); } let request = require('request-promise'); let url = require('url'); let json = await request({ uri: req.body.url + 
(req.body.url.endsWith('/') ? 'export' : '/export'), timeout: 1500, json: true }); if (!json.success) throw new ErrorMessage('题目加载失败。', null, json.error); if (!json.obj.title.trim()) throw new ErrorMessage('题目名不能为空。'); problem.title = json.obj.title; problem.description = json.obj.description; problem.input_format = json.obj.input_format; problem.output_format = json.obj.output_format; problem.example = json.obj.example; problem.limit_and_hint = json.obj.limit_and_hint; problem.time_limit = json.obj.time_limit; problem.memory_limit = json.obj.memory_limit; problem.file_io = json.obj.file_io; problem.file_io_input_name = json.obj.file_io_input_name; problem.file_io_output_name = json.obj.file_io_output_name; if (json.obj.type) problem.type = json.obj.type; let validateMsg = await problem.validate(); if (validateMsg) throw new ErrorMessage('无效的题目数据配置。', null, validateMsg); await problem.save(); let tagIDs = (await json.obj.tags.mapAsync(name => ProblemTag.findOne({ where: { name: name } }))).filter(x => x).map(tag => tag.id); await problem.setTags(tagIDs); let download = require('download'); let tmp = require('tmp-promise'); let tmpFile = await tmp.file(); let fs = require('bluebird').promisifyAll(require('fs')); try { let data = await download(req.body.url + (req.body.url.endsWith('/') ? 'testdata/download' : '/testdata/download')); await fs.writeFileAsync(tmpFile.path, data); await problem.updateTestdata(tmpFile.path, await res.locals.user.hasPrivilege('manage_problem')); if (json.obj.have_additional_file) { let additional_file = await download(req.body.url + (req.body.url.endsWith('/') ? 
'download/additional_file' : '/download/additional_file')); await fs.writeFileAsync(tmpFile.path, additional_file); await problem.updateFile(tmpFile.path, 'additional_file', await res.locals.user.hasPrivilege('manage_problem')); } } catch (e) { syzoj.log(e); } res.redirect(syzoj.utils.makeUrl(['problem', problem.id])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); // The 'manage' is not `allow manage`'s 'manage', I just have no better name for it. app.get('/problem/:id/manage', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); await problem.loadRelationships(); let testcases = await syzoj.utils.parseTestdata(problem.getTestdataPath(), problem.type === 'submit-answer'); res.render('problem_manage', { problem: problem, testcases: testcases }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.post('/problem/:id/manage', app.multer.fields([{ name: 'testdata', maxCount: 1 }, { name: 'additional_file', maxCount: 1 }]), async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); await problem.loadRelationships(); problem.time_limit = req.body.time_limit; problem.memory_limit = req.body.memory_limit; if (req.body.type === 'traditional') { problem.file_io = req.body.io_method === 'file-io'; problem.file_io_input_name = req.body.file_io_input_name; problem.file_io_output_name = req.body.file_io_output_name; } if (problem.type === 'submit-answer' && req.body.type !== 'submit-answer' || problem.type !== 'submit-answer' && req.body.type === 'submit-answer') { if (await JudgeState.count({ problem_id: id }) !== 0) { throw new ErrorMessage('已有提交的题目不允许在提交答案和非提交答案之间更改。'); } } 
problem.type = req.body.type; let validateMsg = await problem.validate(); if (validateMsg) throw new ErrorMessage('无效的题目数据配置。', null, validateMsg); if (req.files['testdata']) { await problem.updateTestdata(req.files['testdata'][0].path, await res.locals.user.hasPrivilege('manage_problem')); } if (req.files['additional_file']) { await problem.updateFile(req.files['additional_file'][0].path, 'additional_file', await res.locals.user.hasPrivilege('manage_problem')); } await problem.save(); res.redirect(syzoj.utils.makeUrl(['problem', id, 'manage'])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); // Set problem public async function setPublic(req, res, is_public) { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); let allowedManage = await problem.isAllowedManageBy(res.locals.user); if (!allowedManage) throw new ErrorMessage('您没有权限进行此操作。'); problem.is_public = is_public; problem.publicizer_id = res.locals.user.id; problem.publicize_time = new Date(); await problem.save(); JudgeState.model.update( { is_public: is_public }, { where: { problem_id: id } } ); res.redirect(syzoj.utils.makeUrl(['problem', id])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } } app.post('/problem/:id/public', async (req, res) => { await setPublic(req, res, true); }); app.post('/problem/:id/dis_public', async (req, res) => { await setPublic(req, res, false); }); app.post('/problem/:id/submit', app.multer.fields([{ name: 'answer', maxCount: 1 }]), async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); const curUser = res.locals.user; if (!problem) throw new ErrorMessage('无此题目。'); if (problem.type !== 'submit-answer' && !syzoj.config.enabled_languages.includes(req.body.language)) throw new ErrorMessage('不支持该语言。'); if (!curUser) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': syzoj.utils.makeUrl(['problem', 
id]) }) }); let judge_state; if (problem.type === 'submit-answer') { let File = syzoj.model('file'), path; if (!req.files['answer']) { // Submited by editor try { path = await File.zipFiles(JSON.parse(req.body.answer_by_editor)); } catch (e) { throw new ErrorMessage('无法解析提交数据。'); } } else { if (req.files['answer'][0].size > syzoj.config.limit.submit_answer) throw new ErrorMessage('答案文件太大。'); path = req.files['answer'][0].path; } let file = await File.upload(path, 'answer'); let size = await file.getUnzipSize(); if (size > syzoj.config.limit.submit_answer) throw new ErrorMessage('答案文件太大。'); if (!file.md5) throw new ErrorMessage('上传答案文件失败。'); judge_state = await JudgeState.create({ code: file.md5, code_length: size, language: null, user_id: curUser.id, problem_id: req.params.id, is_public: problem.is_public }); } else { let code; if (req.files['answer']) { if (req.files['answer'][0].size > syzoj.config.limit.submit_code) throw new ErrorMessage('代码文件太大。'); let fs = Promise.promisifyAll(require('fs')); code = (await fs.readFileAsync(req.files['answer'][0].path)).toString(); } else { if (req.body.code.length > syzoj.config.limit.submit_code) throw new ErrorMessage('代码太长。'); code = req.body.code; } judge_state = await JudgeState.create({ code: code, code_length: code.length, language: req.body.language, user_id: curUser.id, problem_id: req.params.id, is_public: problem.is_public }); } let contest_id = parseInt(req.query.contest_id); let contest; if (contest_id) { contest = await Contest.fromID(contest_id); if (!contest) throw new ErrorMessage('无此比赛。'); if ((!contest.isRunning()) && (!await contest.isSupervisior(curUser))) throw new ErrorMessage('比赛未开始或已结束。'); let problems_id = await contest.getProblems(); if (!problems_id.includes(id)) throw new ErrorMessage('无此题目。'); judge_state.type = 1; judge_state.type_info = contest_id; await judge_state.save(); } else { if (!await problem.isAllowedUseBy(curUser)) throw new ErrorMessage('您没有权限进行此操作。'); judge_state.type = 0; await 
judge_state.save(); } await judge_state.updateRelatedInfo(true); if (problem.type !== 'submit-answer' && syzoj.languages[req.body.language].format) { let key = syzoj.utils.getFormattedCodeKey(judge_state.code, req.body.language); let formattedCode = await FormattedCode.findOne({ where: { key: key } }); if (!formattedCode) { let formatted = await CodeFormatter(judge_state.code, syzoj.languages[req.body.language].format); if (formatted) { formattedCode = await FormattedCode.create({ key: key, code: formatted }); try { await formattedCode.save(); } catch (e) {} } } } try { await Judger.judge(judge_state, problem, contest_id ? 3 : 2); judge_state.pending = true; judge_state.status = 'Waiting'; await judge_state.save(); } catch (err) { throw new ErrorMessage(`无法开始评测:${err.toString()}`); } if (contest && (!await contest.isSupervisior(curUser))) { res.redirect(syzoj.utils.makeUrl(['contest', contest_id, 'submissions'])); } else { res.redirect(syzoj.utils.makeUrl(['submission', judge_state.id])); } } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.post('/problem/:id/delete', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!problem.isAllowedManageBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); await problem.delete(); res.redirect(syzoj.utils.makeUrl(['problem'])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problem/:id/testdata', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); let testdata = await problem.listTestdata(); let testcases = await syzoj.utils.parseTestdata(problem.getTestdataPath(), problem.type === 'submit-answer'); problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user) 
res.render('problem_data', { problem: problem, testdata: testdata, testcases: testcases }); } catch (e) { syzoj.log(e); res.status(404); res.render('error', { err: e }); } }); app.post('/problem/:id/testdata/upload', app.multer.array('file'), async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (req.files) { for (let file of req.files) { await problem.uploadTestdataSingleFile(file.originalname, file.path, file.size, await res.locals.user.hasPrivilege('manage_problem')); } } res.redirect(syzoj.utils.makeUrl(['problem', id, 'testdata'])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.post('/problem/:id/testdata/delete/:filename', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); await problem.deleteTestdataSingleFile(req.params.filename); res.redirect(syzoj.utils.makeUrl(['problem', id, 'testdata'])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problem/:id/testdata/download/:filename?', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (!req.params.filename) { if (!await syzoj.utils.isFile(problem.getTestdataArchivePath())) { await problem.makeTestdataZip(); } } let path = require('path'); let filename = req.params.filename ? 
path.join(problem.getTestdataPath(), req.params.filename) : (problem.getTestdataArchivePath()); if (!await syzoj.utils.isFile(filename)) throw new ErrorMessage('文件不存在。'); res.download(filename, path.basename(filename)); } catch (e) { syzoj.log(e); res.status(404); res.render('error', { err: e }); } }); app.get('/problem/:id/download/additional_file', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); // XXX: Reduce duplication (see the '/problem/:id/submit' handler) let contest_id = parseInt(req.query.contest_id); if (contest_id) { let contest = await Contest.fromID(contest_id); if (!contest) throw new ErrorMessage('无此比赛。'); if (!contest.isRunning()) throw new ErrorMessage('比赛未开始或已结束。'); let problems_id = await contest.getProblems(); if (!problems_id.includes(id)) throw new ErrorMessage('无此题目。'); } else { if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); } await problem.loadRelationships(); if (!problem.additional_file) throw new ErrorMessage('无附加文件。'); res.download(problem.additional_file.getPath(), `additional_file_${id}.zip`); } catch (e) { syzoj.log(e); res.status(404); res.render('error', { err: e }); } }); app.get('/problem/:id/statistics/:type', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); let count = await problem.countStatistics(req.params.type); if (count === null) throw new ErrorMessage('无此统计类型。'); let paginate = syzoj.utils.paginate(count, req.query.page, syzoj.config.page.problem_statistics); let statistics = await problem.getStatistics(req.params.type, paginate); await statistics.judge_state.forEachAsync(async x => x.loadRelationships()); res.render('statistics', { statistics: statistics, paginate: paginate, problem: 
problem }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); /* app.post('/problem/:id/custom-test', app.multer.fields([{ name: 'code_upload', maxCount: 1 }, { name: 'input_file', maxCount: 1 }]), async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!res.locals.user) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': syzoj.utils.makeUrl(['problem', id]) }) }); if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); let filepath; if (req.files['input_file']) { if (req.files['input_file'][0].size > syzoj.config.limit.custom_test_input) throw new ErrorMessage('输入数据过长。'); filepath = req.files['input_file'][0].path; } else { if (req.body.input_file_textarea.length > syzoj.config.limit.custom_test_input) throw new ErrorMessage('输入数据过长。'); filepath = await require('tmp-promise').tmpName({ template: '/tmp/tmp-XXXXXX' }); await require('fs-extra').writeFileAsync(filepath, req.body.input_file_textarea); } let code; if (req.files['code_upload']) { if (req.files['code_upload'][0].size > syzoj.config.limit.submit_code) throw new ErrorMessage('代码过长。'); code = (await require('fs-extra').readFileAsync(req.files['code_upload'][0].path)).toString(); } else { if (req.body.code.length > syzoj.config.limit.submit_code) throw new ErrorMessage('代码过长。'); code = req.body.code; } let custom_test = await CustomTest.create({ input_filepath: filepath, code: code, language: req.body.language, user_id: res.locals.user.id, problem_id: id }); await custom_test.save(); let waiting_judge = await WaitingJudge.create({ judge_id: custom_test.id, priority: 3, type: 'custom_test' }); await waiting_judge.save(); res.send({ id: custom_test.id }); } catch (e) { syzoj.log(e); res.send({ err: e }); } }); */
modules/problem.js
let Problem = syzoj.model('problem'); let JudgeState = syzoj.model('judge_state'); let FormattedCode = syzoj.model('formatted_code'); let CustomTest = syzoj.model('custom_test'); let WaitingJudge = syzoj.model('waiting_judge'); let Contest = syzoj.model('contest'); let ProblemTag = syzoj.model('problem_tag'); let ProblemTagMap = syzoj.model('problem_tag_map'); let Article = syzoj.model('article'); const Sequelize = require('sequelize'); let Judger = syzoj.lib('judger'); let CodeFormatter = syzoj.lib('code_formatter'); app.get('/problems', async (req, res) => { try { const sort = req.query.sort || syzoj.config.sorting.problem.field; const order = req.query.order || syzoj.config.sorting.problem.order; if (!['id', 'title', 'rating', 'ac_num', 'submit_num', 'ac_rate', 'publicize_time'].includes(sort) || !['asc', 'desc'].includes(order)) { throw new ErrorMessage('错误的排序参数。'); } let sortVal = sort; if (sort === 'ac_rate') { sortVal = { raw: 'ac_num / submit_num' }; } let where = {}; if (!res.locals.user || !await res.locals.user.hasPrivilege('manage_problem')) { if (res.locals.user) { where = { $or: { is_public: 1, user_id: res.locals.user.id } }; } else { where = { is_public: 1 }; } } let paginate = syzoj.utils.paginate(await Problem.count(where), req.query.page, syzoj.config.page.problem); let problems = await Problem.query(paginate, where, [[sortVal, order]]); await problems.forEachAsync(async problem => { problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user); problem.judge_state = await problem.getJudgeState(res.locals.user, true); problem.tags = await problem.getTags(); }); res.render('problems', { allowedManageTag: res.locals.user && await res.locals.user.hasPrivilege('manage_problem_tag'), problems: problems, paginate: paginate, curSort: sort, curOrder: order === 'asc' }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problems/search', async (req, res) => { try { let id = parseInt(req.query.keyword) || 0; const sort = 
req.query.sort || syzoj.config.sorting.problem.field; const order = req.query.order || syzoj.config.sorting.problem.order; if (!['id', 'title', 'rating', 'ac_num', 'submit_num', 'ac_rate'].includes(sort) || !['asc', 'desc'].includes(order)) { throw new ErrorMessage('错误的排序参数。'); } let where = { $or: { title: { $like: `%${req.query.keyword}%` }, id: id } }; if (!res.locals.user || !await res.locals.user.hasPrivilege('manage_problem')) { if (res.locals.user) { where = { $and: [ where, { $or: { is_public: 1, user_id: res.locals.user.id } } ] }; } else { where = { $and: [ where, { is_public: 1 } ] }; } } let sortVal = sort; if (sort === 'ac_rate') { sortVal = { raw: 'ac_num / submit_num' }; } let paginate = syzoj.utils.paginate(await Problem.count(where), req.query.page, syzoj.config.page.problem); let problems = await Problem.query(paginate, where, [syzoj.db.literal('`id` = ' + id + ' DESC'), [sortVal, order]]); await problems.forEachAsync(async problem => { problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user); problem.judge_state = await problem.getJudgeState(res.locals.user, true); problem.tags = await problem.getTags(); }); res.render('problems', { allowedManageTag: res.locals.user && await res.locals.user.hasPrivilege('manage_problem_tag'), problems: problems, paginate: paginate, curSort: sort, curOrder: order === 'asc' }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problems/tag/:tagIDs', async (req, res) => { try { let tagIDs = Array.from(new Set(req.params.tagIDs.split(',').map(x => parseInt(x)))); let tags = await tagIDs.mapAsync(async tagID => ProblemTag.fromID(tagID)); const sort = req.query.sort || syzoj.config.sorting.problem.field; const order = req.query.order || syzoj.config.sorting.problem.order; if (!['id', 'title', 'rating', 'ac_num', 'submit_num', 'ac_rate'].includes(sort) || !['asc', 'desc'].includes(order)) { throw new ErrorMessage('错误的排序参数。'); } let sortVal; if (sort === 'ac_rate') { sortVal = 
'`problem`.`ac_num` / `problem`.`submit_num`'; } else { sortVal = '`problem`.`' + sort + '`'; } // Validate the tagIDs for (let tag of tags) { if (!tag) { return res.redirect(syzoj.utils.makeUrl(['problems'])); } } let sql = 'SELECT * FROM `problem` WHERE\n'; for (let tagID of tagIDs) { if (tagID !== tagIDs[0]) { sql += 'AND\n'; } sql += '`problem`.`id` IN (SELECT `problem_id` FROM `problem_tag_map` WHERE `tag_id` = ' + tagID + ')'; } if (!res.locals.user || !await res.locals.user.hasPrivilege('manage_problem')) { if (res.locals.user) { sql += 'AND (`problem`.`is_public` = 1 OR `problem`.`user_id` = ' + res.locals.user.id + ')'; } else { sql += 'AND (`problem`.`is_public` = 1)'; } } let paginate = syzoj.utils.paginate(await Problem.count(sql), req.query.page, syzoj.config.page.problem); let problems = await Problem.query(sql + ` ORDER BY ${sortVal} ${order} ` + paginate.toSQL()); await problems.forEachAsync(async problem => { problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user); problem.judge_state = await problem.getJudgeState(res.locals.user, true); problem.tags = await problem.getTags(); }); res.render('problems', { allowedManageTag: res.locals.user && await res.locals.user.hasPrivilege('manage_problem_tag'), problems: problems, tags: tags, paginate: paginate, curSort: sort, curOrder: order === 'asc' }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problem/:id', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedUseBy(res.locals.user)) { throw new ErrorMessage('您没有权限进行此操作。'); } problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user); problem.allowedManage = await problem.isAllowedManageBy(res.locals.user); if (problem.is_public || problem.allowedEdit) { await syzoj.utils.markdown(problem, ['description', 'input_format', 'output_format', 'example', 'limit_and_hint']); } else { 
throw new ErrorMessage('您没有权限进行此操作。'); } let state = await problem.getJudgeState(res.locals.user, false); problem.tags = await problem.getTags(); await problem.loadRelationships(); let testcases = await syzoj.utils.parseTestdata(problem.getTestdataPath(), problem.type === 'submit-answer'); let discussionCount = await Article.count({ problem_id: id }); res.render('problem', { problem: problem, state: state, lastLanguage: res.locals.user ? await res.locals.user.getLastSubmitLanguage() : null, testcases: testcases, discussionCount: discussionCount }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problem/:id/export', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem || !problem.is_public) throw new ErrorMessage('无此题目。'); let obj = { title: problem.title, description: problem.description, input_format: problem.input_format, output_format: problem.output_format, example: problem.example, limit_and_hint: problem.limit_and_hint, time_limit: problem.time_limit, memory_limit: problem.memory_limit, have_additional_file: problem.additional_file_id !== null, file_io: problem.file_io, file_io_input_name: problem.file_io_input_name, file_io_output_name: problem.file_io_output_name, type: problem.type, tags: [] }; let tags = await problem.getTags(); obj.tags = tags.map(tag => tag.name); res.send({ success: true, obj: obj }); } catch (e) { syzoj.log(e); res.send({ success: false, error: e }); } }); app.get('/problem/:id/edit', async (req, res) => { try { let id = parseInt(req.params.id) || 0; let problem = await Problem.fromID(id); if (!problem) { if (!res.locals.user) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': req.originalUrl }) }); problem = await Problem.create(); problem.id = id; problem.allowedEdit = true; problem.tags = []; problem.new = true; } else { if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); 
problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user); problem.tags = await problem.getTags(); } problem.allowedManage = await problem.isAllowedManageBy(res.locals.user); res.render('problem_edit', { problem: problem }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.post('/problem/:id/edit', async (req, res) => { try { let id = parseInt(req.params.id) || 0; let problem = await Problem.fromID(id); if (!problem) { if (!res.locals.user) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': req.originalUrl }) }); problem = await Problem.create(); if (await res.locals.user.hasPrivilege('manage_problem')) { let customID = parseInt(req.body.id); if (customID) { if (await Problem.fromID(customID)) throw new ErrorMessage('ID 已被使用。'); problem.id = customID; } else if (id) problem.id = id; } problem.user_id = res.locals.user.id; problem.publicizer_id = res.locals.user.id; } else { if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (await res.locals.user.hasPrivilege('manage_problem')) { let customID = parseInt(req.body.id); if (customID && customID !== id) { if (await Problem.fromID(customID)) throw new ErrorMessage('ID 已被使用。'); await problem.changeID(customID); } } } if (!req.body.title.trim()) throw new ErrorMessage('题目名不能为空。'); problem.title = req.body.title; problem.description = req.body.description; problem.input_format = req.body.input_format; problem.output_format = req.body.output_format; problem.example = req.body.example; problem.limit_and_hint = req.body.limit_and_hint; problem.is_anonymous = (req.body.is_anonymous === 'on'); // Save the problem first, to have the `id` allocated await problem.save(); if (!req.body.tags) { req.body.tags = []; } else if (!Array.isArray(req.body.tags)) { req.body.tags = [req.body.tags]; } let newTagIDs = await req.body.tags.map(x => 
parseInt(x)).filterAsync(async x => ProblemTag.fromID(x)); await problem.setTags(newTagIDs); res.redirect(syzoj.utils.makeUrl(['problem', problem.id])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problem/:id/import', async (req, res) => { try { let id = parseInt(req.params.id) || 0; let problem = await Problem.fromID(id); if (!problem) { if (!res.locals.user) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': req.originalUrl }) }); problem = await Problem.create(); problem.id = id; problem.new = true; problem.user_id = res.locals.user.id; problem.publicizer_id = res.locals.user.id; } else { if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); } problem.allowedManage = await problem.isAllowedManageBy(res.locals.user); res.render('problem_import', { problem: problem }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.post('/problem/:id/import', async (req, res) => { try { let id = parseInt(req.params.id) || 0; let problem = await Problem.fromID(id); if (!problem) { if (!res.locals.user) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': req.originalUrl }) }); problem = await Problem.create(); if (await res.locals.user.hasPrivilege('manage_problem')) { let customID = parseInt(req.body.id); if (customID) { if (await Problem.fromID(customID)) throw new ErrorMessage('ID 已被使用。'); problem.id = customID; } else if (id) problem.id = id; } problem.user_id = res.locals.user.id; problem.publicizer_id = res.locals.user.id; } else { if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); } let request = require('request-promise'); let url = require('url'); let json = await request({ uri: req.body.url + 
(req.body.url.endsWith('/') ? 'export' : '/export'), timeout: 1500, json: true }); if (!json.success) throw new ErrorMessage('题目加载失败。', null, json.error); if (!json.obj.title.trim()) throw new ErrorMessage('题目名不能为空。'); problem.title = json.obj.title; problem.description = json.obj.description; problem.input_format = json.obj.input_format; problem.output_format = json.obj.output_format; problem.example = json.obj.example; problem.limit_and_hint = json.obj.limit_and_hint; problem.time_limit = json.obj.time_limit; problem.memory_limit = json.obj.memory_limit; problem.file_io = json.obj.file_io; problem.file_io_input_name = json.obj.file_io_input_name; problem.file_io_output_name = json.obj.file_io_output_name; if (json.obj.type) problem.type = json.obj.type; let validateMsg = await problem.validate(); if (validateMsg) throw new ErrorMessage('无效的题目数据配置。', null, validateMsg); await problem.save(); let tagIDs = (await json.obj.tags.mapAsync(name => ProblemTag.findOne({ where: { name: name } }))).filter(x => x).map(tag => tag.id); await problem.setTags(tagIDs); let download = require('download'); let tmp = require('tmp-promise'); let tmpFile = await tmp.file(); let fs = require('bluebird').promisifyAll(require('fs')); try { let data = await download(req.body.url + (req.body.url.endsWith('/') ? 'testdata/download' : '/testdata/download')); await fs.writeFileAsync(tmpFile.path, data); await problem.updateTestdata(tmpFile.path, await res.locals.user.hasPrivilege('manage_problem')); if (json.obj.have_additional_file) { let additional_file = await download(req.body.url + (req.body.url.endsWith('/') ? 
'download/additional_file' : '/download/additional_file')); await fs.writeFileAsync(tmpFile.path, additional_file); await problem.updateFile(tmpFile.path, 'additional_file', await res.locals.user.hasPrivilege('manage_problem')); } } catch (e) { syzoj.log(e); } res.redirect(syzoj.utils.makeUrl(['problem', problem.id])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); // The 'manage' is not `allow manage`'s 'manage', I just have no better name for it. app.get('/problem/:id/manage', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); await problem.loadRelationships(); let testcases = await syzoj.utils.parseTestdata(problem.getTestdataPath(), problem.type === 'submit-answer'); res.render('problem_manage', { problem: problem, testcases: testcases }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.post('/problem/:id/manage', app.multer.fields([{ name: 'testdata', maxCount: 1 }, { name: 'additional_file', maxCount: 1 }]), async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); await problem.loadRelationships(); problem.time_limit = req.body.time_limit; problem.memory_limit = req.body.memory_limit; if (req.body.type === 'traditional') { problem.file_io = req.body.io_method === 'file-io'; problem.file_io_input_name = req.body.file_io_input_name; problem.file_io_output_name = req.body.file_io_output_name; } if (problem.type === 'submit-answer' && req.body.type !== 'submit-answer' || problem.type !== 'submit-answer' && req.body.type === 'submit-answer') { if (await JudgeState.count({ problem_id: id }) !== 0) { throw new ErrorMessage('已有提交的题目不允许在提交答案和非提交答案之间更改。'); } } 
problem.type = req.body.type; let validateMsg = await problem.validate(); if (validateMsg) throw new ErrorMessage('无效的题目数据配置。', null, validateMsg); if (req.files['testdata']) { await problem.updateTestdata(req.files['testdata'][0].path, await res.locals.user.hasPrivilege('manage_problem')); } if (req.files['additional_file']) { await problem.updateFile(req.files['additional_file'][0].path, 'additional_file', await res.locals.user.hasPrivilege('manage_problem')); } await problem.save(); res.redirect(syzoj.utils.makeUrl(['problem', id, 'manage'])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); // Set problem public async function setPublic(req, res, is_public) { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); let allowedManage = await problem.isAllowedManageBy(res.locals.user); if (!allowedManage) throw new ErrorMessage('您没有权限进行此操作。'); problem.is_public = is_public; problem.publicizer_id = res.locals.user.id; problem.publicize_time = new Date(); await problem.save(); JudgeState.model.update( { is_public: is_public }, { where: { problem_id: id } } ); res.redirect(syzoj.utils.makeUrl(['problem', id])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } } app.post('/problem/:id/public', async (req, res) => { await setPublic(req, res, true); }); app.post('/problem/:id/dis_public', async (req, res) => { await setPublic(req, res, false); }); app.post('/problem/:id/submit', app.multer.fields([{ name: 'answer', maxCount: 1 }]), async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); const curUser = res.locals.user; if (!problem) throw new ErrorMessage('无此题目。'); if (problem.type !== 'submit-answer' && !syzoj.config.enabled_languages.includes(req.body.language)) throw new ErrorMessage('不支持该语言。'); if (!curUser) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': syzoj.utils.makeUrl(['problem', 
id]) }) }); let judge_state; if (problem.type === 'submit-answer') { let File = syzoj.model('file'), path; if (!req.files['answer']) { // Submited by editor try { path = await File.zipFiles(JSON.parse(req.body.answer_by_editor)); } catch (e) { throw new ErrorMessage('无法解析提交数据。'); } } else { if (req.files['answer'][0].size > syzoj.config.limit.submit_answer) throw new ErrorMessage('答案文件太大。'); path = req.files['answer'][0].path; } let file = await File.upload(path, 'answer'); let size = await file.getUnzipSize(); if (size > syzoj.config.limit.submit_answer) throw new ErrorMessage('答案文件太大。'); if (!file.md5) throw new ErrorMessage('上传答案文件失败。'); judge_state = await JudgeState.create({ code: file.md5, code_length: size, language: null, user_id: curUser.id, problem_id: req.params.id, is_public: problem.is_public }); } else { let code; if (req.files['answer']) { if (req.files['answer'][0].size > syzoj.config.limit.submit_code) throw new ErrorMessage('代码文件太大。'); let fs = Promise.promisifyAll(require('fs')); code = (await fs.readFileAsync(req.files['answer'][0].path)).toString(); } else { if (req.body.code.length > syzoj.config.limit.submit_code) throw new ErrorMessage('代码太长。'); code = req.body.code; } judge_state = await JudgeState.create({ code: code, code_length: code.length, language: req.body.language, user_id: curUser.id, problem_id: req.params.id, is_public: problem.is_public }); } let contest_id = parseInt(req.query.contest_id); let contest; if (contest_id) { contest = await Contest.fromID(contest_id); if (!contest) throw new ErrorMessage('无此比赛。'); if ((!contest.isRunning()) && (!await contest.isSupervisior(curUser))) throw new ErrorMessage('比赛未开始或已结束。'); let problems_id = await contest.getProblems(); if (!problems_id.includes(id)) throw new ErrorMessage('无此题目。'); judge_state.type = 1; judge_state.type_info = contest_id; await judge_state.save(); } else { if (!await problem.isAllowedUseBy(curUser)) throw new ErrorMessage('您没有权限进行此操作。'); judge_state.type = 0; await 
judge_state.save(); } await judge_state.updateRelatedInfo(true); if (problem.type !== 'submit-answer' && syzoj.languages[req.body.language].format) { let key = syzoj.utils.getFormattedCodeKey(judge_state.code, req.body.language); let formattedCode = await FormattedCode.findOne({ where: { key: key } }); if (!formattedCode) { let formatted = await CodeFormatter(judge_state.code, syzoj.languages[req.body.language].format); if (formatted) { formattedCode = await FormattedCode.create({ key: key, code: formatted }); try { await formattedCode.save(); } catch (e) {} } } } try { await Judger.judge(judge_state, problem, contest_id ? 3 : 2); judge_state.pending = true; judge_state.status = 'Waiting'; await judge_state.save(); } catch (err) { throw new ErrorMessage(`无法开始评测:${err.toString()}`); } if (contest && (!await contest.isSupervisior(curUser))) { res.redirect(syzoj.utils.makeUrl(['contest', contest_id, 'submissions'])); } else { res.redirect(syzoj.utils.makeUrl(['submission', judge_state.id])); } } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.post('/problem/:id/delete', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!problem.isAllowedManageBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); await problem.delete(); res.redirect(syzoj.utils.makeUrl(['problem'])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problem/:id/testdata', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); let testdata = await problem.listTestdata(); let testcases = await syzoj.utils.parseTestdata(problem.getTestdataPath(), problem.type === 'submit-answer'); problem.allowedEdit = await problem.isAllowedEditBy(res.locals.user) 
res.render('problem_data', { problem: problem, testdata: testdata, testcases: testcases }); } catch (e) { syzoj.log(e); res.status(404); res.render('error', { err: e }); } }); app.post('/problem/:id/testdata/upload', app.multer.array('file'), async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (req.files) { for (let file of req.files) { await problem.uploadTestdataSingleFile(file.originalname, file.path, file.size, await res.locals.user.hasPrivilege('manage_problem')); } } res.redirect(syzoj.utils.makeUrl(['problem', id, 'testdata'])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.post('/problem/:id/testdata/delete/:filename', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedEditBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); await problem.deleteTestdataSingleFile(req.params.filename); res.redirect(syzoj.utils.makeUrl(['problem', id, 'testdata'])); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); app.get('/problem/:id/testdata/download/:filename?', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); if (!req.params.filename) { if (!await syzoj.utils.isFile(problem.getTestdataArchivePath())) { await problem.makeTestdataZip(); } } let path = require('path'); let filename = req.params.filename ? 
path.join(problem.getTestdataPath(), req.params.filename) : (problem.getTestdataArchivePath()); if (!await syzoj.utils.isFile(filename)) throw new ErrorMessage('文件不存在。'); res.download(filename, path.basename(filename)); } catch (e) { syzoj.log(e); res.status(404); res.render('error', { err: e }); } }); app.get('/problem/:id/download/additional_file', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); // XXX: Reduce duplication (see the '/problem/:id/submit' handler) let contest_id = parseInt(req.query.contest_id); if (contest_id) { let contest = await Contest.fromID(contest_id); if (!contest) throw new ErrorMessage('无此比赛。'); if (!contest.isRunning()) throw new ErrorMessage('比赛未开始或已结束。'); let problems_id = await contest.getProblems(); if (!problems_id.includes(id)) throw new ErrorMessage('无此题目。'); } else { if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); } await problem.loadRelationships(); if (!problem.additional_file) throw new ErrorMessage('无附加文件。'); res.download(problem.additional_file.getPath(), `additional_file_${id}.zip`); } catch (e) { syzoj.log(e); res.status(404); res.render('error', { err: e }); } }); app.get('/problem/:id/statistics/:type', async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); let count = await problem.countStatistics(req.params.type); if (count === null) throw new ErrorMessage('无此统计类型。'); let paginate = syzoj.utils.paginate(count, req.query.page, syzoj.config.page.problem_statistics); let statistics = await problem.getStatistics(req.params.type, paginate); await statistics.judge_state.forEachAsync(async x => x.loadRelationships()); res.render('statistics', { statistics: statistics, paginate: paginate, problem: 
problem }); } catch (e) { syzoj.log(e); res.render('error', { err: e }); } }); /* app.post('/problem/:id/custom-test', app.multer.fields([{ name: 'code_upload', maxCount: 1 }, { name: 'input_file', maxCount: 1 }]), async (req, res) => { try { let id = parseInt(req.params.id); let problem = await Problem.fromID(id); if (!problem) throw new ErrorMessage('无此题目。'); if (!res.locals.user) throw new ErrorMessage('请登录后继续。', { '登录': syzoj.utils.makeUrl(['login'], { 'url': syzoj.utils.makeUrl(['problem', id]) }) }); if (!await problem.isAllowedUseBy(res.locals.user)) throw new ErrorMessage('您没有权限进行此操作。'); let filepath; if (req.files['input_file']) { if (req.files['input_file'][0].size > syzoj.config.limit.custom_test_input) throw new ErrorMessage('输入数据过长。'); filepath = req.files['input_file'][0].path; } else { if (req.body.input_file_textarea.length > syzoj.config.limit.custom_test_input) throw new ErrorMessage('输入数据过长。'); filepath = await require('tmp-promise').tmpName({ template: '/tmp/tmp-XXXXXX' }); await require('fs-extra').writeFileAsync(filepath, req.body.input_file_textarea); } let code; if (req.files['code_upload']) { if (req.files['code_upload'][0].size > syzoj.config.limit.submit_code) throw new ErrorMessage('代码过长。'); code = (await require('fs-extra').readFileAsync(req.files['code_upload'][0].path)).toString(); } else { if (req.body.code.length > syzoj.config.limit.submit_code) throw new ErrorMessage('代码过长。'); code = req.body.code; } let custom_test = await CustomTest.create({ input_filepath: filepath, code: code, language: req.body.language, user_id: res.locals.user.id, problem_id: id }); await custom_test.save(); let waiting_judge = await WaitingJudge.create({ judge_id: custom_test.id, priority: 3, type: 'custom_test' }); await waiting_judge.save(); res.send({ id: custom_test.id }); } catch (e) { syzoj.log(e); res.send({ err: e }); } }); */
Fix have_additional_file always true
modules/problem.js
Fix have_additional_file always true
<ide><path>odules/problem.js <ide> limit_and_hint: problem.limit_and_hint, <ide> time_limit: problem.time_limit, <ide> memory_limit: problem.memory_limit, <del> have_additional_file: problem.additional_file_id !== null, <add> have_additional_file: problem.additional_file_id != null, <ide> file_io: problem.file_io, <ide> file_io_input_name: problem.file_io_input_name, <ide> file_io_output_name: problem.file_io_output_name,
Java
apache-2.0
12db8730028519d141f41fb88ae2d618458a6737
0
spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework,spring-projects/spring-framework
/* * Copyright 2002-2013 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.web.context.request; /** * Abstraction for accessing attribute objects associated with a request. * Supports access to request-scoped attributes as well as to session-scoped * attributes, with the optional notion of a "global session". * * <p>Can be implemented for any kind of request/session mechanism, * in particular for servlet requests and portlet requests. * * @author Juergen Hoeller * @since 2.0 * @see ServletRequestAttributes * @see org.springframework.web.portlet.context.PortletRequestAttributes */ public interface RequestAttributes { /** * Constant that indicates request scope. */ int SCOPE_REQUEST = 0; /** * Constant that indicates session scope. * <p>This preferably refers to a locally isolated session, if such * a distinction is available (for example, in a Portlet environment). * Else, it simply refers to the common session. */ int SCOPE_SESSION = 1; /** * Constant that indicates global session scope. * <p>This explicitly refers to a globally shared session, if such * a distinction is available (for example, in a Portlet environment). * Else, it simply refers to the common session. */ int SCOPE_GLOBAL_SESSION = 2; /** * Name of the standard reference to the request object: "request". * @see #resolveReference */ String REFERENCE_REQUEST = "request"; /** * Name of the standard reference to the session object: "session". 
* @see #resolveReference */ String REFERENCE_SESSION = "session"; /** * Return the value for the scoped attribute of the given name, if any. * @param name the name of the attribute * @param scope the scope identifier * @return the current attribute value, or {@code null} if not found */ Object getAttribute(String name, int scope); /** * Set the value for the scoped attribute of the given name, * replacing an existing value (if any). * @param name the name of the attribute * @param scope the scope identifier * @param value the value for the attribute */ void setAttribute(String name, Object value, int scope); /** * Remove the scoped attribute of the given name, if it exists. * <p>Note that an implementation should also remove a registered destruction * callback for the specified attribute, if any. It does, however, <i>not</i> * need to <i>execute</i> a registered destruction callback in this case, * since the object will be destroyed by the caller (if appropriate). * @param name the name of the attribute * @param scope the scope identifier */ void removeAttribute(String name, int scope); /** * Retrieve the names of all attributes in the scope. * @param scope the scope identifier * @return the attribute names as String array */ String[] getAttributeNames(int scope); /** * Register a callback to be executed on destruction of the * specified attribute in the given scope. * <p>Implementations should do their best to execute the callback * at the appropriate time: that is, at request completion or session * termination, respectively. If such a callback is not supported by the * underlying runtime environment, the callback <i>must be ignored</i> * and a corresponding warning should be logged. * <p>Note that 'destruction' usually corresponds to destruction of the * entire scope, not to the individual attribute having been explicitly * removed by the application. 
If an attribute gets removed via this * facade's {@link #removeAttribute(String, int)} method, any registered * destruction callback should be disabled as well, assuming that the * removed object will be reused or manually destroyed. * <p><b>NOTE:</b> Callback objects should generally be serializable if * they are being registered for a session scope. Otherwise the callback * (or even the entire session) might not survive web app restarts. * @param name the name of the attribute to register the callback for * @param callback the destruction callback to be executed * @param scope the scope identifier */ void registerDestructionCallback(String name, Runnable callback, int scope); /** * Resolve the contextual reference for the given key, if any. * <p>At a minimum: the HttpServletRequest/PortletRequest reference for key * "request", and the HttpSession/PortletSession reference for key "session". * @param key the contextual key * @return the corresponding object, or {@code null} if none found */ Object resolveReference(String key); /** * Return an id for the current underlying session. * @return the session id as String (never {@code null}) */ String getSessionId(); /** * Expose the best available mutex for the underlying session: * that is, an object to synchronize on for the underlying session. * @return the session mutex to use (never {@code null}) */ Object getSessionMutex(); }
spring-web/src/main/java/org/springframework/web/context/request/RequestAttributes.java
/* * Copyright 2002-2012 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.web.context.request; /** * Abstraction for accessing attribute objects associated with a request. * Supports access to request-scoped attributes as well as to session-scoped * attributes, with the optional notion of a "global session". * * <p>Can be implemented for any kind of request/session mechanism, * in particular for servlet requests and portlet requests. * * @author Juergen Hoeller * @since 2.0 * @see ServletRequestAttributes * @see org.springframework.web.portlet.context.PortletRequestAttributes */ public interface RequestAttributes { /** * Constant that indicates request scope. */ int SCOPE_REQUEST = 0; /** * Constant that indicates session scope. * <p>This preferably refers to a locally isolated session, if such * a distinction is available (for example, in a Portlet environment). * Else, it simply refers to the common session. */ int SCOPE_SESSION = 1; /** * Constant that indicates global session scope. * <p>This explicitly refers to a globally shared session, if such * a distinction is available (for example, in a Portlet environment). * Else, it simply refers to the common session. */ int SCOPE_GLOBAL_SESSION = 2; /** * Name of the standard reference to the request object: "request". * @see #resolveReference */ String REFERENCE_REQUEST = "request"; /** * Name of the standard reference to the session object: "session". 
* @see #resolveReference */ String REFERENCE_SESSION = "session"; /** * Return the value for the scoped attribute of the given name, if any. * @param name the name of the attribute * @param scope the scope identifier * @return the current attribute value, or {@code null} if not found */ Object getAttribute(String name, int scope); /** * Set the value for the scoped attribute of the given name, * replacing an existing value (if any). * @param name the name of the attribute * @param scope the scope identifier * @param value the value for the attribute */ void setAttribute(String name, Object value, int scope); /** * Remove the scoped attribute of the given name, if it exists. * <p>Note that an implementation should also remove a registered destruction * callback for the specified attribute, if any. It does, however, <i>not</i> * need to <i>execute</i> a registered destruction callback in this case, * since the object will be destroyed by the caller (if appropriate). * @param name the name of the attribute * @param scope the scope identifier */ void removeAttribute(String name, int scope); /** * Retrieve the names of all attributes in the scope. * @param scope the scope identifier * @return the attribute names as String array */ String[] getAttributeNames(int scope); /** * Register a callback to be executed on destruction of the * specified attribute in the given scope. * <p>Implementations should do their best to execute the callback * at the appropriate time: that is, at request completion or session * termination, respectively. If such a callback is not supported by the * underlying runtime environment, the callback <i>must be ignored</i> * and a corresponding warning should be logged. * <p>Note that 'destruction' usually corresponds to destruction of the * entire scope, not to the individual attribute having been explicitly * removed by the application. 
If an attribute gets removed via this * facade's {@link #removeAttribute(String, int)} method, any registered * destruction callback should be disabled as well, assuming that the * removed object will be reused or manually destroyed. * <p><b>NOTE:</b> Callback objects should generally be serializable if * they are being registered for a session scope. Otherwise the callback * (or even the entire session) might not survive web app restarts. * @param name the name of the attribute to register the callback for * @param callback the destruction callback to be executed * @param scope the scope identifier */ void registerDestructionCallback(String name, Runnable callback, int scope); /** * Resolve the contextual reference for the given key, if any. * <p>At a minimum: the HttpServletRequest/PortletRequest reference for key * "request", and the HttpSession/PortletSession reference for key "session". * @param key the contextual key * @return the corresponding object, or {@code null} if none found */ Object resolveReference(String key); /** * Return an id for the current underlying session. * @return the session id as String (never {@code null} */ String getSessionId(); /** * Expose the best available mutex for the underlying session: * that is, an object to synchronize on for the underlying session. * @return the session mutex to use (never {@code null} */ Object getSessionMutex(); }
Polish Javadoc in RequestAttributes
spring-web/src/main/java/org/springframework/web/context/request/RequestAttributes.java
Polish Javadoc in RequestAttributes
<ide><path>pring-web/src/main/java/org/springframework/web/context/request/RequestAttributes.java <ide> /* <del> * Copyright 2002-2012 the original author or authors. <add> * Copyright 2002-2013 the original author or authors. <ide> * <ide> * Licensed under the Apache License, Version 2.0 (the "License"); <ide> * you may not use this file except in compliance with the License. <ide> <ide> /** <ide> * Return an id for the current underlying session. <del> * @return the session id as String (never {@code null} <add> * @return the session id as String (never {@code null}) <ide> */ <ide> String getSessionId(); <ide> <ide> /** <ide> * Expose the best available mutex for the underlying session: <ide> * that is, an object to synchronize on for the underlying session. <del> * @return the session mutex to use (never {@code null} <add> * @return the session mutex to use (never {@code null}) <ide> */ <ide> Object getSessionMutex(); <ide>
Java
agpl-3.0
1b3dff8cf4d55149dcae3081ca0e9a3626179480
0
o2oa/o2oa,o2oa/o2oa,o2oa/o2oa,o2oa/o2oa,o2oa/o2oa
package com.x.base.core.project; import com.x.base.core.project.annotation.Module; import com.x.base.core.project.annotation.ModuleCategory; import com.x.base.core.project.annotation.ModuleType; @Module(type = ModuleType.ASSEMBLE, category = ModuleCategory.OFFICIAL, name = "工作任务管理", packageName = "com.x.teamwork.assemble.control", containerEntities = { "com.x.teamwork.core.entity.Project", "com.x.teamwork.core.entity.ProjectDetail", "com.x.teamwork.core.entity.CustomExtFieldRele", "com.x.teamwork.core.entity.ProjectGroup", "com.x.teamwork.core.entity.ProjectTemplate","com.x.teamwork.core.entity.TaskListTemplate", "com.x.teamwork.core.entity.ProjectGroupRele", "com.x.teamwork.core.entity.Task", "com.x.teamwork.core.entity.TaskGroup", "com.x.teamwork.core.entity.TaskGroupRele", "com.x.teamwork.core.entity.TaskDetail", "com.x.teamwork.core.entity.TaskExtField", "com.x.teamwork.core.entity.TaskList", "com.x.teamwork.core.entity.TaskListRele", "com.x.teamwork.core.entity.TaskRelevance", "com.x.teamwork.core.entity.TaskView", "com.x.teamwork.core.entity.SystemConfig", "com.x.teamwork.core.entity.SystemConfigLobValue", "com.x.teamwork.core.entity.Review", "com.x.teamwork.core.entity.BatchOperation", "com.x.teamwork.core.entity.TaskTag", "com.x.teamwork.core.entity.TaskTagRele", "com.x.teamwork.core.entity.Attachment","com.x.teamwork.core.entity.ProjectConfig", "com.x.teamwork.core.entity.Chat", "com.x.teamwork.core.entity.ChatContent","com.x.teamwork.core.entity.Priority", "com.x.teamwork.core.entity.Dynamic", "com.x.teamwork.core.entity.DynamicDetail" }, storeJars = { "x_organization_core_entity", "x_organization_core_express", "x_teamwork_core_entity" }) public class x_teamwork_assemble_control extends Deployable { }
o2server/x_base_core_project/src/main/java/com/x/base/core/project/x_teamwork_assemble_control.java
package com.x.base.core.project; import com.x.base.core.project.annotation.Module; import com.x.base.core.project.annotation.ModuleCategory; import com.x.base.core.project.annotation.ModuleType; @Module(type = ModuleType.ASSEMBLE, category = ModuleCategory.OFFICIAL, name = "工作任务管理", packageName = "com.x.teamwork.assemble.control", containerEntities = { "com.x.teamwork.core.entity.Project", "com.x.teamwork.core.entity.ProjectDetail", "com.x.teamwork.core.entity.ProjectExtFieldRele", "com.x.teamwork.core.entity.ProjectGroup", "com.x.teamwork.core.entity.ProjectTemplate","com.x.teamwork.core.entity.TaskListTemplate", "com.x.teamwork.core.entity.ProjectGroupRele", "com.x.teamwork.core.entity.Task", "com.x.teamwork.core.entity.TaskGroup", "com.x.teamwork.core.entity.TaskGroupRele", "com.x.teamwork.core.entity.TaskDetail", "com.x.teamwork.core.entity.TaskExtField", "com.x.teamwork.core.entity.TaskList", "com.x.teamwork.core.entity.TaskListRele", "com.x.teamwork.core.entity.TaskRelevance", "com.x.teamwork.core.entity.TaskView", "com.x.teamwork.core.entity.SystemConfig", "com.x.teamwork.core.entity.SystemConfigLobValue", "com.x.teamwork.core.entity.Review", "com.x.teamwork.core.entity.BatchOperation", "com.x.teamwork.core.entity.TaskTag", "com.x.teamwork.core.entity.TaskTagRele", "com.x.teamwork.core.entity.Attachment","com.x.teamwork.core.entity.ProjectConfig", "com.x.teamwork.core.entity.Chat", "com.x.teamwork.core.entity.ChatContent","com.x.teamwork.core.entity.Priority", "com.x.teamwork.core.entity.Dynamic", "com.x.teamwork.core.entity.DynamicDetail" }, storeJars = { "x_organization_core_entity", "x_organization_core_express", "x_teamwork_core_entity" }) public class x_teamwork_assemble_control extends Deployable { }
teamwork添加自定义字段服务
o2server/x_base_core_project/src/main/java/com/x/base/core/project/x_teamwork_assemble_control.java
teamwork添加自定义字段服务
<ide><path>2server/x_base_core_project/src/main/java/com/x/base/core/project/x_teamwork_assemble_control.java <ide> @Module(type = ModuleType.ASSEMBLE, category = ModuleCategory.OFFICIAL, name = "工作任务管理", packageName = "com.x.teamwork.assemble.control", <ide> containerEntities = { <ide> "com.x.teamwork.core.entity.Project", "com.x.teamwork.core.entity.ProjectDetail", <del> "com.x.teamwork.core.entity.ProjectExtFieldRele", "com.x.teamwork.core.entity.ProjectGroup", <add> "com.x.teamwork.core.entity.CustomExtFieldRele", "com.x.teamwork.core.entity.ProjectGroup", <ide> "com.x.teamwork.core.entity.ProjectTemplate","com.x.teamwork.core.entity.TaskListTemplate", <ide> "com.x.teamwork.core.entity.ProjectGroupRele", "com.x.teamwork.core.entity.Task", <ide> "com.x.teamwork.core.entity.TaskGroup", "com.x.teamwork.core.entity.TaskGroupRele",
Java
apache-2.0
cd3a0b0f17f142cb09a829594162845788e0b18e
0
okankurtulus/droidparts,yanchenko/droidparts,b-cuts/droidparts,vovan888/droidparts,droidparts/droidparts
/** * Copyright 2012 Alex Yanchenko * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.droidparts.net.http; import org.apache.http.HttpStatus; public class HTTPException extends Exception { private static final long serialVersionUID = 1L; private int respCode = -1; public HTTPException(Throwable cause) { super(cause); } public HTTPException(int respCode, String respBody) { super(respBody); this.respCode = respCode; } /** * @see HttpStatus */ public int getResponseCode() { return respCode; } @Override public String toString() { if (respCode != -1) { StringBuilder sb = new StringBuilder(); sb.append("Response code: "); sb.append(respCode); sb.append(", body: "); sb.append(getMessage()); return sb.toString(); } else { return super.toString(); } } }
extra/src/org/droidparts/net/http/HTTPException.java
/** * Copyright 2012 Alex Yanchenko * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.droidparts.net.http; import org.apache.http.HttpStatus; public class HTTPException extends Exception { private static final long serialVersionUID = 1L; private int respCode = -1; public HTTPException(Throwable cause) { super(cause); } public HTTPException(int respCode, String respMsg) { super(respMsg); this.respCode = respCode; } /** * @see HttpStatus */ public int getResponseCode() { return respCode; } @Override public String getMessage() { StringBuilder sb = new StringBuilder(); if (respCode != -1) { sb.append("Response code: "); sb.append(respCode); sb.append(", "); } sb.append("Message: "); sb.append(super.getMessage()); return sb.toString(); } }
Simplified HTTPException 2.
extra/src/org/droidparts/net/http/HTTPException.java
Simplified HTTPException 2.
<ide><path>xtra/src/org/droidparts/net/http/HTTPException.java <ide> super(cause); <ide> } <ide> <del> public HTTPException(int respCode, String respMsg) { <del> super(respMsg); <add> public HTTPException(int respCode, String respBody) { <add> super(respBody); <ide> this.respCode = respCode; <ide> } <ide> <ide> } <ide> <ide> @Override <del> public String getMessage() { <del> StringBuilder sb = new StringBuilder(); <add> public String toString() { <ide> if (respCode != -1) { <add> StringBuilder sb = new StringBuilder(); <ide> sb.append("Response code: "); <ide> sb.append(respCode); <del> sb.append(", "); <add> sb.append(", body: "); <add> sb.append(getMessage()); <add> return sb.toString(); <add> } else { <add> return super.toString(); <ide> } <del> sb.append("Message: "); <del> sb.append(super.getMessage()); <del> return sb.toString(); <ide> } <ide> <ide> }
Java
mit
e0c7c2ef7503aa868503216698d1d251946667d3
0
uq-eresearch/aorra,uq-eresearch/aorra,uq-eresearch/aorra,uq-eresearch/aorra
package charts.builder; import java.lang.reflect.Modifier; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import org.reflections.Reflections; import play.Logger; import charts.Chart; import charts.ChartType; import charts.Region; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.inject.Inject; import com.google.inject.Singleton; @Singleton public class DefaultChartBuilder implements ChartBuilder { private final List<ChartTypeBuilder> builders = detectBuilders(); private final DataSourceFactory dataSourceFactory; @Inject public DefaultChartBuilder(DataSourceFactory dsf) { this.dataSourceFactory = dsf; } @Override public List<Chart> getCharts(String id, ChartType type, List<Region> regions, Map<String, String> parameters) throws Exception { DataSource datasource = dataSourceFactory.getDataSource(id); return getCharts(datasource, type, regions, parameters); } private List<Chart> getCharts(DataSource datasource, ChartType type, List<Region> regions, Map<String, String> parameters) { final List<Chart> result = Lists.newLinkedList(); for (final ChartTypeBuilder builder : builders) { try { List<Chart> charts = builder.build(datasource, type, regions, parameters); if(!charts.isEmpty()) { result.addAll(charts); } } catch(Exception e) { Logger.warn(String.format("caught exception while building charts (type %s," + " regions %s, parameters %s)", type, regions, parameters), e); } } // make sure charts are sorted by region // https://github.com/uq-eresearch/aorra/issues/44 Collections.sort(result, new Comparator<Chart>() { @Override public int compare(Chart c1, Chart c2) { return getRegion(c1).compareTo(getRegion(c2)); } private Region getRegion(Chart c) { return c.getDescription().getRegion(); } }); return result; } private static List<ChartTypeBuilder> detectBuilders() { final ImmutableList.Builder<ChartTypeBuilder> b = new 
ImmutableList.Builder<ChartTypeBuilder>(); for (Class<? extends ChartTypeBuilder> builderClass : new Reflections( "charts.builder").getSubTypesOf(ChartTypeBuilder.class)) { if (builderClass.isInterface()) continue; if (Modifier.isAbstract(builderClass.getModifiers())) continue; try { Logger.debug("Found chart builder: "+builderClass.getCanonicalName()); b.add(builderClass.newInstance()); } catch (InstantiationException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } return b.build(); } }
app/charts/builder/DefaultChartBuilder.java
package charts.builder; import java.lang.reflect.Modifier; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import org.reflections.Reflections; import play.Logger; import charts.Chart; import charts.ChartType; import charts.Region; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.inject.Inject; import com.google.inject.Singleton; @Singleton public class DefaultChartBuilder implements ChartBuilder { private final List<ChartTypeBuilder> builders = detectBuilders(); private final DataSourceFactory dataSourceFactory; @Inject public DefaultChartBuilder(DataSourceFactory dsf) { this.dataSourceFactory = dsf; } @Override public List<Chart> getCharts(String id, ChartType type, List<Region> regions, Map<String, String> parameters) throws Exception { DataSource datasource = dataSourceFactory.getDataSource(id); return getCharts(datasource, type, regions, parameters); } private List<Chart> getCharts(DataSource datasource, ChartType type, List<Region> regions, Map<String, String> parameters) { final List<Chart> result = Lists.newLinkedList(); for (final ChartTypeBuilder builder : builders) { try { List<Chart> charts = builder.build(datasource, type, regions, parameters); if(!charts.isEmpty()) { result.addAll(charts); } } catch(Exception e) { e.printStackTrace(); } } // make sure charts are sorted by region // https://github.com/uq-eresearch/aorra/issues/44 Collections.sort(result, new Comparator<Chart>() { @Override public int compare(Chart c1, Chart c2) { return getRegion(c1).compareTo(getRegion(c2)); } private Region getRegion(Chart c) { return c.getDescription().getRegion(); } }); return result; } private static List<ChartTypeBuilder> detectBuilders() { final ImmutableList.Builder<ChartTypeBuilder> b = new ImmutableList.Builder<ChartTypeBuilder>(); for (Class<? 
extends ChartTypeBuilder> builderClass : new Reflections( "charts.builder").getSubTypesOf(ChartTypeBuilder.class)) { if (builderClass.isInterface()) continue; if (Modifier.isAbstract(builderClass.getModifiers())) continue; try { Logger.debug("Found chart builder: "+builderClass.getCanonicalName()); b.add(builderClass.newInstance()); } catch (InstantiationException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } } return b.build(); } }
log exception through logger rather than to stdout
app/charts/builder/DefaultChartBuilder.java
log exception through logger rather than to stdout
<ide><path>pp/charts/builder/DefaultChartBuilder.java <ide> result.addAll(charts); <ide> } <ide> } catch(Exception e) { <del> e.printStackTrace(); <add> Logger.warn(String.format("caught exception while building charts (type %s," + <add> " regions %s, parameters %s)", type, regions, parameters), e); <ide> } <ide> } <ide> // make sure charts are sorted by region <ide> } <ide> return b.build(); <ide> } <del> <del> <ide> }
JavaScript
mit
5ea68b6a7f705d51e98585e9f135d2ea39f0ece9
0
ampatspell/ember-cli-sofa,ampatspell/ember-cli-sofa
/* global emit */ import Ember from 'ember'; import { configurations, registerModels, registerQueries, registerRelationships, cleanup, next, wait } from '../helpers/setup'; import { Relationship, Query, Model, prefix, hasMany } from 'sofa'; const { computed, RSVP: { all } } = Ember; const ddoc = { views: { 'all': { map(doc) { if(doc.type !== 'duck') { return; } emit(doc._id, null); } } } }; configurations(({ module, test, createStore }) => { let store; let db; let AllDucks = Query.extend({ find: computed(function() { return { ddoc: 'ducks', view: 'all' }; }) }); let AllDucksRelationship = Relationship.extend({ query: 'all-ducks' }); let Duck = Model.extend({ id: prefix(), }); let Root = Model.extend({ ducks: hasMany('duck', { inverse: null, relationship: 'all-ducks' }) }); function flush() { store = createStore(); db = store.get('db.main'); db.set('modelNames', [ 'root', 'duck' ]); } module('has-many-collection', () => { registerModels({ Duck, Root }); registerQueries({ AllDucks }); registerRelationships({ AllDucks: AllDucksRelationship }); flush(); return cleanup(store, [ 'main' ]).then(() => { return db.get('documents.design').save('ducks', ddoc); }); }); test('models are initially matched', assert => { [ 'yellow', 'red', 'green' ].map(id => db.model('duck', { id })); let root = db.model('root'); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow', 'red', 'green' ]); return root.get('ducks.promise'); }); test('new models are added to collection', assert => { db.model('duck', { id: 'yellow' }); let root = db.model('root'); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow' ]); db.model('duck', { id: 'red' }); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow', 'red' ]); db.model('duck', { id: 'green' }); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow', 'red', 'green' ]); return root.get('ducks.promise'); }); test('destroyed isNew model is removed from coll', assert => { [ 'yellow', 'red', 'green' ].map(id => db.model('duck', { id 
})); let root = db.model('root'); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow', 'red', 'green' ]); root.get('ducks').objectAt(0).destroy(); return next().then(() => { assert.deepEqual(root.get('ducks').mapBy('id'), [ 'red', 'green' ]); return root.get('ducks.promise'); }); }); test('assign database after model creation', assert => { db.model('duck', { id: 'yellow' }); let root = store.model('root'); assert.deepEqual(root.get('ducks').mapBy('id'), []); root.set('database', db); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow' ]); }); test('autoload', assert => { let root; return all([ 'yellow', 'red', 'green' ].map(id => db.model('duck', { id }).save())).then(() => { flush(); root = db.model('root'); assert.deepEqual(root.get('ducks').mapBy('id'), []); return wait(null, 100); }).then(() => { assert.deepEqual(root.get('ducks').mapBy('id'), [ 'green', 'red', 'yellow' ]); }); }); test('load', assert => { let root; return all([ 'yellow', 'red', 'green' ].map(id => db.model('duck', { id }).save())).then(() => { flush(); root = db.model('root'); return root.get('ducks.promise'); }).then(() => { assert.deepEqual(root.get('ducks').mapBy('id'), [ 'green', 'red', 'yellow' ]); }); }); test('destroy', assert => { let root; let relation; return all([ 'yellow', 'red', 'green' ].map(id => db.model('duck', { id }).save())).then(() => { flush(); root = db.model('root'); return root.get('ducks.promise'); }).then(() => { assert.deepEqual(root.get('ducks').mapBy('id'), [ 'green', 'red', 'yellow' ]); relation = root.get('ducks._relation'); assert.ok(relation); root.get('ducks').destroy(); return next(); }).then(() => { assert.ok(!relation.value); assert.ok(!root.get('ducks').isDestroying); assert.ok(relation.value); }); }); });
tests/unit/has-many-collection-test.js
/* global emit */ import Ember from 'ember'; import { configurations, registerModels, registerQueries, registerRelationships, cleanup, next, wait } from '../helpers/setup'; import { Relationship, Query, Model, prefix, hasMany } from 'sofa'; const { computed, RSVP: { all } } = Ember; const ddoc = { views: { 'all': { map(doc) { if(doc.type !== 'duck') { return; } emit(doc._id, null); } } } }; configurations(({ module, test, createStore }) => { let store; let db; let AllDucks = Query.extend({ find: computed(function() { return { ddoc: 'ducks', view: 'all' }; }) }); let AllDucksRelationship = Relationship.extend({ query: 'all-ducks' }); let Duck = Model.extend({ id: prefix(), }); let Root = Model.extend({ ducks: hasMany('duck', { inverse: null, relationship: 'all-ducks' }) }); function flush() { store = createStore(); db = store.get('db.main'); db.set('modelNames', [ 'root', 'duck' ]); } module('has-many-collection', () => { registerModels({ Duck, Root }); registerQueries({ AllDucks }); registerRelationships({ AllDucks: AllDucksRelationship }); flush(); return cleanup(store, [ 'main' ]).then(() => { return db.get('documents.design').save('ducks', ddoc); }); }); test('models are initially matched', assert => { [ 'yellow', 'red', 'green' ].map(id => db.model('duck', { id })); let root = db.model('root'); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow', 'red', 'green' ]); return root.get('ducks.promise'); }); test('new models are added to collection', assert => { db.model('duck', { id: 'yellow' }); let root = db.model('root'); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow' ]); db.model('duck', { id: 'red' }); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow', 'red' ]); db.model('duck', { id: 'green' }); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow', 'red', 'green' ]); return root.get('ducks.promise'); }); test('destroyed isNew model is removed from coll', assert => { [ 'yellow', 'red', 'green' ].map(id => db.model('duck', { id 
})); let root = db.model('root'); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow', 'red', 'green' ]); root.get('ducks').objectAt(0).destroy(); return next().then(() => { assert.deepEqual(root.get('ducks').mapBy('id'), [ 'red', 'green' ]); return root.get('ducks.promise'); }); }); test('assign database after model creation', assert => { db.model('duck', { id: 'yellow' }); let root = store.model('root'); assert.deepEqual(root.get('ducks').mapBy('id'), []); root.set('database', db); assert.deepEqual(root.get('ducks').mapBy('id'), [ 'yellow' ]); }); test('autoload', assert => { let root; return all([ 'yellow', 'red', 'green' ].map(id => db.model('duck', { id }).save())).then(() => { flush(); root = db.model('root'); assert.deepEqual(root.get('ducks').mapBy('id'), []); return wait(null, 100); }).then(() => { assert.deepEqual(root.get('ducks').mapBy('id'), [ 'green', 'red', 'yellow' ]); }); }); test('load', assert => { let root; return all([ 'yellow', 'red', 'green' ].map(id => db.model('duck', { id }).save())).then(() => { flush(); root = db.model('root'); return root.get('ducks.promise'); }).then(() => { assert.deepEqual(root.get('ducks').mapBy('id'), [ 'green', 'red', 'yellow' ]); }); }); test.only('destroy', assert => { let root; let relation; return all([ 'yellow', 'red', 'green' ].map(id => db.model('duck', { id }).save())).then(() => { flush(); root = db.model('root'); return root.get('ducks.promise'); }).then(() => { assert.deepEqual(root.get('ducks').mapBy('id'), [ 'green', 'red', 'yellow' ]); relation = root.get('ducks._relation'); assert.ok(relation); root.get('ducks').destroy(); return next(); }).then(() => { assert.ok(!relation.value); assert.ok(!root.get('ducks').isDestroying); assert.ok(relation.value); }); }); });
test.only removed
tests/unit/has-many-collection-test.js
test.only removed
<ide><path>ests/unit/has-many-collection-test.js <ide> }); <ide> }); <ide> <del> test.only('destroy', assert => { <add> test('destroy', assert => { <ide> let root; <ide> let relation; <ide> return all([ 'yellow', 'red', 'green' ].map(id => db.model('duck', { id }).save())).then(() => {
Java
apache-2.0
4a5567433c7a76826dbacff29caf4821f8c13fc4
0
xmpace/jetty-read,xmpace/jetty-read,xmpace/jetty-read,xmpace/jetty-read
// // ======================================================================== // Copyright (c) 1995-2013 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // All rights reserved. This program and the accompanying materials // are made available under the terms of the Eclipse Public License v1.0 // and Apache License v2.0 which accompanies this distribution. // // The Eclipse Public License is available at // http://www.eclipse.org/legal/epl-v10.html // // The Apache License v2.0 is available at // http://www.opensource.org/licenses/apache2.0.php // // You may elect to redistribute this code under either of these licenses. // ======================================================================== // package org.eclipse.jetty.util.ajax; import java.lang.reflect.Method; import java.util.Map; import org.eclipse.jetty.util.Loader; import org.eclipse.jetty.util.ajax.JSON.Output; import org.eclipse.jetty.util.log.Log; import org.eclipse.jetty.util.log.Logger; /* ------------------------------------------------------------ */ /** * Convert an {@link Enum} to JSON. * If fromJSON is true in the constructor, the JSON generated will * be of the form {class="com.acme.TrafficLight",value="Green"} * If fromJSON is false, then only the string value of the enum is generated. 
* * */ public class JSONEnumConvertor implements JSON.Convertor { private static final Logger LOG = Log.getLogger(JSONEnumConvertor.class); private boolean _fromJSON; private Method _valueOf; { try { Class e = Loader.loadClass(getClass(),"java.lang.Enum"); _valueOf=e.getMethod("valueOf",new Class[]{Class.class,String.class}); } catch(Exception e) { throw new RuntimeException("!Enums",e); } } public JSONEnumConvertor() { this(false); } public JSONEnumConvertor(boolean fromJSON) { _fromJSON=fromJSON; } public Object fromJSON(Map map) { if (!_fromJSON) throw new UnsupportedOperationException(); try { Class c=Loader.loadClass(getClass(),(String)map.get("class")); return _valueOf.invoke(null,new Object[]{c,map.get("value")}); } catch(Exception e) { LOG.warn(e); } return null; } public void toJSON(Object obj, Output out) { if (_fromJSON) { out.addClass(obj.getClass()); out.add("value",((Enum)obj).name()); } else { out.add(((Enum)obj).name()); } } }
jetty-util/src/main/java/org/eclipse/jetty/util/ajax/JSONEnumConvertor.java
// // ======================================================================== // Copyright (c) 1995-2013 Mort Bay Consulting Pty. Ltd. // ------------------------------------------------------------------------ // All rights reserved. This program and the accompanying materials // are made available under the terms of the Eclipse Public License v1.0 // and Apache License v2.0 which accompanies this distribution. // // The Eclipse Public License is available at // http://www.eclipse.org/legal/epl-v10.html // // The Apache License v2.0 is available at // http://www.opensource.org/licenses/apache2.0.php // // You may elect to redistribute this code under either of these licenses. // ======================================================================== // package org.eclipse.jetty.util.ajax; import java.lang.reflect.Method; import java.util.Map; import org.eclipse.jetty.util.Loader; import org.eclipse.jetty.util.ajax.JSON.Output; import org.eclipse.jetty.util.log.Log; import org.eclipse.jetty.util.log.Logger; /* ------------------------------------------------------------ */ /** * Convert an {@link Enum} to JSON. * If fromJSON is true in the constructor, the JSON generated will * be of the form {class="com.acme.TrafficLight",value="Green"} * If fromJSON is false, then only the string value of the enum is generated. 
* * */ public class JSONEnumConvertor implements JSON.Convertor { private static final Logger LOG = Log.getLogger(JSONEnumConvertor.class); private boolean _fromJSON; private Method _valueOf; { try { Class e = Loader.loadClass(getClass(),"java.lang.Enum"); _valueOf=e.getMethod("valueOf",new Class[]{Class.class,String.class}); } catch(Exception e) { throw new RuntimeException("!Enums",e); } } public JSONEnumConvertor() { this(false); } public JSONEnumConvertor(boolean fromJSON) { _fromJSON=fromJSON; } public Object fromJSON(Map map) { if (!_fromJSON) throw new UnsupportedOperationException(); try { Class c=Loader.loadClass(getClass(),(String)map.get("class")); return _valueOf.invoke(null,new Object[]{c,map.get("value")}); } catch(Exception e) { LOG.warn(e); } return null; } public void toJSON(Object obj, Output out) { if (_fromJSON) { out.addClass(obj.getClass()); out.add("value",obj.toString()); } else { out.add(obj.toString()); } } }
413372 JSON Enum uses name rather than toString()
jetty-util/src/main/java/org/eclipse/jetty/util/ajax/JSONEnumConvertor.java
413372 JSON Enum uses name rather than toString()
<ide><path>etty-util/src/main/java/org/eclipse/jetty/util/ajax/JSONEnumConvertor.java <ide> if (_fromJSON) <ide> { <ide> out.addClass(obj.getClass()); <del> out.add("value",obj.toString()); <add> out.add("value",((Enum)obj).name()); <ide> } <ide> else <ide> { <del> out.add(obj.toString()); <add> out.add(((Enum)obj).name()); <ide> } <ide> } <ide>
Java
apache-2.0
695afb4ac009b1e3f31747f2150e1982de26356a
0
telstra/open-kilda,jonvestal/open-kilda,telstra/open-kilda,telstra/open-kilda,telstra/open-kilda,jonvestal/open-kilda,telstra/open-kilda,telstra/open-kilda,jonvestal/open-kilda,telstra/open-kilda,jonvestal/open-kilda,jonvestal/open-kilda
/* Copyright 2019 Telstra Open Source * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openkilda.wfm.topology.floodlightrouter; import org.openkilda.config.KafkaTopicsConfig; import org.openkilda.persistence.PersistenceManager; import org.openkilda.persistence.spi.PersistenceProvider; import org.openkilda.wfm.LaunchEnvironment; import org.openkilda.wfm.topology.AbstractTopology; import org.openkilda.wfm.topology.floodlightrouter.bolts.BroadcastRequestBolt; import org.openkilda.wfm.topology.floodlightrouter.bolts.DiscoveryBolt; import org.openkilda.wfm.topology.floodlightrouter.bolts.ReplyBolt; import org.openkilda.wfm.topology.floodlightrouter.bolts.RequestBolt; import org.openkilda.wfm.topology.floodlightrouter.bolts.SpeakerRequestBolt; import org.apache.storm.generated.StormTopology; import org.apache.storm.kafka.bolt.KafkaBolt; import org.apache.storm.kafka.spout.KafkaSpout; import org.apache.storm.topology.TopologyBuilder; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.stream.Collectors; /** * Floodlight topology. 
*/ public class FloodlightRouterTopology extends AbstractTopology<FloodlightRouterTopologyConfig> { private final PersistenceManager persistenceManager; public FloodlightRouterTopology(LaunchEnvironment env) { super(env, FloodlightRouterTopologyConfig.class); persistenceManager = PersistenceProvider.getInstance().createPersistenceManager(configurationProvider); } private void createKildaFlowSpout(TopologyBuilder builder, int parallelism, List<String> kildaFlowTopics) { KafkaSpout kildaFlowSpout = buildKafkaSpout(kildaFlowTopics, ComponentType.KILDA_FLOW_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_FLOW_KAFKA_SPOUT, kildaFlowSpout, parallelism); } private void createKildaFlowKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaFlowKafkaBolt = buildKafkaBolt(topicsConfig.getFlowTopic()); builder.setBolt(ComponentType.KILDA_FLOW_KAFKA_BOLT, kildaFlowKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_FLOW_REPLY_BOLT, Stream.KILDA_FLOW); } private void createKildaFlowHsSpout(TopologyBuilder builder, int parallelism, List<String> kildaFlowTopics) { KafkaSpout kildaFlowHsSpout = buildKafkaSpoutForAbstractMessage(kildaFlowTopics, ComponentType.KILDA_FLOW_HS_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_FLOW_HS_KAFKA_SPOUT, kildaFlowHsSpout, parallelism); } private void createKildaFlowHsKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaFlowHsKafkaBolt = buildKafkaBoltWithAbstractMessageSupport(topicsConfig.getFlowHsSpeakerTopic()); builder.setBolt(ComponentType.KILDA_FLOW_HS_KAFKA_BOLT, kildaFlowHsKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_FLOW_HS_REPLY_BOLT, Stream.KILDA_HS_FLOW); } private void createKildaFlowReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaFlowTopics) { createKildaFlowSpout(builder, parallelism, kildaFlowTopics); createKildaFlowKafkaBolt(builder, parallelism, 
topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_FLOW); builder.setBolt(ComponentType.KILDA_FLOW_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_FLOW_KAFKA_SPOUT); } private void createKildaFlowHsReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaFlowHsTopics) { createKildaFlowHsSpout(builder, parallelism, kildaFlowHsTopics); createKildaFlowHsKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_HS_FLOW); builder.setBolt(ComponentType.KILDA_FLOW_HS_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_FLOW_HS_KAFKA_SPOUT); } private void createKildaPingSpout(TopologyBuilder builder, int parallelism, List<String> kildaPingTopics) { KafkaSpout kildaPingSpout = buildKafkaSpout(kildaPingTopics, ComponentType.KILDA_PING_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_PING_KAFKA_SPOUT, kildaPingSpout, parallelism); } private void createKildaPingKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaPingKafkaBolt = buildKafkaBolt(topicsConfig.getPingTopic()); builder.setBolt(ComponentType.KILDA_PING_KAFKA_BOLT, kildaPingKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_PING_REPLY_BOLT, Stream.KILDA_PING); } private void createKildaPingReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaPingTopics) { createKildaPingSpout(builder, parallelism, kildaPingTopics); createKildaPingKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_PING); builder.setBolt(ComponentType.KILDA_PING_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_PING_KAFKA_SPOUT); } private void createKildaStatsSpout(TopologyBuilder builder, int parallelism, List<String> kildaStatsTopics) { KafkaSpout kildaStatsSpout = buildKafkaSpout(kildaStatsTopics, 
ComponentType.KILDA_STATS_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_STATS_KAFKA_SPOUT, kildaStatsSpout, parallelism); } private void createKildaStatsKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaStatsKafkaBolt = buildKafkaBolt(topicsConfig.getStatsTopic()); builder.setBolt(ComponentType.KILDA_STATS_KAFKA_BOLT, kildaStatsKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_STATS_REPLY_BOLT, Stream.KILDA_STATS); } private void createKildaStatsReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaStatsTopics) { createKildaStatsSpout(builder, parallelism, kildaStatsTopics); createKildaStatsKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_STATS); builder.setBolt(ComponentType.KILDA_STATS_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_STATS_KAFKA_SPOUT); } private void createKildaIslLatencySpout(TopologyBuilder builder, int parallelism, List<String> kildaStatsTopics) { KafkaSpout kildaStatsSpout = buildKafkaSpout(kildaStatsTopics, ComponentType.KILDA_ISL_LATENCY_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_ISL_LATENCY_KAFKA_SPOUT, kildaStatsSpout, parallelism); } private void createKildaIslLatencyKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaStatsKafkaBolt = buildKafkaBolt(topicsConfig.getTopoIslLatencyTopic()); builder.setBolt(ComponentType.KILDA_ISL_LATENCY_KAFKA_BOLT, kildaStatsKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_ISL_LATENCY_REPLY_BOLT, Stream.KILDA_ISL_LATENCY); } private void createKildaIslLatencyReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaIslLatencyTopics) { createKildaIslLatencySpout(builder, parallelism, kildaIslLatencyTopics); createKildaIslLatencyKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new 
ReplyBolt(Stream.KILDA_ISL_LATENCY); builder.setBolt(ComponentType.KILDA_ISL_LATENCY_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_ISL_LATENCY_KAFKA_SPOUT); } private void createKildaConnectedDevicesSpout(TopologyBuilder builder, int parallelism, List<String> topics) { KafkaSpout spout = buildKafkaSpout(topics, ComponentType.KILDA_CONNECTED_DEVICES_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_CONNECTED_DEVICES_KAFKA_SPOUT, spout, parallelism); } private void createKildaConnectedDevicesKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kafkaBolt = buildKafkaBolt(topicsConfig.getTopoConnectedDevicesTopic()); builder.setBolt(ComponentType.KILDA_CONNECTED_DEVICES_KAFKA_BOLT, kafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_CONNECTED_DEVICES_REPLY_BOLT, Stream.KILDA_CONNECTED_DEVICES); } private void createKildaConnectedDevicesReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> topics) { createKildaConnectedDevicesSpout(builder, parallelism, topics); createKildaConnectedDevicesKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_CONNECTED_DEVICES); builder.setBolt(ComponentType.KILDA_CONNECTED_DEVICES_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_CONNECTED_DEVICES_KAFKA_SPOUT); } private void createKildaSwitchManagerSpout(TopologyBuilder builder, int parallelism, List<String> kildaSwitchManagerTopics) { KafkaSpout kildaSwitchManagerSpout = buildKafkaSpout(kildaSwitchManagerTopics, ComponentType.KILDA_SWITCH_MANAGER_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_SWITCH_MANAGER_KAFKA_SPOUT, kildaSwitchManagerSpout, parallelism); } private void createKildaSwitchManagerKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaSwitchManagerKafkaBolt = buildKafkaBolt(topicsConfig.getTopoSwitchManagerTopic()); 
builder.setBolt(ComponentType.KILDA_SWITCH_MANAGER_KAFKA_BOLT, kildaSwitchManagerKafkaBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_REQUEST_BOLT, Stream.KILDA_SWITCH_MANAGER) .shuffleGrouping(ComponentType.KILDA_SWITCH_MANAGER_REPLY_BOLT, Stream.KILDA_SWITCH_MANAGER); } private void createKildaSwitchManagerReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaSwitchManagerTopics) { createKildaSwitchManagerSpout(builder, parallelism, kildaSwitchManagerTopics); createKildaSwitchManagerKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_SWITCH_MANAGER); builder.setBolt(ComponentType.KILDA_SWITCH_MANAGER_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_SWITCH_MANAGER_KAFKA_SPOUT); } private void createKildaNorthboundSpout(TopologyBuilder builder, int parallelism, List<String> kildaNorthboundTopics) { KafkaSpout kildaNorthboundSpout = buildKafkaSpout(kildaNorthboundTopics, ComponentType.NORTHBOUND_REPLY_KAFKA_SPOUT); builder.setSpout(ComponentType.NORTHBOUND_REPLY_KAFKA_SPOUT, kildaNorthboundSpout, parallelism); } private void createKildaNorthboundKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaNorthboundKafkaBolt = buildKafkaBolt(topicsConfig.getNorthboundTopic()); builder.setBolt(ComponentType.NORTHBOUND_REPLY_KAFKA_BOLT, kildaNorthboundKafkaBolt, parallelism) .shuffleGrouping(ComponentType.NORTHBOUND_REPLY_BOLT, Stream.NORTHBOUND_REPLY); } private void createKildaNorthboundReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaNorthboundTopics) { createKildaNorthboundSpout(builder, parallelism, kildaNorthboundTopics); createKildaNorthboundKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.NORTHBOUND_REPLY); builder.setBolt(ComponentType.NORTHBOUND_REPLY_BOLT, replyBolt, parallelism) 
.shuffleGrouping(ComponentType.NORTHBOUND_REPLY_KAFKA_SPOUT); } private void createKildaNbWorkerSpout(TopologyBuilder builder, int parallelism, List<String> kildaNbWorkerTopics) { KafkaSpout kildaNbWorkerSpout = buildKafkaSpout(kildaNbWorkerTopics, ComponentType.KILDA_NB_WORKER_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_NB_WORKER_KAFKA_SPOUT, kildaNbWorkerSpout, parallelism); } private void createKildaNbWorkerKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaNbWorkerKafkaBolt = buildKafkaBolt(topicsConfig.getTopoNbTopic()); builder.setBolt(ComponentType.KILDA_NB_WORKER_KAFKA_BOLT, kildaNbWorkerKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_NB_WORKER_REPLY_BOLT, Stream.NB_WORKER); } private void createKildaNbWorkerReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaNbWorkerTopics) { createKildaNbWorkerSpout(builder, parallelism, kildaNbWorkerTopics); createKildaNbWorkerKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.NB_WORKER); builder.setBolt(ComponentType.KILDA_NB_WORKER_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_NB_WORKER_KAFKA_SPOUT); } private void createSpeakerFlowRequestStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { createSpeakerFlowRequestSpoutAndKafkaBolt(builder, parallelism, topicsConfig); createSpeakerFlowHsRequestSpoutAndKafkaBolt(builder, parallelism, topicsConfig); RequestBolt speakerFlowRequestBolt = new RequestBolt(Stream.SPEAKER_FLOW, Stream.SPEAKER_FLOW_HS, topologyConfig.getFloodlightRegions()); builder.setBolt(ComponentType.SPEAKER_FLOW_REQUEST_BOLT, speakerFlowRequestBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_FLOW_KAFKA_SPOUT) .shuffleGrouping(ComponentType.SPEAKER_FLOW_HS_KAFKA_SPOUT) .allGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); } private void 
createSpeakerFlowRequestSpoutAndKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaSpout speakerFlowKafkaSpout = buildKafkaSpout(topicsConfig.getSpeakerFlowTopic(), ComponentType.SPEAKER_FLOW_KAFKA_SPOUT); builder.setSpout(ComponentType.SPEAKER_FLOW_KAFKA_SPOUT, speakerFlowKafkaSpout); for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt speakerFlowKafkaBolt = buildKafkaBolt( Stream.formatWithRegion(topicsConfig.getSpeakerFlowRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.SPEAKER_FLOW_KAFKA_BOLT, region), speakerFlowKafkaBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_FLOW_REQUEST_BOLT, Stream.formatWithRegion(Stream.SPEAKER_FLOW, region)); } } private void createSpeakerFlowHsRequestSpoutAndKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaSpout speakerFlowKafkaSpout = buildKafkaSpoutForAbstractMessage(topicsConfig.getSpeakerFlowHsTopic(), ComponentType.SPEAKER_FLOW_HS_KAFKA_SPOUT); builder.setSpout(ComponentType.SPEAKER_FLOW_HS_KAFKA_SPOUT, speakerFlowKafkaSpout); for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt speakerFlowKafkaBolt = buildKafkaBoltWithAbstractMessageSupport( Stream.formatWithRegion(topicsConfig.getSpeakerFlowRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.SPEAKER_FLOW_HS_KAFKA_BOLT, region), speakerFlowKafkaBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_FLOW_REQUEST_BOLT, Stream.formatWithRegion(Stream.SPEAKER_FLOW_HS, region)); } } private void createSpeakerFlowPingRequestStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaSpout speakerPingKafkaSpout = buildKafkaSpout(topicsConfig.getSpeakerFlowPingTopic(), ComponentType.SPEAKER_PING_KAFKA_SPOUT); builder.setSpout(ComponentType.SPEAKER_PING_KAFKA_SPOUT, speakerPingKafkaSpout); for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt 
speakerPingKafkaBolt = buildKafkaBolt( Stream.formatWithRegion(topicsConfig.getSpeakerFlowPingRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.SPEAKER_PING_KAFKA_BOLT, region), speakerPingKafkaBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_PING_REQUEST_BOLT, Stream.formatWithRegion(Stream.SPEAKER_PING, region)); } RequestBolt speakerPingRequestBolt = new RequestBolt(Stream.SPEAKER_PING, topologyConfig.getFloodlightRegions()); builder.setBolt(ComponentType.SPEAKER_PING_REQUEST_BOLT, speakerPingRequestBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_PING_KAFKA_SPOUT) .allGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); } private void createSpeakerRequestStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaSpout speakerKafkaSpout = buildKafkaSpout(topicsConfig.getSpeakerTopic(), ComponentType.SPEAKER_KAFKA_SPOUT); builder.setSpout(ComponentType.SPEAKER_KAFKA_SPOUT, speakerKafkaSpout); for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt speakerKafkaBolt = buildKafkaBolt( Stream.formatWithRegion(topicsConfig.getSpeakerRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.SPEAKER_KAFKA_BOLT, region), speakerKafkaBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_REQUEST_BOLT, Stream.formatWithRegion(Stream.SPEAKER, region)); } SpeakerRequestBolt speakerRequestBolt = new SpeakerRequestBolt(Stream.SPEAKER, topologyConfig.getFloodlightRegions()); builder.setBolt(ComponentType.SPEAKER_REQUEST_BOLT, speakerRequestBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_KAFKA_SPOUT) .allGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); } private void createKildaTopoDiscoSpout(TopologyBuilder builder, int parallelism, List<String> kildaTopoDiscoTopics) { KafkaSpout kildaTopoDiscoSpout = buildKafkaSpout(kildaTopoDiscoTopics, ComponentType.KILDA_TOPO_DISCO_KAFKA_SPOUT); 
builder.setSpout(ComponentType.KILDA_TOPO_DISCO_KAFKA_SPOUT, kildaTopoDiscoSpout, parallelism); } private void createKildaTopoDiscoKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaTopoDiscoKafkaBolt = buildKafkaBolt(topicsConfig.getTopoDiscoTopic()); builder.setBolt(ComponentType.KILDA_TOPO_DISCO_KAFKA_BOLT, kildaTopoDiscoKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.KILDA_TOPO_DISCO); } private void createKildaTopoDiscoReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaTopoDiscoTopics) { createKildaTopoDiscoSpout(builder, parallelism, kildaTopoDiscoTopics); createKildaTopoDiscoKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_TOPO_DISCO); builder.setBolt(ComponentType.KILDA_TOPO_DISCO_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_KAFKA_SPOUT); } private void createSpeakerDiscoSpout(TopologyBuilder builder, int parallelism, String kildaTopoDiscoTopic) { KafkaSpout speakerDiscoSpout = buildKafkaSpout(kildaTopoDiscoTopic, ComponentType.SPEAKER_DISCO_KAFKA_SPOUT); builder.setSpout(ComponentType.SPEAKER_DISCO_KAFKA_SPOUT, speakerDiscoSpout, parallelism); } private void createSpeakerDiscoKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt speakerDiscoKafkaBolt = buildKafkaBolt( Stream.formatWithRegion(topicsConfig.getSpeakerDiscoRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.SPEAKER_DISCO_KAFKA_BOLT, region), speakerDiscoKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.formatWithRegion(Stream.SPEAKER_DISCO, region)); } } private void createDiscoveryPipelines(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { List<String> kildaTopoDiscoTopics = new 
ArrayList<>(); for (String region : topologyConfig.getFloodlightRegions()) { kildaTopoDiscoTopics.add(Stream.formatWithRegion(topicsConfig.getTopoDiscoRegionTopic(), region)); } createKildaTopoDiscoSpout(builder, parallelism, kildaTopoDiscoTopics); createKildaTopoDiscoKafkaBolt(builder, parallelism, topicsConfig); createSpeakerDiscoSpout(builder, parallelism, topicsConfig.getSpeakerDiscoTopic()); createSpeakerDiscoKafkaBolt(builder, parallelism, topicsConfig); DiscoveryBolt discoveryBolt = new DiscoveryBolt( persistenceManager, topologyConfig.getFloodlightRegions(), topologyConfig.getFloodlightAliveTimeout(), topologyConfig.getFloodlightAliveInterval(), topologyConfig.getFloodlightDumpInterval()); builder.setBolt(ComponentType.KILDA_TOPO_DISCO_BOLT, discoveryBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_KAFKA_SPOUT) .shuffleGrouping(ComponentType.SPEAKER_DISCO_KAFKA_SPOUT); } private void createStatsStatsRequestStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaSpout statsStatsRequestKafkaSpout = buildKafkaSpout(topicsConfig.getStatsStatsRequestPrivTopic(), ComponentType.STATS_STATS_REQUEST_KAFKA_SPOUT); builder.setSpout(ComponentType.STATS_STATS_REQUEST_KAFKA_SPOUT, statsStatsRequestKafkaSpout); for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt statsStatsRequestKafkaBolt = buildKafkaBolt( Stream.formatWithRegion(topicsConfig.getStatsStatsRequestPrivRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.STATS_STATS_REQUEST_KAFKA_BOLT, region), statsStatsRequestKafkaBolt, parallelism) .shuffleGrouping(ComponentType.STATS_STATS_REQUEST_BOLT, Stream.formatWithRegion(Stream.STATS_STATS_REQUEST_PRIV, region)); } BroadcastRequestBolt speakerRequestBolt = new BroadcastRequestBolt(Stream.STATS_STATS_REQUEST_PRIV, topologyConfig.getFloodlightRegions()); builder.setBolt(ComponentType.STATS_STATS_REQUEST_BOLT, speakerRequestBolt, parallelism) 
.shuffleGrouping(ComponentType.STATS_STATS_REQUEST_KAFKA_SPOUT); } private void createFlStatsSwitchesStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaFlStatsSwitchesTopics) { KafkaSpout flStatsSwitchesSpout = buildKafkaSpout(kildaFlStatsSwitchesTopics, ComponentType.FL_STATS_SWITCHES_SPOUT); builder.setSpout(ComponentType.FL_STATS_SWITCHES_SPOUT, flStatsSwitchesSpout); ReplyBolt replyBolt = new ReplyBolt(Stream.FL_STATS_SWITCHES); builder.setBolt(ComponentType.FL_STATS_SWITCHES_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.FL_STATS_SWITCHES_SPOUT); KafkaBolt kildaFlStatsSwtichesKafkaBolt = buildKafkaBolt(topicsConfig.getFlStatsSwitchesPrivTopic()); builder.setBolt(ComponentType.FL_STATS_SWITCHES_KAFKA_BOLT, kildaFlStatsSwtichesKafkaBolt, parallelism) .shuffleGrouping(ComponentType.FL_STATS_SWITCHES_REPLY_BOLT, Stream.FL_STATS_SWITCHES); } @Override public StormTopology createTopology() { logger.info("Creating FlowTopology - {}", topologyName); TopologyBuilder builder = new TopologyBuilder(); Integer newParallelism = topologyConfig.getNewParallelism(); KafkaTopicsConfig topicsConfig = topologyConfig.getKafkaTopics(); Set<String> regions = topologyConfig.getFloodlightRegions(); // Floodlight -- kilda.flow --> Router List<String> kildaFlowTopics = new ArrayList<>(); List<String> kildaFlowHsTopics = new ArrayList<>(); for (String region: regions) { kildaFlowTopics.add(Stream.formatWithRegion(topicsConfig.getFlowRegionTopic(), region)); kildaFlowHsTopics.add(Stream.formatWithRegion(topicsConfig.getFlowHsSpeakerRegionTopic(), region)); } createKildaFlowReplyStream(builder, newParallelism, topicsConfig, kildaFlowTopics); createKildaFlowHsReplyStream(builder, newParallelism, topicsConfig, kildaFlowHsTopics); // Floodlight -- kilda.ping --> Router List<String> kildaPingTopics = new ArrayList<>(); for (String region: regions) { kildaPingTopics.add(Stream.formatWithRegion(topicsConfig.getPingRegionTopic(), 
region)); } createKildaPingReplyStream(builder, newParallelism, topicsConfig, kildaPingTopics); // Floodlight -- kilda.stats --> Router List<String> kildaStatsTopics = new ArrayList<>(); for (String region: regions) { kildaStatsTopics.add(Stream.formatWithRegion(topicsConfig.getStatsRegionTopic(), region)); } createKildaStatsReplyStream(builder, newParallelism, topicsConfig, kildaStatsTopics); // Floodlight -- kilda.topo.isl.latency --> Router List<String> kildaIslLatencyTopics = new ArrayList<>(); for (String region: regions) { kildaIslLatencyTopics.add(Stream.formatWithRegion(topicsConfig.getTopoIslLatencyRegionTopic(), region)); } createKildaIslLatencyReplyStream(builder, newParallelism, topicsConfig, kildaIslLatencyTopics); // Floodlight -- kilda.floodlight.connected.devices.priv --> Router List<String> kildaConnectedDevicesTopics = new ArrayList<>(); for (String region: regions) { kildaConnectedDevicesTopics.add(Stream.formatWithRegion( topicsConfig.getTopoConnectedDevicesRegionTopic(), region)); } createKildaConnectedDevicesReplyStream(builder, newParallelism, topicsConfig, kildaConnectedDevicesTopics); // Floodlight -- kilda.topo.switch.manager --> Router List<String> kildaSwitchManagerTopics = new ArrayList<>(); for (String region: regions) { kildaSwitchManagerTopics.add( Stream.formatWithRegion(topicsConfig.getTopoSwitchManagerRegionTopic(), region)); } createKildaSwitchManagerReplyStream(builder, newParallelism, topicsConfig, kildaSwitchManagerTopics); // Floodlight -- kilda.northbound --> Router List<String> kildaNorthboundTopics = new ArrayList<>(); for (String region: regions) { kildaNorthboundTopics.add(Stream.formatWithRegion(topicsConfig.getNorthboundRegionTopic(), region)); } createKildaNorthboundReplyStream(builder, newParallelism, topicsConfig, kildaNorthboundTopics); // Floodlight -- kilda.topo.nb --> Router List<String> kildaNbWorkerTopics = new ArrayList<>(); for (String region: regions) { 
kildaNbWorkerTopics.add(Stream.formatWithRegion(topicsConfig.getTopoNbRegionTopic(), region)); } createKildaNbWorkerReplyStream(builder, newParallelism, topicsConfig, kildaNbWorkerTopics); // Part3 Request to Floodlights Integer parallelism = topologyConfig.getParallelism(); // Storm -- kilda.speaker.flow --> Floodlight createSpeakerFlowRequestStream(builder, parallelism, topicsConfig); // Storm -- kilda.speaker.flow.ping --> Floodlight createSpeakerFlowPingRequestStream(builder, parallelism, topicsConfig); // Storm -- kilda.speaker --> Floodlight createSpeakerRequestStream(builder, parallelism, topicsConfig); // Storm -- kilda.speaker.disco --> Floodlight // Storm <-- kilda.topo.disco -- Floodlight createDiscoveryPipelines(builder, parallelism, topicsConfig); // Storm -- kilda.stats.stats-request.priv --> Floodlight createStatsStatsRequestStream(builder, parallelism, topicsConfig); // Storm <-- kilda.fl-stats.switches.priv -- Floodlight List<String> kildaFlStatsSwitchesTopics = regions.stream() .map(region -> Stream.formatWithRegion(topicsConfig.getFlStatsSwitchesPrivRegionTopic(), region)) .collect(Collectors.toList()); createFlStatsSwitchesStream(builder, parallelism, topicsConfig, kildaFlStatsSwitchesTopics); return builder.createTopology(); } /** * Topology entry point. */ public static void main(String[] args) { try { LaunchEnvironment env = new LaunchEnvironment(args); (new FloodlightRouterTopology(env)).setup(); } catch (Exception e) { System.exit(handleLaunchException(e)); } } }
services/wfm/src/main/java/org/openkilda/wfm/topology/floodlightrouter/FloodlightRouterTopology.java
/* Copyright 2019 Telstra Open Source * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openkilda.wfm.topology.floodlightrouter; import org.openkilda.config.KafkaTopicsConfig; import org.openkilda.persistence.PersistenceManager; import org.openkilda.persistence.spi.PersistenceProvider; import org.openkilda.wfm.LaunchEnvironment; import org.openkilda.wfm.topology.AbstractTopology; import org.openkilda.wfm.topology.floodlightrouter.bolts.BroadcastRequestBolt; import org.openkilda.wfm.topology.floodlightrouter.bolts.DiscoveryBolt; import org.openkilda.wfm.topology.floodlightrouter.bolts.ReplyBolt; import org.openkilda.wfm.topology.floodlightrouter.bolts.RequestBolt; import org.openkilda.wfm.topology.floodlightrouter.bolts.SpeakerRequestBolt; import org.apache.storm.generated.StormTopology; import org.apache.storm.kafka.bolt.KafkaBolt; import org.apache.storm.kafka.spout.KafkaSpout; import org.apache.storm.topology.TopologyBuilder; import java.util.ArrayList; import java.util.List; import java.util.Set; import java.util.stream.Collectors; /** * Floodlight topology. 
*/ public class FloodlightRouterTopology extends AbstractTopology<FloodlightRouterTopologyConfig> { private final PersistenceManager persistenceManager; public FloodlightRouterTopology(LaunchEnvironment env) { super(env, FloodlightRouterTopologyConfig.class); persistenceManager = PersistenceProvider.getInstance().createPersistenceManager(configurationProvider); } private void createKildaFlowSpout(TopologyBuilder builder, int parallelism, List<String> kildaFlowTopics) { KafkaSpout kildaFlowSpout = buildKafkaSpout(kildaFlowTopics, ComponentType.KILDA_FLOW_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_FLOW_KAFKA_SPOUT, kildaFlowSpout, parallelism); } private void createKildaFlowKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaFlowKafkaBolt = buildKafkaBolt(topicsConfig.getFlowTopic()); builder.setBolt(ComponentType.KILDA_FLOW_KAFKA_BOLT, kildaFlowKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_FLOW_REPLY_BOLT, Stream.KILDA_FLOW); } private void createKildaFlowHsSpout(TopologyBuilder builder, int parallelism, List<String> kildaFlowTopics) { KafkaSpout kildaFlowHsSpout = buildKafkaSpoutForAbstractMessage(kildaFlowTopics, ComponentType.KILDA_FLOW_HS_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_FLOW_HS_KAFKA_SPOUT, kildaFlowHsSpout, parallelism); } private void createKildaFlowHsKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaFlowHsKafkaBolt = buildKafkaBoltWithAbstractMessageSupport(topicsConfig.getFlowHsSpeakerTopic()); builder.setBolt(ComponentType.KILDA_FLOW_HS_KAFKA_BOLT, kildaFlowHsKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_FLOW_HS_REPLY_BOLT, Stream.KILDA_HS_FLOW); } private void createKildaFlowReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaFlowTopics) { createKildaFlowSpout(builder, parallelism, kildaFlowTopics); createKildaFlowKafkaBolt(builder, parallelism, 
topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_FLOW); builder.setBolt(ComponentType.KILDA_FLOW_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_FLOW_KAFKA_SPOUT); } private void createKildaFlowHsReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaFlowHsTopics) { createKildaFlowHsSpout(builder, parallelism, kildaFlowHsTopics); createKildaFlowHsKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_HS_FLOW); builder.setBolt(ComponentType.KILDA_FLOW_HS_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_FLOW_HS_KAFKA_SPOUT); } private void createKildaPingSpout(TopologyBuilder builder, int parallelism, List<String> kildaPingTopics) { KafkaSpout kildaPingSpout = buildKafkaSpout(kildaPingTopics, ComponentType.KILDA_PING_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_PING_KAFKA_SPOUT, kildaPingSpout, parallelism); } private void createKildaPingKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaPingKafkaBolt = buildKafkaBolt(topicsConfig.getPingTopic()); builder.setBolt(ComponentType.KILDA_PING_KAFKA_BOLT, kildaPingKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_PING_REPLY_BOLT, Stream.KILDA_PING); } private void createKildaPingReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaPingTopics) { createKildaPingSpout(builder, parallelism, kildaPingTopics); createKildaPingKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_PING); builder.setBolt(ComponentType.KILDA_PING_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_PING_KAFKA_SPOUT); } private void createKildaStatsSpout(TopologyBuilder builder, int parallelism, List<String> kildaStatsTopics) { KafkaSpout kildaStatsSpout = buildKafkaSpout(kildaStatsTopics, 
ComponentType.KILDA_STATS_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_STATS_KAFKA_SPOUT, kildaStatsSpout, parallelism); } private void createKildaStatsKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaStatsKafkaBolt = buildKafkaBolt(topicsConfig.getStatsTopic()); builder.setBolt(ComponentType.KILDA_STATS_KAFKA_BOLT, kildaStatsKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_STATS_REPLY_BOLT, Stream.KILDA_STATS); } private void createKildaStatsReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaStatsTopics) { createKildaStatsSpout(builder, parallelism, kildaStatsTopics); createKildaStatsKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_STATS); builder.setBolt(ComponentType.KILDA_STATS_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_STATS_KAFKA_SPOUT); } private void createKildaIslLatencySpout(TopologyBuilder builder, int parallelism, List<String> kildaStatsTopics) { KafkaSpout kildaStatsSpout = buildKafkaSpout(kildaStatsTopics, ComponentType.KILDA_ISL_LATENCY_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_ISL_LATENCY_KAFKA_SPOUT, kildaStatsSpout, parallelism); } private void createKildaIslLatencyKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaStatsKafkaBolt = buildKafkaBolt(topicsConfig.getTopoIslLatencyTopic()); builder.setBolt(ComponentType.KILDA_ISL_LATENCY_KAFKA_BOLT, kildaStatsKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_ISL_LATENCY_REPLY_BOLT, Stream.KILDA_ISL_LATENCY); } private void createKildaIslLatencyReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaIslLatencyTopics) { createKildaIslLatencySpout(builder, parallelism, kildaIslLatencyTopics); createKildaIslLatencyKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new 
ReplyBolt(Stream.KILDA_ISL_LATENCY); builder.setBolt(ComponentType.KILDA_ISL_LATENCY_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_ISL_LATENCY_KAFKA_SPOUT); } private void createKildaConnectedDevicesSpout(TopologyBuilder builder, int parallelism, List<String> topics) { KafkaSpout spout = buildKafkaSpout(topics, ComponentType.KILDA_CONNECTED_DEVICES_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_CONNECTED_DEVICES_KAFKA_SPOUT, spout, parallelism); } private void createKildaConnectedDevicesKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kafkaBolt = buildKafkaBolt(topicsConfig.getTopoConnectedDevicesTopic()); builder.setBolt(ComponentType.KILDA_CONNECTED_DEVICES_KAFKA_BOLT, kafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_CONNECTED_DEVICES_REPLY_BOLT, Stream.KILDA_CONNECTED_DEVICES); } private void createKildaConnectedDevicesReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> topics) { createKildaConnectedDevicesSpout(builder, parallelism, topics); createKildaConnectedDevicesKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_CONNECTED_DEVICES); builder.setBolt(ComponentType.KILDA_CONNECTED_DEVICES_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_CONNECTED_DEVICES_KAFKA_SPOUT); } private void createKildaSwitchManagerSpout(TopologyBuilder builder, int parallelism, List<String> kildaSwitchManagerTopics) { KafkaSpout kildaSwitchManagerSpout = buildKafkaSpout(kildaSwitchManagerTopics, ComponentType.KILDA_SWITCH_MANAGER_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_SWITCH_MANAGER_KAFKA_SPOUT, kildaSwitchManagerSpout, parallelism); } private void createKildaSwitchManagerKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaSwitchManagerKafkaBolt = buildKafkaBolt(topicsConfig.getTopoSwitchManagerTopic()); 
builder.setBolt(ComponentType.KILDA_SWITCH_MANAGER_KAFKA_BOLT, kildaSwitchManagerKafkaBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_REQUEST_BOLT, Stream.KILDA_SWITCH_MANAGER) .shuffleGrouping(ComponentType.KILDA_SWITCH_MANAGER_REPLY_BOLT, Stream.KILDA_SWITCH_MANAGER); } private void createKildaSwitchManagerReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaSwitchManagerTopics) { createKildaSwitchManagerSpout(builder, parallelism, kildaSwitchManagerTopics); createKildaSwitchManagerKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_SWITCH_MANAGER); builder.setBolt(ComponentType.KILDA_SWITCH_MANAGER_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_SWITCH_MANAGER_KAFKA_SPOUT); } private void createKildaNorthboundSpout(TopologyBuilder builder, int parallelism, List<String> kildaNorthboundTopics) { KafkaSpout kildaNorthboundSpout = buildKafkaSpout(kildaNorthboundTopics, ComponentType.NORTHBOUND_REPLY_KAFKA_SPOUT); builder.setSpout(ComponentType.NORTHBOUND_REPLY_KAFKA_SPOUT, kildaNorthboundSpout, parallelism); } private void createKildaNorthboundKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaNorthboundKafkaBolt = buildKafkaBolt(topicsConfig.getNorthboundTopic()); builder.setBolt(ComponentType.NORTHBOUND_REPLY_KAFKA_BOLT, kildaNorthboundKafkaBolt, parallelism) .shuffleGrouping(ComponentType.NORTHBOUND_REPLY_BOLT, Stream.NORTHBOUND_REPLY); } private void createKildaNorthboundReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaNorthboundTopics) { createKildaNorthboundSpout(builder, parallelism, kildaNorthboundTopics); createKildaNorthboundKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.NORTHBOUND_REPLY); builder.setBolt(ComponentType.NORTHBOUND_REPLY_BOLT, replyBolt, parallelism) 
.shuffleGrouping(ComponentType.NORTHBOUND_REPLY_KAFKA_SPOUT); } private void createKildaNbWorkerSpout(TopologyBuilder builder, int parallelism, List<String> kildaNbWorkerTopics) { KafkaSpout kildaNbWorkerSpout = buildKafkaSpout(kildaNbWorkerTopics, ComponentType.KILDA_NB_WORKER_KAFKA_SPOUT); builder.setSpout(ComponentType.KILDA_NB_WORKER_KAFKA_SPOUT, kildaNbWorkerSpout, parallelism); } private void createKildaNbWorkerKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaNbWorkerKafkaBolt = buildKafkaBolt(topicsConfig.getTopoNbTopic()); builder.setBolt(ComponentType.KILDA_NB_WORKER_KAFKA_BOLT, kildaNbWorkerKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_NB_WORKER_REPLY_BOLT, Stream.NB_WORKER); } private void createKildaNbWorkerReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaNbWorkerTopics) { createKildaNbWorkerSpout(builder, parallelism, kildaNbWorkerTopics); createKildaNbWorkerKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.NB_WORKER); builder.setBolt(ComponentType.KILDA_NB_WORKER_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_NB_WORKER_KAFKA_SPOUT); } private void createSpeakerFlowRequestStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { createSpeakerFlowRequestSpoutAndKafkaBolt(builder, parallelism, topicsConfig); createSpeakerFlowHsRequestSpoutAndKafkaBolt(builder, parallelism, topicsConfig); RequestBolt speakerFlowRequestBolt = new RequestBolt(Stream.SPEAKER_FLOW, Stream.SPEAKER_FLOW_HS, topologyConfig.getFloodlightRegions()); builder.setBolt(ComponentType.SPEAKER_FLOW_REQUEST_BOLT, speakerFlowRequestBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_FLOW_KAFKA_SPOUT) .shuffleGrouping(ComponentType.SPEAKER_FLOW_HS_KAFKA_SPOUT) .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); } private void 
createSpeakerFlowRequestSpoutAndKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaSpout speakerFlowKafkaSpout = buildKafkaSpout(topicsConfig.getSpeakerFlowTopic(), ComponentType.SPEAKER_FLOW_KAFKA_SPOUT); builder.setSpout(ComponentType.SPEAKER_FLOW_KAFKA_SPOUT, speakerFlowKafkaSpout); for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt speakerFlowKafkaBolt = buildKafkaBolt( Stream.formatWithRegion(topicsConfig.getSpeakerFlowRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.SPEAKER_FLOW_KAFKA_BOLT, region), speakerFlowKafkaBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_FLOW_REQUEST_BOLT, Stream.formatWithRegion(Stream.SPEAKER_FLOW, region)); } } private void createSpeakerFlowHsRequestSpoutAndKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaSpout speakerFlowKafkaSpout = buildKafkaSpoutForAbstractMessage(topicsConfig.getSpeakerFlowHsTopic(), ComponentType.SPEAKER_FLOW_HS_KAFKA_SPOUT); builder.setSpout(ComponentType.SPEAKER_FLOW_HS_KAFKA_SPOUT, speakerFlowKafkaSpout); for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt speakerFlowKafkaBolt = buildKafkaBoltWithAbstractMessageSupport( Stream.formatWithRegion(topicsConfig.getSpeakerFlowRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.SPEAKER_FLOW_HS_KAFKA_BOLT, region), speakerFlowKafkaBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_FLOW_REQUEST_BOLT, Stream.formatWithRegion(Stream.SPEAKER_FLOW_HS, region)); } } private void createSpeakerFlowPingRequestStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaSpout speakerPingKafkaSpout = buildKafkaSpout(topicsConfig.getSpeakerFlowPingTopic(), ComponentType.SPEAKER_PING_KAFKA_SPOUT); builder.setSpout(ComponentType.SPEAKER_PING_KAFKA_SPOUT, speakerPingKafkaSpout); for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt 
speakerPingKafkaBolt = buildKafkaBolt( Stream.formatWithRegion(topicsConfig.getSpeakerFlowPingRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.SPEAKER_PING_KAFKA_BOLT, region), speakerPingKafkaBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_PING_REQUEST_BOLT, Stream.formatWithRegion(Stream.SPEAKER_PING, region)); } RequestBolt speakerPingRequestBolt = new RequestBolt(Stream.SPEAKER_PING, topologyConfig.getFloodlightRegions()); builder.setBolt(ComponentType.SPEAKER_PING_REQUEST_BOLT, speakerPingRequestBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_PING_KAFKA_SPOUT) .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); } private void createSpeakerRequestStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaSpout speakerKafkaSpout = buildKafkaSpout(topicsConfig.getSpeakerTopic(), ComponentType.SPEAKER_KAFKA_SPOUT); builder.setSpout(ComponentType.SPEAKER_KAFKA_SPOUT, speakerKafkaSpout); for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt speakerKafkaBolt = buildKafkaBolt( Stream.formatWithRegion(topicsConfig.getSpeakerRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.SPEAKER_KAFKA_BOLT, region), speakerKafkaBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_REQUEST_BOLT, Stream.formatWithRegion(Stream.SPEAKER, region)); } SpeakerRequestBolt speakerRequestBolt = new SpeakerRequestBolt(Stream.SPEAKER, topologyConfig.getFloodlightRegions()); builder.setBolt(ComponentType.SPEAKER_REQUEST_BOLT, speakerRequestBolt, parallelism) .shuffleGrouping(ComponentType.SPEAKER_KAFKA_SPOUT) .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); } private void createKildaTopoDiscoSpout(TopologyBuilder builder, int parallelism, List<String> kildaTopoDiscoTopics) { KafkaSpout kildaTopoDiscoSpout = buildKafkaSpout(kildaTopoDiscoTopics, ComponentType.KILDA_TOPO_DISCO_KAFKA_SPOUT); 
builder.setSpout(ComponentType.KILDA_TOPO_DISCO_KAFKA_SPOUT, kildaTopoDiscoSpout, parallelism); } private void createKildaTopoDiscoKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaBolt kildaTopoDiscoKafkaBolt = buildKafkaBolt(topicsConfig.getTopoDiscoTopic()); builder.setBolt(ComponentType.KILDA_TOPO_DISCO_KAFKA_BOLT, kildaTopoDiscoKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.KILDA_TOPO_DISCO); } private void createKildaTopoDiscoReplyStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaTopoDiscoTopics) { createKildaTopoDiscoSpout(builder, parallelism, kildaTopoDiscoTopics); createKildaTopoDiscoKafkaBolt(builder, parallelism, topicsConfig); ReplyBolt replyBolt = new ReplyBolt(Stream.KILDA_TOPO_DISCO); builder.setBolt(ComponentType.KILDA_TOPO_DISCO_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_KAFKA_SPOUT); } private void createSpeakerDiscoSpout(TopologyBuilder builder, int parallelism, String kildaTopoDiscoTopic) { KafkaSpout speakerDiscoSpout = buildKafkaSpout(kildaTopoDiscoTopic, ComponentType.SPEAKER_DISCO_KAFKA_SPOUT); builder.setSpout(ComponentType.SPEAKER_DISCO_KAFKA_SPOUT, speakerDiscoSpout, parallelism); } private void createSpeakerDiscoKafkaBolt(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt speakerDiscoKafkaBolt = buildKafkaBolt( Stream.formatWithRegion(topicsConfig.getSpeakerDiscoRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.SPEAKER_DISCO_KAFKA_BOLT, region), speakerDiscoKafkaBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.formatWithRegion(Stream.SPEAKER_DISCO, region)); } } private void createDiscoveryPipelines(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { List<String> kildaTopoDiscoTopics = new 
ArrayList<>(); for (String region : topologyConfig.getFloodlightRegions()) { kildaTopoDiscoTopics.add(Stream.formatWithRegion(topicsConfig.getTopoDiscoRegionTopic(), region)); } createKildaTopoDiscoSpout(builder, parallelism, kildaTopoDiscoTopics); createKildaTopoDiscoKafkaBolt(builder, parallelism, topicsConfig); createSpeakerDiscoSpout(builder, parallelism, topicsConfig.getSpeakerDiscoTopic()); createSpeakerDiscoKafkaBolt(builder, parallelism, topicsConfig); DiscoveryBolt discoveryBolt = new DiscoveryBolt( persistenceManager, topologyConfig.getFloodlightRegions(), topologyConfig.getFloodlightAliveTimeout(), topologyConfig.getFloodlightAliveInterval(), topologyConfig.getFloodlightDumpInterval()); builder.setBolt(ComponentType.KILDA_TOPO_DISCO_BOLT, discoveryBolt, parallelism) .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_KAFKA_SPOUT) .shuffleGrouping(ComponentType.SPEAKER_DISCO_KAFKA_SPOUT); } private void createStatsStatsRequestStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig) { KafkaSpout statsStatsRequestKafkaSpout = buildKafkaSpout(topicsConfig.getStatsStatsRequestPrivTopic(), ComponentType.STATS_STATS_REQUEST_KAFKA_SPOUT); builder.setSpout(ComponentType.STATS_STATS_REQUEST_KAFKA_SPOUT, statsStatsRequestKafkaSpout); for (String region: topologyConfig.getFloodlightRegions()) { KafkaBolt statsStatsRequestKafkaBolt = buildKafkaBolt( Stream.formatWithRegion(topicsConfig.getStatsStatsRequestPrivRegionTopic(), region)); builder.setBolt(Stream.formatWithRegion(ComponentType.STATS_STATS_REQUEST_KAFKA_BOLT, region), statsStatsRequestKafkaBolt, parallelism) .shuffleGrouping(ComponentType.STATS_STATS_REQUEST_BOLT, Stream.formatWithRegion(Stream.STATS_STATS_REQUEST_PRIV, region)); } BroadcastRequestBolt speakerRequestBolt = new BroadcastRequestBolt(Stream.STATS_STATS_REQUEST_PRIV, topologyConfig.getFloodlightRegions()); builder.setBolt(ComponentType.STATS_STATS_REQUEST_BOLT, speakerRequestBolt, parallelism) 
.shuffleGrouping(ComponentType.STATS_STATS_REQUEST_KAFKA_SPOUT); } private void createFlStatsSwitchesStream(TopologyBuilder builder, int parallelism, KafkaTopicsConfig topicsConfig, List<String> kildaFlStatsSwitchesTopics) { KafkaSpout flStatsSwitchesSpout = buildKafkaSpout(kildaFlStatsSwitchesTopics, ComponentType.FL_STATS_SWITCHES_SPOUT); builder.setSpout(ComponentType.FL_STATS_SWITCHES_SPOUT, flStatsSwitchesSpout); ReplyBolt replyBolt = new ReplyBolt(Stream.FL_STATS_SWITCHES); builder.setBolt(ComponentType.FL_STATS_SWITCHES_REPLY_BOLT, replyBolt, parallelism) .shuffleGrouping(ComponentType.FL_STATS_SWITCHES_SPOUT); KafkaBolt kildaFlStatsSwtichesKafkaBolt = buildKafkaBolt(topicsConfig.getFlStatsSwitchesPrivTopic()); builder.setBolt(ComponentType.FL_STATS_SWITCHES_KAFKA_BOLT, kildaFlStatsSwtichesKafkaBolt, parallelism) .shuffleGrouping(ComponentType.FL_STATS_SWITCHES_REPLY_BOLT, Stream.FL_STATS_SWITCHES); } @Override public StormTopology createTopology() { logger.info("Creating FlowTopology - {}", topologyName); TopologyBuilder builder = new TopologyBuilder(); Integer parallelism = topologyConfig.getParallelism(); KafkaTopicsConfig topicsConfig = topologyConfig.getKafkaTopics(); Set<String> regions = topologyConfig.getFloodlightRegions(); // Floodlight -- kilda.flow --> Router List<String> kildaFlowTopics = new ArrayList<>(); List<String> kildaFlowHsTopics = new ArrayList<>(); for (String region: regions) { kildaFlowTopics.add(Stream.formatWithRegion(topicsConfig.getFlowRegionTopic(), region)); kildaFlowHsTopics.add(Stream.formatWithRegion(topicsConfig.getFlowHsSpeakerRegionTopic(), region)); } createKildaFlowReplyStream(builder, parallelism, topicsConfig, kildaFlowTopics); createKildaFlowHsReplyStream(builder, parallelism, topicsConfig, kildaFlowHsTopics); // Floodlight -- kilda.ping --> Router List<String> kildaPingTopics = new ArrayList<>(); for (String region: regions) { kildaPingTopics.add(Stream.formatWithRegion(topicsConfig.getPingRegionTopic(), region)); } 
createKildaPingReplyStream(builder, parallelism, topicsConfig, kildaPingTopics); // Floodlight -- kilda.stats --> Router List<String> kildaStatsTopics = new ArrayList<>(); for (String region: regions) { kildaStatsTopics.add(Stream.formatWithRegion(topicsConfig.getStatsRegionTopic(), region)); } createKildaStatsReplyStream(builder, parallelism, topicsConfig, kildaStatsTopics); // Floodlight -- kilda.topo.isl.latency --> Router List<String> kildaIslLatencyTopics = new ArrayList<>(); for (String region: regions) { kildaIslLatencyTopics.add(Stream.formatWithRegion(topicsConfig.getTopoIslLatencyRegionTopic(), region)); } createKildaIslLatencyReplyStream(builder, parallelism, topicsConfig, kildaIslLatencyTopics); // Floodlight -- kilda.floodlight.connected.devices.priv --> Router List<String> kildaConnectedDevicesTopics = new ArrayList<>(); for (String region: regions) { kildaConnectedDevicesTopics.add(Stream.formatWithRegion( topicsConfig.getTopoConnectedDevicesRegionTopic(), region)); } createKildaConnectedDevicesReplyStream(builder, parallelism, topicsConfig, kildaConnectedDevicesTopics); // Floodlight -- kilda.topo.switch.manager --> Router List<String> kildaSwitchManagerTopics = new ArrayList<>(); for (String region: regions) { kildaSwitchManagerTopics.add( Stream.formatWithRegion(topicsConfig.getTopoSwitchManagerRegionTopic(), region)); } createKildaSwitchManagerReplyStream(builder, parallelism, topicsConfig, kildaSwitchManagerTopics); // Floodlight -- kilda.northbound --> Router List<String> kildaNorthboundTopics = new ArrayList<>(); for (String region: regions) { kildaNorthboundTopics.add(Stream.formatWithRegion(topicsConfig.getNorthboundRegionTopic(), region)); } createKildaNorthboundReplyStream(builder, parallelism, topicsConfig, kildaNorthboundTopics); // Floodlight -- kilda.topo.nb --> Router List<String> kildaNbWorkerTopics = new ArrayList<>(); for (String region: regions) { kildaNbWorkerTopics.add(Stream.formatWithRegion(topicsConfig.getTopoNbRegionTopic(), 
region)); } createKildaNbWorkerReplyStream(builder, parallelism, topicsConfig, kildaNbWorkerTopics); // Part3 Request to Floodlights // Storm -- kilda.speaker.flow --> Floodlight createSpeakerFlowRequestStream(builder, parallelism, topicsConfig); // Storm -- kilda.speaker.flow.ping --> Floodlight createSpeakerFlowPingRequestStream(builder, parallelism, topicsConfig); // Storm -- kilda.speaker --> Floodlight createSpeakerRequestStream(builder, parallelism, topicsConfig); // Storm -- kilda.speaker.disco --> Floodlight // Storm <-- kilda.topo.disco -- Floodlight createDiscoveryPipelines(builder, parallelism, topicsConfig); // Storm -- kilda.stats.stats-request.priv --> Floodlight createStatsStatsRequestStream(builder, parallelism, topicsConfig); // Storm <-- kilda.fl-stats.switches.priv -- Floodlight List<String> kildaFlStatsSwitchesTopics = regions.stream() .map(region -> Stream.formatWithRegion(topicsConfig.getFlStatsSwitchesPrivRegionTopic(), region)) .collect(Collectors.toList()); createFlStatsSwitchesStream(builder, parallelism, topicsConfig, kildaFlStatsSwitchesTopics); return builder.createTopology(); } /** * Topology entry point. */ public static void main(String[] args) { try { LaunchEnvironment env = new LaunchEnvironment(args); (new FloodlightRouterTopology(env)).setup(); } catch (Exception e) { System.exit(handleLaunchException(e)); } } }
FloodlightRouter parallelism tune
services/wfm/src/main/java/org/openkilda/wfm/topology/floodlightrouter/FloodlightRouterTopology.java
FloodlightRouter parallelism tune
<ide><path>ervices/wfm/src/main/java/org/openkilda/wfm/topology/floodlightrouter/FloodlightRouterTopology.java <ide> builder.setBolt(ComponentType.SPEAKER_FLOW_REQUEST_BOLT, speakerFlowRequestBolt, parallelism) <ide> .shuffleGrouping(ComponentType.SPEAKER_FLOW_KAFKA_SPOUT) <ide> .shuffleGrouping(ComponentType.SPEAKER_FLOW_HS_KAFKA_SPOUT) <del> .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); <add> .allGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); <ide> } <ide> <ide> private void createSpeakerFlowRequestSpoutAndKafkaBolt(TopologyBuilder builder, int parallelism, <ide> topologyConfig.getFloodlightRegions()); <ide> builder.setBolt(ComponentType.SPEAKER_PING_REQUEST_BOLT, speakerPingRequestBolt, parallelism) <ide> .shuffleGrouping(ComponentType.SPEAKER_PING_KAFKA_SPOUT) <del> .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); <add> .allGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); <ide> } <ide> <ide> private void createSpeakerRequestStream(TopologyBuilder builder, int parallelism, <ide> topologyConfig.getFloodlightRegions()); <ide> builder.setBolt(ComponentType.SPEAKER_REQUEST_BOLT, speakerRequestBolt, parallelism) <ide> .shuffleGrouping(ComponentType.SPEAKER_KAFKA_SPOUT) <del> .shuffleGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); <add> .allGrouping(ComponentType.KILDA_TOPO_DISCO_BOLT, Stream.REGION_NOTIFICATION); <ide> } <ide> <ide> <ide> logger.info("Creating FlowTopology - {}", topologyName); <ide> <ide> TopologyBuilder builder = new TopologyBuilder(); <del> Integer parallelism = topologyConfig.getParallelism(); <add> Integer newParallelism = topologyConfig.getNewParallelism(); <ide> KafkaTopicsConfig topicsConfig = topologyConfig.getKafkaTopics(); <ide> Set<String> regions = topologyConfig.getFloodlightRegions(); <ide> // Floodlight -- kilda.flow --> Router <ide> 
kildaFlowTopics.add(Stream.formatWithRegion(topicsConfig.getFlowRegionTopic(), region)); <ide> kildaFlowHsTopics.add(Stream.formatWithRegion(topicsConfig.getFlowHsSpeakerRegionTopic(), region)); <ide> } <del> createKildaFlowReplyStream(builder, parallelism, topicsConfig, kildaFlowTopics); <del> createKildaFlowHsReplyStream(builder, parallelism, topicsConfig, kildaFlowHsTopics); <add> createKildaFlowReplyStream(builder, newParallelism, topicsConfig, kildaFlowTopics); <add> createKildaFlowHsReplyStream(builder, newParallelism, topicsConfig, kildaFlowHsTopics); <ide> <ide> // Floodlight -- kilda.ping --> Router <ide> List<String> kildaPingTopics = new ArrayList<>(); <ide> for (String region: regions) { <ide> kildaPingTopics.add(Stream.formatWithRegion(topicsConfig.getPingRegionTopic(), region)); <ide> } <del> createKildaPingReplyStream(builder, parallelism, topicsConfig, kildaPingTopics); <add> createKildaPingReplyStream(builder, newParallelism, topicsConfig, kildaPingTopics); <ide> <ide> // Floodlight -- kilda.stats --> Router <ide> List<String> kildaStatsTopics = new ArrayList<>(); <ide> for (String region: regions) { <ide> kildaStatsTopics.add(Stream.formatWithRegion(topicsConfig.getStatsRegionTopic(), region)); <ide> } <del> createKildaStatsReplyStream(builder, parallelism, topicsConfig, kildaStatsTopics); <add> createKildaStatsReplyStream(builder, newParallelism, topicsConfig, kildaStatsTopics); <ide> <ide> // Floodlight -- kilda.topo.isl.latency --> Router <ide> List<String> kildaIslLatencyTopics = new ArrayList<>(); <ide> for (String region: regions) { <ide> kildaIslLatencyTopics.add(Stream.formatWithRegion(topicsConfig.getTopoIslLatencyRegionTopic(), region)); <ide> } <del> createKildaIslLatencyReplyStream(builder, parallelism, topicsConfig, kildaIslLatencyTopics); <add> createKildaIslLatencyReplyStream(builder, newParallelism, topicsConfig, kildaIslLatencyTopics); <ide> <ide> // Floodlight -- kilda.floodlight.connected.devices.priv --> Router <ide> 
List<String> kildaConnectedDevicesTopics = new ArrayList<>(); <ide> kildaConnectedDevicesTopics.add(Stream.formatWithRegion( <ide> topicsConfig.getTopoConnectedDevicesRegionTopic(), region)); <ide> } <del> createKildaConnectedDevicesReplyStream(builder, parallelism, topicsConfig, kildaConnectedDevicesTopics); <add> createKildaConnectedDevicesReplyStream(builder, newParallelism, topicsConfig, kildaConnectedDevicesTopics); <ide> <ide> // Floodlight -- kilda.topo.switch.manager --> Router <ide> List<String> kildaSwitchManagerTopics = new ArrayList<>(); <ide> kildaSwitchManagerTopics.add( <ide> Stream.formatWithRegion(topicsConfig.getTopoSwitchManagerRegionTopic(), region)); <ide> } <del> createKildaSwitchManagerReplyStream(builder, parallelism, topicsConfig, kildaSwitchManagerTopics); <add> createKildaSwitchManagerReplyStream(builder, newParallelism, topicsConfig, kildaSwitchManagerTopics); <ide> <ide> // Floodlight -- kilda.northbound --> Router <ide> List<String> kildaNorthboundTopics = new ArrayList<>(); <ide> for (String region: regions) { <ide> kildaNorthboundTopics.add(Stream.formatWithRegion(topicsConfig.getNorthboundRegionTopic(), region)); <ide> } <del> createKildaNorthboundReplyStream(builder, parallelism, topicsConfig, kildaNorthboundTopics); <add> createKildaNorthboundReplyStream(builder, newParallelism, topicsConfig, kildaNorthboundTopics); <ide> <ide> // Floodlight -- kilda.topo.nb --> Router <ide> List<String> kildaNbWorkerTopics = new ArrayList<>(); <ide> for (String region: regions) { <ide> kildaNbWorkerTopics.add(Stream.formatWithRegion(topicsConfig.getTopoNbRegionTopic(), region)); <ide> } <del> createKildaNbWorkerReplyStream(builder, parallelism, topicsConfig, kildaNbWorkerTopics); <add> createKildaNbWorkerReplyStream(builder, newParallelism, topicsConfig, kildaNbWorkerTopics); <ide> <ide> // Part3 Request to Floodlights <add> Integer parallelism = topologyConfig.getParallelism(); <ide> // Storm -- kilda.speaker.flow --> Floodlight <ide> 
createSpeakerFlowRequestStream(builder, parallelism, topicsConfig); <ide>
Java
unlicense
dce8e7bb8600f7b6371b1187ab8afb946f61d451
0
carlosefonseca/CEFCommon
package com.carlosefonseca.common.utils; import android.content.Context; import android.content.res.Configuration; import android.content.res.Resources; import android.graphics.*; import android.graphics.drawable.BitmapDrawable; import android.media.ExifInterface; import android.os.AsyncTask; import android.os.Build; import android.os.Environment; import android.support.v4.util.LruCache; import android.text.Layout; import android.text.StaticLayout; import android.text.TextPaint; import android.util.DisplayMetrics; import android.view.View; import android.view.animation.AlphaAnimation; import android.view.animation.Animation; import android.widget.ImageView; import bolts.Task; import com.carlosefonseca.common.CFApp; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.security.InvalidParameterException; import java.util.Arrays; import java.util.HashSet; import java.util.concurrent.Callable; import java.util.regex.Pattern; import static android.os.Build.VERSION.SDK_INT; import static android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH; import static com.carlosefonseca.common.utils.CodeUtils.getTag; import static com.carlosefonseca.common.utils.NetworkingUtils.getLastSegmentOfURL; import static com.carlosefonseca.common.utils.NetworkingUtils.getFileFromUrlOrPath; /** * Util methods for manipulating images. 
*/ @SuppressWarnings("UnusedDeclaration") public final class ImageUtils { private static final String TAG = getTag(ImageUtils.class); private static float density = 1f; private static DisplayMetrics displayMetrics; private static int screenLayout; private static File cacheDir; static { Context c = CFApp.getContext(); try { cacheDir = c.getCacheDir(); displayMetrics = c.getResources().getDisplayMetrics(); density = c.getResources().getDisplayMetrics().density; screenLayout = c.getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK; } catch (Exception e) { Log.w(TAG, "" + e.getMessage(), (Object[])null); } } private static HashSet<String> imagesOnAssets; private ImageUtils() {} /** * Given a {@code BitmapFactory.Options} of an image and the desired size for that image, calculates the adequate * InSampleSize value. */ static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) { // Raw height and width of image final int height = options.outHeight; final int width = options.outWidth; int inSampleSize = 1; if (height > reqHeight && width > reqWidth) { inSampleSize = (int) Math.min(Math.floor((float) height / (float) reqHeight), Math.floor((float) width / (float) reqWidth)); } return inSampleSize; } /* public static void setDensity(Activity activity) { Log.i(TAG, "Width dp: " + ((1.0 * displayMetrics.widthDP) / density) + " density: " + density); } */ public static float getDensity() { return density; } public static int dp2px(int dp) { return Math.round(dp * density); } public static int dp2px(double dp) { return (int) Math.round(dp * density); } public static DisplayMetrics getDisplayMetrics() { if (displayMetrics == null) { displayMetrics = CFApp.getContext().getResources().getDisplayMetrics(); // throw new IllegalStateException("setDensity not yet invoked. 
Display Metrics aren't yet available."); } return displayMetrics; } /** * @param c * @param path * @param reqWidth Pixels * @param reqHeight Pixels * @throws IOException */ public static Bitmap decodeSampledBitmapFromFileOnAssets(Context c, String path, int reqWidth, int reqHeight) throws IOException { InputStream stream = c.getAssets().open(path); // First decode with inJustDecodeBounds=true to check dimensions BitmapFactory.Options options = null; if (reqWidth > 0 || reqHeight > 0) { options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; options.inPurgeable = true; options.inInputShareable = true; BitmapFactory.decodeStream(stream, null, options); // Calculate inSampleSize options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight); // Decode bitmap with inSampleSize set options.inJustDecodeBounds = false; stream.reset(); // stream.close(); // stream = SharedObjects.getContext().getAssets().open(path); } Bitmap bitmap = BitmapFactory.decodeStream(stream, null, options); stream.close(); return bitmap; } @Nullable private static Bitmap getPhotoFromAssets(String path) { try { InputStream stream = CFApp.getContext().getAssets().open(path); Bitmap bitmap = BitmapFactory.decodeStream(stream); stream.close(); return bitmap; } catch (IOException e) { Log.e(TAG, "" + e.getMessage(), e); return null; } } @Nullable public static Bitmap decodeSampledBitmapFromFile(File path, int reqWidth, int reqHeight) { if (path == null) return null; return decodeSampledBitmapFromFile(path.getAbsolutePath(), reqWidth, reqHeight); } public static Bitmap getCachedPhoto(@Nullable File file, int widthDp, int heightDp, @Nullable String sizeName) { return getCachedPhotoPx(file, ((int) (widthDp * density)), (int) (heightDp * density), sizeName); } /** * Tries to get an image from the cache folder. If not found, tries to get the original image, scale it and save it to the * cache asynchronously. * * @param file The path to the image file. 
* @param widthPx Minimum width in DP's. * @param heightPx Minimum height in DP's. * @param sizeName An optional name to use for size of the the cached image. * @return Scaled and rotated image. * @see #getPhotoFromFile */ @Nullable public static Bitmap getCachedPhotoPx(@Nullable File file, int widthPx, int heightPx, @Nullable String sizeName) { Bitmap bitmap = null; if (file == null) return null; String name = file.getName(); String cacheName; File cacheFile = null; if (widthPx > 0 && heightPx > 0) { if (sizeName == null) { cacheName = name.substring(0, name.length() - 4) + "-" + widthPx + "x" + heightPx + ".png"; } else { cacheName = name.substring(0, name.length() - 4) + sizeName + ".png"; } cacheFile = new File(cacheDir, cacheName); if (cacheFile.exists()) { bitmap = BitmapFactory.decodeFile(cacheFile.getAbsolutePath()); } } if (bitmap == null) { bitmap = getPhotoFromFileOrAssetsPx(file, widthPx, heightPx); if (bitmap == null) return null; if (cacheFile != null) new ImageWriter(cacheFile, bitmap).execute(); } return bitmap; } @Deprecated @Nullable private static Bitmap getPhotoFromFileOrAssets(File file, int widthDp, int heightDp) { final Bitmap bitmap = tryPhotoFromFileOrAssets(file, widthDp, heightDp); if (bitmap == null) Log.w(TAG, "IMAGE DOES NOT EXIST " + file); return bitmap; } @Nullable private static Bitmap getPhotoFromFileOrAssetsPx(File file, int width, int height) { final Bitmap bitmap = tryPhotoFromFileOrAssetsPx(file, width, height); if (bitmap == null) Log.w(TAG, "IMAGE DOES NOT EXIST " + file); return bitmap; } @Deprecated @Nullable static Bitmap tryPhotoFromFileOrAssets(@Nullable File file, int widthDp, int heightDp) { if (file == null) return null; Bitmap bitmap = null; if (file.exists()) { bitmap = getPhotoFromFile(file.getAbsolutePath(), widthDp, heightDp); // DP's } else if (getImagesOnAssets().contains(file.getName())) { bitmap = getPhotoFromAssets(file.getName(), widthDp, heightDp); // PIXELS } return bitmap; } @Nullable static Bitmap 
tryPhotoFromFileOrAssetsPx(@Nullable File file, int widthPx, int heightPx) { if (file == null) return null; Bitmap bitmap = null; if (file.exists()) { bitmap = getPhotoFromFilePx(file.getAbsolutePath(), widthPx, heightPx); } else if (getImagesOnAssets().contains(file.getName())) { bitmap = getPhotoFromAssets(file.getName(), widthPx, heightPx); } return bitmap; } /** * Gets the aspect ratio (h/w) of an image be it on the full path or on the assets folder. * @return The ratio (h/w) or 0 if there's a problem with the image. */ public static double getAspectRatio(File file) { final BitmapFactory.Options imageBounds = getImageBounds(file); if (imageBounds == null) return 0; return 1.0 * imageBounds.outHeight / imageBounds.outWidth; } @Nullable static BitmapFactory.Options getImageBounds(File file) { Bitmap bitmap = null; BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; options.inPurgeable = true; options.inInputShareable = true; if (file.exists()) { BitmapFactory.decodeFile(file.getAbsolutePath(), options); } else if (getImagesOnAssets().contains(file.getName())) { try { InputStream stream = CFApp.getContext().getAssets().open(file.getName()); BitmapFactory.decodeStream(stream, null, options); } catch (IOException e) { Log.e(TAG, "" + e.getMessage(), e); return null; } } else { return null; } return options; } // return 1.0 * options.outHeight / options.outWidth; @Nullable public static Bitmap getPhotoFromFileOrAssets(File file) { return getPhotoFromFileOrAssets(file, -1, -1); } public static class BitmapCanvas { public final Bitmap bitmap; public final Canvas canvas; public BitmapCanvas(Bitmap bitmap, Canvas canvas) { this.bitmap = bitmap; this.canvas = canvas; } } public static ImageUtils.BitmapCanvas canvasFromBitmap(Bitmap bitmap) { final Bitmap bitmap1 = Bitmap.createBitmap(bitmap.getWidth(), bitmap.getHeight(), Bitmap.Config.ARGB_8888); final BitmapCanvas bitmapCanvas = new BitmapCanvas(bitmap1, new Canvas(bitmap1)); 
bitmapCanvas.canvas.drawBitmap(bitmap, 0, 0, new Paint(Paint.DITHER_FLAG)); return bitmapCanvas; } public static interface RunnableWithBitmap { public void run(Bitmap bmp); } public static void getCachedPhotoAsync(final File file, final int widthDp, final int heightDp, final RunnableWithBitmap runnable) { new AsyncTask<Void, Void, Bitmap>() { @Nullable @Override protected Bitmap doInBackground(Void... params) { return getCachedPhoto(file, widthDp, heightDp, null); } @Override protected void onPostExecute(Bitmap bmp) { runnable.run(bmp); } }.execute(); } @Nullable public static AsyncTask<Void, Void, Bitmap> getCachedPhotoAsync(final String filenameOrUrl, final int widthDp, final int heightDp, final RunnableWithBitmap runnable) { if (filenameOrUrl.startsWith("http://")) { return new AsyncTask<Void, Void, Bitmap>() { @Nullable @Override protected Bitmap doInBackground(Void... params) { File fullPath = ResourceUtils.getFullPath(getLastSegmentOfURL(filenameOrUrl)); Bitmap cachedPhoto = getCachedPhoto(fullPath, widthDp, heightDp, null); if (cachedPhoto != null) return cachedPhoto; try { Bitmap bitmap = NetworkingUtils.loadBitmap(filenameOrUrl); new ImageWriter(fullPath, bitmap); return bitmap; } catch (IOException e) { Log.e(TAG, "" + e.getMessage(), e); } return null; } @Override protected void onPostExecute(Bitmap bmp) { runnable.run(bmp); } }.execute(); } else { File file = filenameOrUrl.startsWith("/") ? new File(filenameOrUrl) : ResourceUtils.getFullPath(filenameOrUrl); getCachedPhotoAsync(file, widthDp, heightDp, runnable); } return null; } @Nullable public static Bitmap getPhotoFromFileOrAssets(String filenameOrUrl) { if (filenameOrUrl == null) return null; return getPhotoFromFileOrAssets(getFileFromUrlOrPath(filenameOrUrl), -1, -1); } /** * Convenience method to convert filenames or URLs to an existing file on disk that will then be loaded with {@link * #getCachedPhoto(java.io.File, int, int, String)}. 
* <p/> * Accepted {@code filenameOrUrl} options: * <ul> * <li>http://example.com/image.png</li> * <li>/sdcard/somefolder/image.png</li> * <li>image.png</li> * </ul> * <p/> * This method doesn't resize * * @param filenameOrUrl File "reference". */ @Nullable public static Bitmap getCachedPhoto(String filenameOrUrl) { return getCachedPhoto(filenameOrUrl, -1, -1); } /** * Convenience method to convert filenames or URLs to an existing file on disk that will then be loaded with {@link * #getCachedPhoto(java.io.File, int, int, String)}. * <p/> * Accepted {@code filenameOrUrl} options: * <ul> * <li>http://example.com/image.png</li> * <li>/sdcard/somefolder/image.png</li> * <li>image.png</li> * </ul> * * @param filenameOrUrl File "reference". * @param widthDp Desired width. * @param heightDp Desired height. */ @Nullable public static Bitmap getCachedPhoto(final String filenameOrUrl, final int widthDp, final int heightDp) { if (filenameOrUrl == null) return null; return getCachedPhoto(getFileFromUrlOrPath(filenameOrUrl), widthDp, heightDp, null); } /** * Convenience method to convert filenames or URLs to an existing file on disk that will then be loaded with {@link * #getResizedIcon(java.io.File, int, int)}. * <p/> * Accepted {@code filenameOrUrl} options: * <ul> * <li>http://example.com/image.png</li> * <li>/sdcard/somefolder/image.png</li> * <li>image.png</li> * </ul> * * @param filenameOrUrl File "reference". * @param widthDp Desired width. * @param heightDp Desired height. 
*/ @Nullable public static Bitmap getResizedIcon(final String filenameOrUrl, final int widthDp, final int heightDp) { if (filenameOrUrl == null) return null; return getResizedIcon(getFileFromUrlOrPath(filenameOrUrl), widthDp, heightDp); } /** * * @param name * @param width Pixels * @param height Pixels * @return */ @Nullable public static Bitmap getPhotoFromAssets(String name, int width, int height) { try { return decodeSampledBitmapFromFileOnAssets(CFApp.getContext(), name, width, height); } catch (IOException e) { Log.e(TAG, "" + e.getMessage(), e); } return null; } /** * Tries to get an image from the cache folder. If not found, tries to get the original image, scale it and save it to the * cache asynchronously. * * @param file The path to the image file. * @param widthDp Minimum width in DP's. * @param heightDp Minimum height in DP's. * @return Scaled image. * @see #getPhotoFromFile */ @Nullable public static Bitmap getResizedIcon(@Nullable File file, final int widthDp, final int heightDp) { Bitmap bitmap = null; if (file == null) return null; String name = file.getName(); String cacheName; cacheName = name.substring(0, name.length() - 4) + "-" + widthDp + "x" + heightDp + ".png"; File cacheFile = new File(cacheDir, cacheName); if (cacheFile.exists()) { bitmap = BitmapFactory.decodeFile(cacheFile.getAbsolutePath()); } if (bitmap == null) { // cache doesn't exist Bitmap bitmap1 = null; if (file.exists()) { // Load full size from sd bitmap1 = BitmapFactory.decodeFile(file.getAbsolutePath()); } else if (getImagesOnAssets().contains(name)) { // Load full size from assets bitmap1 = getPhotoFromAssets(name); } if (bitmap1 != null) { if (widthDp != -1 || heightDp != -1) { // desired size int widthPx; int heightPx; if (bitmap1.getWidth() >= bitmap1.getHeight()) { widthPx = (int) (widthDp * density); heightPx = /*heightDp != -1 ? 
(int) (heightDp * density) :*/ widthPx * bitmap1.getHeight() / bitmap1.getWidth(); } else { heightPx = (int) (heightDp * density); widthPx = /*widthDp != -1 ? (int) (widthDp * density) : */heightPx * bitmap1.getWidth() / bitmap1.getHeight(); } if (widthPx == bitmap1.getWidth() && heightPx == bitmap1.getHeight()) { return bitmap1; } if (widthPx == 0 && heightPx == 0) { throw new IllegalArgumentException(String.format( "width(%d) and height(%d) must be > 0. Provided values: widthDp:%d heightDp:%d", widthPx, heightPx, widthDp, heightDp)); } // bitmap size != desired size bitmap = scaleAndCrop(bitmap1, widthPx, heightPx); bitmap1.recycle(); new ImageWriter(cacheFile, bitmap).execute(); } else { bitmap = bitmap1; } } else { Log.w(TAG, "IMAGE DOES NOT EXIST " + file); } } return bitmap; } public static Bitmap scaleAndCrop(Bitmap bitmap1, int widthPx, int heightPx) { Bitmap bitmap = Bitmap.createBitmap(widthPx, heightPx, Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(bitmap); Paint paint = new Paint(/*Paint.FILTER_BITMAP_FLAG*/); paint.setAntiAlias(true); canvas.drawBitmap(bitmap1, null, new Rect(0, 0, widthPx, heightPx), paint); return bitmap; } /** * Configuration.SCREENLAYOUT_SIZE_LARGE, Configuration.SCREENLAYOUT_SIZE_NORMAL... */ public static int getScreenLayout() { return screenLayout; } public static HashSet<String> getImagesOnAssets() { if (imagesOnAssets == null) { try { imagesOnAssets = new HashSet<String>(Arrays.asList(CFApp.getContext().getAssets().list(""))); } catch (IOException e) { Log.e(TAG, "" + e.getMessage(), e); } } return imagesOnAssets; } /** * Saves an image to the cache, asynchronously. */ static class ImageWriter extends AsyncTask<Void, Void, Void> { private final File file; private final Bitmap bitmap; public ImageWriter(File file, Bitmap bitmap) { this.file = file; this.bitmap = bitmap; } @Override protected Void doInBackground(Void... 
voids) { try { FileOutputStream out = new FileOutputStream(file); bitmap.compress(Bitmap.CompressFormat.PNG, 0, out); } catch (Exception e) { Log.e(TAG, "Bitmap file:" + file, e); } return null; } } static void writeImageInBackground(final File file, final Bitmap bitmap) { Task.callInBackground(new Callable<Object>() { @Override public Object call() throws Exception { try { final String state = Environment.getExternalStorageState(); if (state.equals(Environment.MEDIA_MOUNTED)) { FileOutputStream out = new FileOutputStream(file); bitmap.compress(Bitmap.CompressFormat.PNG, 0, out); } else { Log.i("Can't write " + file + " on ExtStorage. " + state); } } catch (Exception e) { Log.e(TAG, "WriteImageInBackground Failed for " + file.getAbsolutePath(), e); } return null; } }); } /** * Obtains an image, scaled down to be at least the requested size and rotated according to the EXIF on the file.<br/> * It's aware of the device density. * * @param path The path to the image file. * @return Scaled and rotated image or null if no image was found. */ @SuppressWarnings("SuspiciousNameCombination") @Nullable public static Bitmap getPhotoFromFile(String path) { try { int orientation = getCameraPhotoOrientation(path); Bitmap bitmap = BitmapFactory.decodeFile(path, null); return rotate(bitmap, orientation); } catch (Exception e) { Log.e(TAG, "" + e.getMessage(), e); return null; } } /** * Obtains an image, scaled down to be at least the requested size and rotated according to the EXIF on the file.<br/> * It's aware of the device density. * * @param path The path to the image file. * @param width Desired width in DP's. * @param height Desired height in DP's. * @return Scaled and rotated image or null if no image was found. 
*/ @SuppressWarnings("SuspiciousNameCombination") @Nullable public static Bitmap getPhotoFromFile(String path, int width, int height) { int orientation = getCameraPhotoOrientation(path); Bitmap bitmap; if (width > 0 || height > 0) { if (displayMetrics == null) { Log.w(TAG, "Device density not accurate. Please call setDensity() from an activity before this."); } if (orientation == 90 || orientation == 270) { int x = width; width = height; height = x; } width *= density; height *= density; bitmap = decodeSampledBitmapFromFile(path, width, height); } else { bitmap = BitmapFactory.decodeFile(path); } /* if (bitmap == null) { Log.e(TAG, "Image "+path+" not found."); return null; } float originalImgRatio = (float) (1.0 * bitmap.getWidth() / bitmap.getHeight()); float desiredSizeRatio = (float) (1.0 * width / height); int finalWidth; int finalHeight; if (originalImgRatio > desiredSizeRatio) { finalHeight = height; finalWidth = (int) (height * originalImgRatio); } else { finalWidth = width; finalHeight = (int) (finalWidth / originalImgRatio); } Log.i(TAG, "getPhoto " + path + " " + width + "x" + height + " -> " + finalWidth + "x" + finalHeight+" orientation: "+orientation); bitmap = Bitmap.createScaledBitmap(bitmap, finalWidth, finalHeight, true); */ bitmap = rotate(bitmap, orientation); return bitmap; } /** * Obtains an image, scaled down to be at least the requested size and rotated according to the EXIF on the file.<br/> * It's aware of the device density. * * @param path The path to the image file. * @param width Desired width in DP's. * @param height Desired height in DP's. * @return Scaled and rotated image or null if no image was found. 
*/ @SuppressWarnings("SuspiciousNameCombination") @Nullable public static Bitmap getPhotoFromFilePx(String path, int width, int height) { int orientation = getCameraPhotoOrientation(path); Bitmap bitmap; if (width > 0 || height > 0) { if (orientation == 90 || orientation == 270) { int x = width; width = height; height = x; } bitmap = decodeSampledBitmapFromFile(path, width, height); } else { bitmap = BitmapFactory.decodeFile(path); } /* if (bitmap == null) { Log.e(TAG, "Image "+path+" not found."); return null; } float originalImgRatio = (float) (1.0 * bitmap.getWidth() / bitmap.getHeight()); float desiredSizeRatio = (float) (1.0 * width / height); int finalWidth; int finalHeight; if (originalImgRatio > desiredSizeRatio) { finalHeight = height; finalWidth = (int) (height * originalImgRatio); } else { finalWidth = width; finalHeight = (int) (finalWidth / originalImgRatio); } Log.i(TAG, "getPhoto " + path + " " + width + "x" + height + " -> " + finalWidth + "x" + finalHeight+" orientation: "+orientation); bitmap = Bitmap.createScaledBitmap(bitmap, finalWidth, finalHeight, true); */ bitmap = rotate(bitmap, orientation); return bitmap; } /** * Obtains a scaled down image from the file system. * The scaling uses the size parameters to calculate the inSampleSize and, therefore, it will have at least that size. * * @param path The path to the image file. * @param reqWidth Minimum width px. * @param reqHeight Minimum height px. * @return Scaled image. 
*/ public static Bitmap decodeSampledBitmapFromFile(String path, int reqWidth, int reqHeight) { // First decode with inJustDecodeBounds=true to check dimensions final BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; options.inPurgeable = true; options.inInputShareable = true; BitmapFactory.decodeFile(path, options); // Calculate inSampleSize options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight); // Decode bitmap with inSampleSize set options.inJustDecodeBounds = false; return BitmapFactory.decodeFile(path, options); } /** * Creates a squared thumbnail image from a source image and saves it to disk. * Subsequent requests to the same image with the same size will return the cached image. * The thumbnail will be a center-cropped version of the original, scaled to the specified size and rotated according to the * EXIF. * * @param c A context (not the application one) * @param path The path to the image file. * @param side The side of the final image. * @return Image * @see #getSquareThumbnail(android.content.Context, String, int) Non-cached version. */ @Nullable @Deprecated public static Bitmap getCachedSquareThumbnail(Context c, String path, int side) { Bitmap bitmap; String name = new File(path).getName(); File cacheFile = new File(cacheDir, name.substring(0, name.length() - 4) + side + ".png"); if (!cacheFile.exists()) { try { bitmap = getSquareThumbnail(c, path, side); new ImageWriter(cacheFile, bitmap).execute(); } catch (Exception e) { Log.e(TAG, "Error generating thumbnail", e); return null; } } else { Log.v(TAG, "Loading from cache " + path); final BitmapFactory.Options options = new BitmapFactory.Options(); options.inPurgeable = true; options.inInputShareable = true; bitmap = BitmapFactory.decodeFile(cacheFile.getAbsolutePath(), options); } return bitmap; } /** * Creates a thumbnail image from a source image. 
* The thumbnail will be a center-cropped version of the original, scaled to the specified size and rotated according to the * EXIF. * * @param c A context (not the application one) * @param path The path to the image file. * @param side The side of the final image. * @see #getCachedSquareThumbnail(android.content.Context, String, int) Disk-cached version. */ private static Bitmap getSquareThumbnail(Context c, String path, int side) { Bitmap bitmap = decodeSampledBitmapFromFile(path, side, side); bitmap = cropSquare(bitmap); bitmap = Bitmap.createScaledBitmap(bitmap, side, side, true); bitmap = rotate(bitmap, path); return bitmap; } /** * Creates a square, cropped image from the center of the source image. * * @param bitmap Source bitmap. * @return New image. */ public static Bitmap cropSquare(Bitmap bitmap) { int originalWidth = bitmap.getWidth(); int originalHeight = bitmap.getHeight(); int x, y, side; if (originalWidth > originalHeight) { x = (originalWidth - originalHeight) / 2; y = 0; side = originalHeight; } else { x = 0; y = (originalHeight - originalWidth) / 2; side = originalWidth; } // Log.v("getThumbnail", "x:" + x + " y:" + y + " side:" + side); bitmap = Bitmap.createBitmap(bitmap, x, y, side, side); return bitmap; } public static Rect getCenterSquare(Bitmap bitmap) { int originalWidth = bitmap.getWidth(); int originalHeight = bitmap.getHeight(); int x, y, side; if (originalWidth > originalHeight) { x = (originalWidth - originalHeight) / 2; y = 0; side = originalHeight; } else { x = 0; y = (originalHeight - originalWidth) / 2; side = originalWidth; } return new Rect(x, y, x + side, y + side); } @Deprecated public static Bitmap getThumbnail(Bitmap bitmap, int side) { int iw = bitmap.getWidth(); int ih = bitmap.getHeight(); int x, y, w, h; if (iw > ih) { x = (iw - ih) / 2; y = 0; w = ih; h = ih; } else { x = 0; y = (ih - iw) / 2; h = iw; w = iw; } bitmap = Bitmap.createBitmap(bitmap, x, y, w, h); bitmap = Bitmap.createScaledBitmap(bitmap, side, side, 
true); Log.i("getThumbnail", "x:" + x + " y:" + y + " w:" + w + " h:" + h + " | scaled: " + bitmap.getWidth() + " x " + bitmap.getHeight()); return bitmap; } /** * Calculates the orientation of an image on disk based on the EXIF information. * * @param imagePath The path to the image. * @return Returns 0, 90, 180 or 270. */ public static int getCameraPhotoOrientation(String imagePath) { int rotate = 0; try { ExifInterface exif = new ExifInterface(imagePath); int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL); switch (orientation) { case ExifInterface.ORIENTATION_ROTATE_270: rotate = 270; break; case ExifInterface.ORIENTATION_ROTATE_180: rotate = 180; break; case ExifInterface.ORIENTATION_ROTATE_90: rotate = 90; break; } // Log.v(TAG, "Exif orientation: " + orientation); } catch (Exception e) { Log.e(TAG, "Error getCameraPhotoOrientation", e); } return rotate; } /** * Creates a rotated copy of an image according to the EXIF information. * * @param bitmap The source image. * @param path The path on disk. * @return A rotated bitmap. */ public static Bitmap rotate(Bitmap bitmap, String path) { int orientation = getCameraPhotoOrientation(path); bitmap = rotate(bitmap, orientation); return bitmap; } /** * Rotates an image by a given number of degrees. * * @param bitmap The source bitmap. * @param orientation The amount of degrees to rotate. * @return Rotated bitmap. */ public static Bitmap rotate(Bitmap bitmap, int orientation) { if (orientation != 0) { Matrix matrix = new Matrix(); matrix.postRotate(orientation); // create a new bitmap from the original using the matrix to transform the result Bitmap bitmap1 = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true); bitmap.recycle(); return bitmap1; } return bitmap; } /** * Creates a new bitmap from the original resource and paints the visible parts with the given color. 
* The alpha channel is the only part of the original resource that is used for the painting. * <p/> * Note: I tried saving a painted file on disk and loading that one each time. Got 1ms improvement but more used memory for * repeated calls. * * @param c A context. * @param resId The original image with alpha channel. * @param color The new Color * @return Bitmap with new color. */ public static Bitmap createRecoloredBitmap(Context c, int resId, int color) { BitmapFactory.Options opts = new BitmapFactory.Options(); opts.inPreferredConfig = Bitmap.Config.ARGB_8888; Bitmap source = BitmapFactory.decodeResource(c.getResources(), resId); return createRecoloredBitmap(source, color); } /** * Wraps {@link #createRecoloredBitmap(android.content.Context, int, int)} in a {@link * android.graphics.drawable.BitmapDrawable}. * * @param context A context. * @param resId The original image with alpha channel. * @param color The new Color * @return Bitmap with new color. */ public static BitmapDrawable createRecoloredDrawable(Context context, int resId, int color) { return new BitmapDrawable(context.getResources(), createRecoloredBitmap(context, resId, color)); } /** * Wraps {@link #createRecoloredBitmap(android.graphics.Bitmap, int)} in a {@link * android.graphics.drawable.BitmapDrawable}. * * @param bitmap The original image with alpha channel. * @param color The new Color * @return Bitmap with new color. */ public static BitmapDrawable createRecoloredDrawable(Context context, Bitmap bitmap, int color) { return new BitmapDrawable(context.getResources(), createRecoloredBitmap(bitmap, color)); } /** * Creates a new bitmap from the original resource and paints the visible parts with the given color. * The alpha channel is the only part of the original resource that is used for the painting. * * @param source A context. * @param color The original image with alpha channel. * @return Bitmap with new color. 
*/ public static Bitmap createRecoloredBitmap(Bitmap source, int color) { Bitmap mask = source.extractAlpha(); Bitmap targetBitmap = Bitmap.createBitmap(mask.getWidth(), mask.getHeight(), Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(targetBitmap); Paint paint = new Paint(); paint.setColor(color); canvas.drawBitmap(mask, 0, 0, paint); return targetBitmap; } /** * Creates a new bitmap from the original resource and paints the visible parts with the given color. * The alpha channel is the only part of the original resource that is used for the painting. * <p/> * Note: I tried saving a painted file on disk and loading that one each time. Got 1ms improvement but more used memory for * repeated calls. * * @param c A context. * @param resId The original image with alpha channel. * @param color The new Color * @return Bitmap with new color. */ public static Bitmap createRecoloredBitmapMultiply(Context c, int resId, int color) { BitmapFactory.Options opts = new BitmapFactory.Options(); opts.inPreferredConfig = Bitmap.Config.ARGB_8888; Bitmap source = BitmapFactory.decodeResource(c.getResources(), resId); return createRecoloredBitmapMultiply(source, color); } /** * Creates a new bitmap from the original resource and paints the visible parts with the given color. * The alpha channel is the only part of the original resource that is used for the painting. * * @param source A context. * @param color The original image with alpha channel. * @return Bitmap with new color. 
*/ public static Bitmap createRecoloredBitmapMultiply(Bitmap source, int color) { final Bitmap blend = createRecoloredBitmap(source, color); Paint p = new Paint(); p.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.MULTIPLY)); p.setShader(new BitmapShader(blend, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP)); Canvas c = new Canvas(); final Bitmap targetBitmap = source.copy(Bitmap.Config.ARGB_8888, true); c.setBitmap(targetBitmap); c.drawBitmap(source, 0, 0, null); c.drawRect(0, 0, source.getWidth(), source.getHeight(), p); return targetBitmap; } /** * Draws text on the canvas. Tries to make the text as big as possible so it fills the canvas but not more. * <p/> * This method starts on size 1 and goes up. Maybe a binary search or something could be better, but it works for simple * stuff. * * @param canvas The canvas to draw the text on. * @param text The text. * @param textColor The color for the text. * @param typeface The typeface for the text. */ public static void placeTextFillingCanvas(Canvas canvas, String text, int textColor, Typeface typeface) { int canvasWidth = canvas.getWidth(); int canvasHeight = canvas.getHeight(); int text_size = 1; TextPaint paint = new TextPaint(); paint.setColor(textColor); paint.setTypeface(null); paint.setSubpixelText(true); paint.setAntiAlias(true); paint.setTypeface(typeface); StaticLayout sl1 = null; StaticLayout sl2; while (true) { paint.setTextSize(text_size); sl2 = new StaticLayout(text, paint, canvasWidth, Layout.Alignment.ALIGN_CENTER, 1, 0, false); text_size += 1; if (sl1 == null || sl2.getHeight() <= canvasHeight && sl2.getWidth() <= canvasWidth) { sl1 = sl2; } else { break; } } canvas.save(); canvas.translate(0, (float) (1.0 * (canvasHeight - sl1.getHeight()) / 2)); sl1.draw(canvas); canvas.restore(); } /** * Creates a bitmap with a circle of a certain size and color. * * @param iconSizePx The size of the bitmap/diameter of the circle, in pixels. * @param color The color of the bitmap. 
*/
    public static Bitmap getCircleBitmap(int iconSizePx, int color) {
        // Allocate a square ARGB bitmap and draw the circle into it.
        Bitmap bitmap = Bitmap.createBitmap(iconSizePx, iconSizePx, Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(bitmap);
        drawCircle(canvas, color, iconSizePx);
        return bitmap;
    }

    /**
     * Wraps {@link #getCircleBitmap(int, int)} in a {@link android.graphics.drawable.BitmapDrawable}.
     *
     * @param res        A resources.
     * @param iconSizeDp The size of the bitmap/diameter of the circle, in DP (converted to pixels internally).
     * @param color      The color of the bitmap.
     */
    public static BitmapDrawable getCircleBitmapDrawable(Resources res, int iconSizeDp, int color) {
        return new BitmapDrawable(res, getCircleBitmap(dp2px(iconSizeDp), color));
    }

    /**
     * Draws a filled, anti-aliased circle of a certain size and color in the top left of the given canvas.
     *
     * @param canvas     The canvas to draw the circle on.
     * @param color      The color for the circle.
     * @param iconSizePx The diameter of the circle, in pixels.
     */
    public static void drawCircle(Canvas canvas, int color, int iconSizePx) {
        // Center and radius are both iconSizePx/2, so the circle touches all four edges.
        Paint p = new Paint();
        p.setAntiAlias(true);
        p.setColor(color);
        canvas.drawCircle(iconSizePx / 2f, iconSizePx / 2f, iconSizePx / 2f, p);
    }

    /**
     * Draws a bitmap on top of another, centered. Modifies the bottom bitmap in place when it is
     * mutable; when it is immutable, draws on (and returns) a mutable ARGB_8888 copy instead —
     * callers must use the returned bitmap, not the argument.
     *
     * @param bottom The bitmap that will be bellow.
     * @param top    The bitmap that will be on top.
     */
    public static Bitmap drawOnMiddle(Bitmap bottom, Bitmap top) {
        if (bottom == null) throw new InvalidParameterException("Bottom is null!");
        if (top == null) throw new InvalidParameterException("Top is null!");
        if (!bottom.isMutable()) {
            // Canvas requires a mutable bitmap; copy() may return null on failure.
            bottom = bottom.copy(Bitmap.Config.ARGB_8888, true);
            assert bottom != null;
        }
        final Canvas canvas = new Canvas(bottom);
        canvas.drawBitmap(top,
                          (bottom.getWidth() - top.getWidth()) / 2f,
                          (bottom.getHeight() - top.getHeight()) / 2f,
                          null);
        return bottom;
    }

    /**
     * Asynchronously obtains a bitmap from disk and places it on an ImageView with a quick fade-in effect.
* <p/>
     * You must use an {@link android.support.v4.util.LruCache} that implements {@link android.support.v4.util.LruCache#create(Object)}
     * so it will obtain
     * the bitmap.
     *
     * @param file        A reference to the file that will be passed to the LruCache.
     * @param view        The view that will receive the bitmap.
     * @param cache       The LruCache implementation (see {@link com.carlosefonseca.common.utils.ImageUtils.SizedImageCache} for
     *                    an
     *                    example).
     * @param placeholder A placeholder that will be set in case there is no bitmap to place.
     */
    @Nullable
    public static AsyncTask<Void, Void, Bitmap> setImageAsyncFadeIn(final String file,
                                                                    final ImageView view,
                                                                    final LruCache<String, Bitmap> cache,
                                                                    final int placeholder) {
        // Delegates to the View-based overload with toBackground=false (sets the ImageView's image).
        return setImageAsyncFadeIn(file, view, cache, placeholder, false);
    }

    /**
     * Asynchronously obtains a bitmap from disk and places it on an ImageView with a quick fade-in effect.
     * <p/>
     * You must use an {@link LruCache} that implements {@link android.support.v4.util.LruCache#create(Object)} so it will obtain
     * the bitmap.
     *
     * @param file         A reference to the file that will be passed to the LruCache.
     * @param view         The view that will receive the bitmap.
     * @param cache        The LruCache implementation (see {@link com.carlosefonseca.common.utils.ImageUtils.SizedImageCache}
     *                     for
     *                     an
     *                     example).
     * @param placeholder  A placeholder that will be set in case there is no bitmap to place.
     * @param toBackground When true, sets the bitmap/placeholder as the view's background; when false,
     *                     the view must be an ImageView and receives it via setImageBitmap/setImageResource.
     */
    @Nullable
    public static AsyncTask<Void, Void, Bitmap> setImageAsyncFadeIn(final String file,
                                                                    final View view,
                                                                    final LruCache<String, Bitmap> cache,
                                                                    final int placeholder,
                                                                    final boolean toBackground) {
        // The view's tag records the file already displayed; skip reloading the same image.
        if (file != null && file.equals(view.getTag())) {
            return null;
        }
        // Hidden until the bitmap (or placeholder) is set, then revealed by the fade-in below.
        view.setVisibility(View.INVISIBLE);
        return new AsyncTask<Void, Void, Bitmap>() {
            @Nullable
            @Override
            protected Bitmap doInBackground(Void... params) {
                // The cache's create(key) is expected to load from disk on a miss.
                return file != null ? cache.get(file) : null;
            }

            @Override
            protected void onPostExecute(Bitmap bmp) {
                if (bmp != null) {
                    if (toBackground) {
                        ResourceUtils.setBackground(view, new BitmapDrawable(view.getResources(), bmp));
                    } else {
                        ((ImageView) view).setImageBitmap(bmp);
                    }
                    view.setTag(file);
                } else {
                    if (toBackground) {
                        view.setBackgroundResource(placeholder);
                    } else {
                        ((ImageView) view).setImageResource(placeholder);
                    }
                }
                // Pre-ICS: skip the animation and just show the view.
                if (SDK_INT < ICE_CREAM_SANDWICH) {
                    view.setVisibility(View.VISIBLE);
                    return;
                }
                // 100ms alpha fade; the view becomes VISIBLE only when the animation ends.
                AlphaAnimation alphaAnimation = new AlphaAnimation(0, 1);
                alphaAnimation.setDuration(100);
                Animation.AnimationListener listener = new Animation.AnimationListener() {
                    @Override
                    public void onAnimationStart(Animation animation) { }

                    @Override
                    public void onAnimationEnd(Animation animation) {
                        view.setVisibility(View.VISIBLE);
                    }

                    @Override
                    public void onAnimationRepeat(Animation animation) { }
                };
                alphaAnimation.setAnimationListener(listener);
                view.startAnimation(alphaAnimation);
            }
        }.execute();
    }

    /**
     * Image cache that loads resized photos from disk. Specify the desired size of the images in the
     * constructor and perform {@link #get(Object)} with a filename; on a cache miss, {@link #create}
     * loads the image from disk at the configured size.
     */
    public static class SizedImageCache extends LruCache<String, Bitmap> {
        // Target dimensions passed to getCachedPhoto on a cache miss.
        // NOTE(review): the constructor parameters are named *DP but create() forwards them to
        // getCachedPhoto's widthDp/heightDp parameters, so units are DP — confirm with callers.
        protected final int width;
        protected final int height;

        public SizedImageCache(int maxSize, int widthDP, int heightDP) {
            super(maxSize);
            this.width = widthDP;
            this.height = heightDP;
        }

        // Convenience constructor for square images.
        public SizedImageCache(int maxSize, int imageSizeDP) {
            this(maxSize, imageSizeDP, imageSizeDP);
        }

        @Nullable
        @Override
        protected Bitmap create(String key) {
            return getCachedPhoto(key, width, height);
        }
    }

    /**
     * Memory cache for bitmaps, budgeted at one quarter of the VM's max memory (see the constructor).
     * (The classic Android docs example uses one eighth: on a normal/hdpi device that is a
     * minimum of around 4MB (32/8). A full screen GridView filled with images on a device with 800x480 resolution would use
     * around 1.5MB (800*480*4 bytes), so that would cache a minimum of around 2.5 pages of images in memory.
*/
    public static class BitmapCache extends LruCache<String, Bitmap> {
        public BitmapCache() {
            // Get max available VM memory; exceeding this amount will throw an OutOfMemory exception.
            // NOTE(review): despite the original comment claiming kilobytes, the units here are BYTES:
            // maxMemory() returns bytes and sizeOf() below returns byte counts, so the budget and the
            // per-entry sizes agree. The int cast is safe for typical Android heap sizes.
            // Use 1/4th of the available memory for this memory cache.
            super((int) (Runtime.getRuntime().maxMemory() / 4));
        }

        @Override
        protected int sizeOf(String key, Bitmap bitmap) {
            // The cache size is measured in bytes (see sizeBitmap) rather than number of items.
            return sizeBitmap(bitmap);
        }

        /** Returns the bitmap's allocation size in bytes, using the best API available on this OS level. */
        public static int sizeBitmap(Bitmap bitmap) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
                // Includes the full allocation, even if the bitmap was reconfigured smaller.
                return bitmap.getAllocationByteCount();
            } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1) {
                return bitmap.getByteCount();
            } else {
                // Pre-HC-MR1 fallback: row stride times height.
                return bitmap.getRowBytes() * bitmap.getHeight();
            }
        }

        @Override
        protected void entryRemoved(boolean evicted, String key, Bitmap oldValue, Bitmap newValue) {
            // NOTE(review): recycling on removal frees memory eagerly, but will crash any view still
            // drawing this bitmap — confirm no external references outlive the cache entry.
            oldValue.recycle();
            super.entryRemoved(evicted, key, oldValue, newValue);
        }
    }

    /**
     * Redraws the bitmap in a new size. Scales the bitmap keeping the aspect ratio.
*/
    public static Bitmap resizeBitmapCanvas(Bitmap bitmap, int desiredWidth, int desiredHeight) {
        // Already the right size: return the original, no copy.
        if (bitmap.getWidth() == desiredWidth && bitmap.getHeight() == desiredHeight) return bitmap;

        // Fit the source inside desiredWidth x desiredHeight, preserving aspect ratio r = w/h.
        float r = 1f * bitmap.getWidth() / bitmap.getHeight();
        int newBitmapWidth;
        int newBitmapHeight = (int) (desiredWidth * 1f / r);
        if (newBitmapHeight > desiredHeight) {
            // Height-bound: clamp height and derive width.
            newBitmapHeight = desiredHeight;
            newBitmapWidth = (int) (desiredHeight * 1f * r);
        } else {
            newBitmapWidth = desiredWidth;
        }

        Bitmap newBitmap = Bitmap.createBitmap(desiredWidth, desiredHeight, Bitmap.Config.ARGB_8888);
        Canvas canvas = new Canvas(newBitmap);
        Paint paint = new Paint(/*Paint.FILTER_BITMAP_FLAG*/);
        paint.setAntiAlias(true);

        // Center the scaled image inside the target canvas.
        int hMargin = (desiredWidth - newBitmapWidth) / 2;
        int vMargin = (desiredHeight - newBitmapHeight) / 2;
        // FIX: the bottom edge previously used bitmap.getHeight() (the SOURCE height) instead of the
        // scaled height, which distorted or clipped the drawn image whenever the sizes differed.
        Rect dst = new Rect(hMargin, vMargin, hMargin + newBitmapWidth, vMargin + newBitmapHeight);
        canvas.drawBitmap(bitmap, null, dst, paint);
        return newBitmap;
    }

    // Compiled once: matching a path against a freshly-compiled pattern on every call was wasteful.
    private static final Pattern IMAGE_FILE_PATTERN =
            Pattern.compile(".+\\.(jpe?g|png|gif)", Pattern.CASE_INSENSITIVE);

    /** Returns true if the path ends in a known image extension (jpg/jpeg/png/gif, case-insensitive). */
    static boolean isImage(String path) {
        return path != null && IMAGE_FILE_PATTERN.matcher(path).matches();
    }
}
src/main/java/com/carlosefonseca/common/utils/ImageUtils.java
package com.carlosefonseca.common.utils; import android.content.Context; import android.content.res.Configuration; import android.content.res.Resources; import android.graphics.*; import android.graphics.drawable.BitmapDrawable; import android.media.ExifInterface; import android.os.AsyncTask; import android.os.Build; import android.os.Environment; import android.support.v4.util.LruCache; import android.text.Layout; import android.text.StaticLayout; import android.text.TextPaint; import android.util.DisplayMetrics; import android.view.View; import android.view.animation.AlphaAnimation; import android.view.animation.Animation; import android.widget.ImageView; import bolts.Task; import com.carlosefonseca.common.CFApp; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.security.InvalidParameterException; import java.util.Arrays; import java.util.HashSet; import java.util.concurrent.Callable; import java.util.regex.Pattern; import static android.os.Build.VERSION.SDK_INT; import static android.os.Build.VERSION_CODES.ICE_CREAM_SANDWICH; import static com.carlosefonseca.common.utils.CodeUtils.getTag; import static com.carlosefonseca.common.utils.NetworkingUtils.getLastSegmentOfURL; import static com.carlosefonseca.common.utils.NetworkingUtils.getFileFromUrlOrPath; /** * Util methods for manipulating images. 
*/ @SuppressWarnings("UnusedDeclaration") public final class ImageUtils { private static final String TAG = getTag(ImageUtils.class); private static float density = 1f; private static DisplayMetrics displayMetrics; private static int screenLayout; private static File cacheDir; static { Context c = CFApp.getContext(); try { cacheDir = c.getCacheDir(); displayMetrics = c.getResources().getDisplayMetrics(); density = c.getResources().getDisplayMetrics().density; screenLayout = c.getResources().getConfiguration().screenLayout & Configuration.SCREENLAYOUT_SIZE_MASK; } catch (Exception e) { Log.w(TAG, "" + e.getMessage(), (Object[])null); } } private static HashSet<String> imagesOnAssets; private ImageUtils() {} /** * Given a {@code BitmapFactory.Options} of an image and the desired size for that image, calculates the adequate * InSampleSize value. */ static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth, int reqHeight) { // Raw height and width of image final int height = options.outHeight; final int width = options.outWidth; int inSampleSize = 1; if (height > reqHeight && width > reqWidth) { inSampleSize = (int) Math.min(Math.floor((float) height / (float) reqHeight), Math.floor((float) width / (float) reqWidth)); } return inSampleSize; } /* public static void setDensity(Activity activity) { Log.i(TAG, "Width dp: " + ((1.0 * displayMetrics.widthDP) / density) + " density: " + density); } */ public static float getDensity() { return density; } public static int dp2px(int dp) { return Math.round(dp * density); } public static int dp2px(double dp) { return (int) Math.round(dp * density); } public static DisplayMetrics getDisplayMetrics() { if (displayMetrics == null) { displayMetrics = CFApp.getContext().getResources().getDisplayMetrics(); // throw new IllegalStateException("setDensity not yet invoked. 
Display Metrics aren't yet available."); } return displayMetrics; } /** * @param c * @param path * @param reqWidth Pixels * @param reqHeight Pixels * @throws IOException */ public static Bitmap decodeSampledBitmapFromFileOnAssets(Context c, String path, int reqWidth, int reqHeight) throws IOException { InputStream stream = c.getAssets().open(path); // First decode with inJustDecodeBounds=true to check dimensions BitmapFactory.Options options = null; if (reqWidth > 0 || reqHeight > 0) { options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; options.inPurgeable = true; options.inInputShareable = true; BitmapFactory.decodeStream(stream, null, options); // Calculate inSampleSize options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight); // Decode bitmap with inSampleSize set options.inJustDecodeBounds = false; stream.reset(); // stream.close(); // stream = SharedObjects.getContext().getAssets().open(path); } Bitmap bitmap = BitmapFactory.decodeStream(stream, null, options); stream.close(); return bitmap; } @Nullable private static Bitmap getPhotoFromAssets(String path) { try { InputStream stream = CFApp.getContext().getAssets().open(path); Bitmap bitmap = BitmapFactory.decodeStream(stream); stream.close(); return bitmap; } catch (IOException e) { Log.e(TAG, "" + e.getMessage(), e); return null; } } @Nullable public static Bitmap decodeSampledBitmapFromFile(File path, int reqWidth, int reqHeight) { if (path == null) return null; return decodeSampledBitmapFromFile(path.getAbsolutePath(), reqWidth, reqHeight); } public static Bitmap getCachedPhoto(@Nullable File file, int widthDp, int heightDp, @Nullable String sizeName) { return getCachedPhotoPx(file, ((int) (widthDp * density)), (int) (heightDp * density), sizeName); } /** * Tries to get an image from the cache folder. If not found, tries to get the original image, scale it and save it to the * cache asynchronously. * * @param file The path to the image file. 
* @param widthPx Minimum width in DP's. * @param heightPx Minimum height in DP's. * @param sizeName An optional name to use for size of the the cached image. * @return Scaled and rotated image. * @see #getPhotoFromFile */ @Nullable public static Bitmap getCachedPhotoPx(@Nullable File file, int widthPx, int heightPx, @Nullable String sizeName) { Bitmap bitmap = null; if (file == null) return null; String name = file.getName(); String cacheName; File cacheFile = null; if (widthPx > 0 && heightPx > 0) { if (sizeName == null) { cacheName = name.substring(0, name.length() - 4) + "-" + widthPx + "x" + heightPx + ".png"; } else { cacheName = name.substring(0, name.length() - 4) + sizeName + ".png"; } cacheFile = new File(cacheDir, cacheName); if (cacheFile.exists()) { bitmap = BitmapFactory.decodeFile(cacheFile.getAbsolutePath()); } } if (bitmap == null) { bitmap = getPhotoFromFileOrAssetsPx(file, widthPx, heightPx); if (bitmap == null) return null; if (cacheFile != null) new ImageWriter(cacheFile, bitmap).execute(); } return bitmap; } @Deprecated @Nullable private static Bitmap getPhotoFromFileOrAssets(File file, int widthDp, int heightDp) { final Bitmap bitmap = tryPhotoFromFileOrAssets(file, widthDp, heightDp); if (bitmap == null) Log.w(TAG, "IMAGE DOES NOT EXIST " + file); return bitmap; } @Nullable private static Bitmap getPhotoFromFileOrAssetsPx(File file, int width, int height) { final Bitmap bitmap = tryPhotoFromFileOrAssetsPx(file, width, height); if (bitmap == null) Log.w(TAG, "IMAGE DOES NOT EXIST " + file); return bitmap; } @Deprecated @Nullable static Bitmap tryPhotoFromFileOrAssets(@Nullable File file, int widthDp, int heightDp) { if (file == null) return null; Bitmap bitmap = null; if (file.exists()) { bitmap = getPhotoFromFile(file.getAbsolutePath(), widthDp, heightDp); // DP's } else if (getImagesOnAssets().contains(file.getName())) { bitmap = getPhotoFromAssets(file.getName(), widthDp, heightDp); // PIXELS } return bitmap; } @Nullable static Bitmap 
tryPhotoFromFileOrAssetsPx(@Nullable File file, int widthPx, int heightPx) { if (file == null) return null; Bitmap bitmap = null; if (file.exists()) { bitmap = getPhotoFromFilePx(file.getAbsolutePath(), widthPx, heightPx); } else if (getImagesOnAssets().contains(file.getName())) { bitmap = getPhotoFromAssets(file.getName(), widthPx, heightPx); } return bitmap; } /** * Gets the aspect ratio (h/w) of an image be it on the full path or on the assets folder. * @return The ratio (h/w) or 0 if there's a problem with the image. */ public static double getAspectRatio(File file) { final BitmapFactory.Options imageBounds = getImageBounds(file); if (imageBounds == null) return 0; return 1.0 * imageBounds.outHeight / imageBounds.outWidth; } @Nullable static BitmapFactory.Options getImageBounds(File file) { Bitmap bitmap = null; BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; options.inPurgeable = true; options.inInputShareable = true; if (file.exists()) { BitmapFactory.decodeFile(file.getAbsolutePath(), options); } else if (getImagesOnAssets().contains(file.getName())) { try { InputStream stream = CFApp.getContext().getAssets().open(file.getName()); BitmapFactory.decodeStream(stream, null, options); } catch (IOException e) { Log.e(TAG, "" + e.getMessage(), e); return null; } } else { return null; } return options; } // return 1.0 * options.outHeight / options.outWidth; @Nullable public static Bitmap getPhotoFromFileOrAssets(File file) { return getPhotoFromFileOrAssets(file, -1, -1); } public static class BitmapCanvas { public final Bitmap bitmap; public final Canvas canvas; public BitmapCanvas(Bitmap bitmap, Canvas canvas) { this.bitmap = bitmap; this.canvas = canvas; } } public static ImageUtils.BitmapCanvas canvasFromBitmap(Bitmap bitmap) { final Bitmap bitmap1 = Bitmap.createBitmap(bitmap.getWidth(), bitmap.getHeight(), Bitmap.Config.ARGB_8888); final BitmapCanvas bitmapCanvas = new BitmapCanvas(bitmap1, new Canvas(bitmap1)); 
bitmapCanvas.canvas.drawBitmap(bitmap, 0, 0, new Paint(Paint.DITHER_FLAG)); return bitmapCanvas; } public static interface RunnableWithBitmap { public void run(Bitmap bmp); } public static void getCachedPhotoAsync(final File file, final int widthDp, final int heightDp, final RunnableWithBitmap runnable) { new AsyncTask<Void, Void, Bitmap>() { @Nullable @Override protected Bitmap doInBackground(Void... params) { return getCachedPhoto(file, widthDp, heightDp, null); } @Override protected void onPostExecute(Bitmap bmp) { runnable.run(bmp); } }.execute(); } @Nullable public static AsyncTask<Void, Void, Bitmap> getCachedPhotoAsync(final String filenameOrUrl, final int widthDp, final int heightDp, final RunnableWithBitmap runnable) { if (filenameOrUrl.startsWith("http://")) { return new AsyncTask<Void, Void, Bitmap>() { @Nullable @Override protected Bitmap doInBackground(Void... params) { File fullPath = ResourceUtils.getFullPath(getLastSegmentOfURL(filenameOrUrl)); Bitmap cachedPhoto = getCachedPhoto(fullPath, widthDp, heightDp, null); if (cachedPhoto != null) return cachedPhoto; try { Bitmap bitmap = NetworkingUtils.loadBitmap(filenameOrUrl); new ImageWriter(fullPath, bitmap); return bitmap; } catch (IOException e) { Log.e(TAG, "" + e.getMessage(), e); } return null; } @Override protected void onPostExecute(Bitmap bmp) { runnable.run(bmp); } }.execute(); } else { File file = filenameOrUrl.startsWith("/") ? new File(filenameOrUrl) : ResourceUtils.getFullPath(filenameOrUrl); getCachedPhotoAsync(file, widthDp, heightDp, runnable); } return null; } @Nullable public static Bitmap getPhotoFromFileOrAssets(String filenameOrUrl) { if (filenameOrUrl == null) return null; return getPhotoFromFileOrAssets(getFileFromUrlOrPath(filenameOrUrl), -1, -1); } /** * Convenience method to convert filenames or URLs to an existing file on disk that will then be loaded with {@link * #getCachedPhoto(java.io.File, int, int, String)}. 
* <p/> * Accepted {@code filenameOrUrl} options: * <ul> * <li>http://example.com/image.png</li> * <li>/sdcard/somefolder/image.png</li> * <li>image.png</li> * </ul> * <p/> * This method doesn't resize * * @param filenameOrUrl File "reference". */ @Nullable public static Bitmap getCachedPhoto(String filenameOrUrl) { return getCachedPhoto(filenameOrUrl, -1, -1); } /** * Convenience method to convert filenames or URLs to an existing file on disk that will then be loaded with {@link * #getCachedPhoto(java.io.File, int, int, String)}. * <p/> * Accepted {@code filenameOrUrl} options: * <ul> * <li>http://example.com/image.png</li> * <li>/sdcard/somefolder/image.png</li> * <li>image.png</li> * </ul> * * @param filenameOrUrl File "reference". * @param widthDp Desired width. * @param heightDp Desired height. */ @Nullable public static Bitmap getCachedPhoto(final String filenameOrUrl, final int widthDp, final int heightDp) { if (filenameOrUrl == null) return null; return getCachedPhoto(getFileFromUrlOrPath(filenameOrUrl), widthDp, heightDp, null); } /** * Convenience method to convert filenames or URLs to an existing file on disk that will then be loaded with {@link * #getResizedIcon(java.io.File, int, int)}. * <p/> * Accepted {@code filenameOrUrl} options: * <ul> * <li>http://example.com/image.png</li> * <li>/sdcard/somefolder/image.png</li> * <li>image.png</li> * </ul> * * @param filenameOrUrl File "reference". * @param widthDp Desired width. * @param heightDp Desired height. 
*/ @Nullable public static Bitmap getResizedIcon(final String filenameOrUrl, final int widthDp, final int heightDp) { if (filenameOrUrl == null) return null; return getResizedIcon(getFileFromUrlOrPath(filenameOrUrl), widthDp, heightDp); } /** * * @param name * @param width Pixels * @param height Pixels * @return */ @Nullable public static Bitmap getPhotoFromAssets(String name, int width, int height) { try { return decodeSampledBitmapFromFileOnAssets(CFApp.getContext(), name, width, height); } catch (IOException e) { Log.e(TAG, "" + e.getMessage(), e); } return null; } /** * Tries to get an image from the cache folder. If not found, tries to get the original image, scale it and save it to the * cache asynchronously. * * @param file The path to the image file. * @param widthDp Minimum width in DP's. * @param heightDp Minimum height in DP's. * @return Scaled image. * @see #getPhotoFromFile */ @Nullable public static Bitmap getResizedIcon(@Nullable File file, final int widthDp, final int heightDp) { Bitmap bitmap = null; if (file == null) return null; String name = file.getName(); String cacheName; cacheName = name.substring(0, name.length() - 4) + "-" + widthDp + "x" + heightDp + ".png"; File cacheFile = new File(cacheDir, cacheName); if (cacheFile.exists()) { bitmap = BitmapFactory.decodeFile(cacheFile.getAbsolutePath()); } if (bitmap == null) { // cache doesn't exist Bitmap bitmap1 = null; if (file.exists()) { // Load full size from sd bitmap1 = BitmapFactory.decodeFile(file.getAbsolutePath()); } else if (getImagesOnAssets().contains(name)) { // Load full size from assets bitmap1 = getPhotoFromAssets(name); } if (bitmap1 != null) { if (widthDp != -1 || heightDp != -1) { // desired size int widthPx; int heightPx; if (bitmap1.getWidth() >= bitmap1.getHeight()) { widthPx = (int) (widthDp * density); heightPx = /*heightDp != -1 ? 
(int) (heightDp * density) :*/ widthPx * bitmap1.getHeight() / bitmap1.getWidth(); } else { heightPx = (int) (heightDp * density); widthPx = /*widthDp != -1 ? (int) (widthDp * density) : */heightPx * bitmap1.getWidth() / bitmap1.getHeight(); } if (widthPx == bitmap1.getWidth() && heightPx == bitmap1.getHeight()) { return bitmap1; } if (widthPx == 0 && heightPx == 0) { throw new IllegalArgumentException(String.format( "width(%d) and height(%d) must be > 0. Provided values: widthDp:%d heightDp:%d", widthPx, heightPx, widthDp, heightDp)); } // bitmap size != desired size bitmap = scaleAndCrop(bitmap1, widthPx, heightPx); bitmap1.recycle(); new ImageWriter(cacheFile, bitmap).execute(); } else { bitmap = bitmap1; } } else { Log.w(TAG, "IMAGE DOES NOT EXIST " + file); } } return bitmap; } public static Bitmap scaleAndCrop(Bitmap bitmap1, int widthPx, int heightPx) { Bitmap bitmap = Bitmap.createBitmap(widthPx, heightPx, Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(bitmap); Paint paint = new Paint(/*Paint.FILTER_BITMAP_FLAG*/); paint.setAntiAlias(true); canvas.drawBitmap(bitmap1, null, new Rect(0, 0, widthPx, heightPx), paint); return bitmap; } /** * Configuration.SCREENLAYOUT_SIZE_LARGE, Configuration.SCREENLAYOUT_SIZE_NORMAL... */ public static int getScreenLayout() { return screenLayout; } public static HashSet<String> getImagesOnAssets() { if (imagesOnAssets == null) { try { imagesOnAssets = new HashSet<String>(Arrays.asList(CFApp.getContext().getAssets().list(""))); } catch (IOException e) { Log.e(TAG, "" + e.getMessage(), e); } } return imagesOnAssets; } /** * Saves an image to the cache, asynchronously. */ static class ImageWriter extends AsyncTask<Void, Void, Void> { private final File file; private final Bitmap bitmap; public ImageWriter(File file, Bitmap bitmap) { this.file = file; this.bitmap = bitmap; } @Override protected Void doInBackground(Void... 
voids) { try { FileOutputStream out = new FileOutputStream(file); bitmap.compress(Bitmap.CompressFormat.PNG, 0, out); } catch (Exception e) { Log.e(TAG, "Bitmap file:" + file, e); } return null; } } static void writeImageInBackground(final File file, final Bitmap bitmap) { Task.callInBackground(new Callable<Object>() { @Override public Object call() throws Exception { try { final String state = Environment.getExternalStorageState(); if (state.equals(Environment.MEDIA_MOUNTED)) { FileOutputStream out = new FileOutputStream(file); bitmap.compress(Bitmap.CompressFormat.PNG, 0, out); } else { Log.i("Can't write " + file + " on ExtStorage. " + state); } } catch (Exception e) { Log.e(TAG, "WriteImageInBackground Failed for " + file.getAbsolutePath(), e); } return null; } }); } /** * Obtains an image, scaled down to be at least the requested size and rotated according to the EXIF on the file.<br/> * It's aware of the device density. * * @param path The path to the image file. * @return Scaled and rotated image or null if no image was found. */ @SuppressWarnings("SuspiciousNameCombination") @Nullable public static Bitmap getPhotoFromFile(String path) { try { int orientation = getCameraPhotoOrientation(path); Bitmap bitmap = BitmapFactory.decodeFile(path, null); return rotate(bitmap, orientation); } catch (Exception e) { Log.e(TAG, "" + e.getMessage(), e); return null; } } /** * Obtains an image, scaled down to be at least the requested size and rotated according to the EXIF on the file.<br/> * It's aware of the device density. * * @param path The path to the image file. * @param width Desired width in DP's. * @param height Desired height in DP's. * @return Scaled and rotated image or null if no image was found. 
*/ @SuppressWarnings("SuspiciousNameCombination") @Nullable public static Bitmap getPhotoFromFile(String path, int width, int height) { int orientation = getCameraPhotoOrientation(path); Bitmap bitmap; if (width > 0 || height > 0) { if (displayMetrics == null) { Log.w(TAG, "Device density not accurate. Please call setDensity() from an activity before this."); } if (orientation == 90 || orientation == 270) { int x = width; width = height; height = x; } width *= density; height *= density; bitmap = decodeSampledBitmapFromFile(path, width, height); } else { bitmap = BitmapFactory.decodeFile(path); } /* if (bitmap == null) { Log.e(TAG, "Image "+path+" not found."); return null; } float originalImgRatio = (float) (1.0 * bitmap.getWidth() / bitmap.getHeight()); float desiredSizeRatio = (float) (1.0 * width / height); int finalWidth; int finalHeight; if (originalImgRatio > desiredSizeRatio) { finalHeight = height; finalWidth = (int) (height * originalImgRatio); } else { finalWidth = width; finalHeight = (int) (finalWidth / originalImgRatio); } Log.i(TAG, "getPhoto " + path + " " + width + "x" + height + " -> " + finalWidth + "x" + finalHeight+" orientation: "+orientation); bitmap = Bitmap.createScaledBitmap(bitmap, finalWidth, finalHeight, true); */ bitmap = rotate(bitmap, orientation); return bitmap; } /** * Obtains an image, scaled down to be at least the requested size and rotated according to the EXIF on the file.<br/> * It's aware of the device density. * * @param path The path to the image file. * @param width Desired width in DP's. * @param height Desired height in DP's. * @return Scaled and rotated image or null if no image was found. 
*/ @SuppressWarnings("SuspiciousNameCombination") @Nullable public static Bitmap getPhotoFromFilePx(String path, int width, int height) { int orientation = getCameraPhotoOrientation(path); Bitmap bitmap; if (width > 0 || height > 0) { if (orientation == 90 || orientation == 270) { int x = width; width = height; height = x; } bitmap = decodeSampledBitmapFromFile(path, width, height); } else { bitmap = BitmapFactory.decodeFile(path); } /* if (bitmap == null) { Log.e(TAG, "Image "+path+" not found."); return null; } float originalImgRatio = (float) (1.0 * bitmap.getWidth() / bitmap.getHeight()); float desiredSizeRatio = (float) (1.0 * width / height); int finalWidth; int finalHeight; if (originalImgRatio > desiredSizeRatio) { finalHeight = height; finalWidth = (int) (height * originalImgRatio); } else { finalWidth = width; finalHeight = (int) (finalWidth / originalImgRatio); } Log.i(TAG, "getPhoto " + path + " " + width + "x" + height + " -> " + finalWidth + "x" + finalHeight+" orientation: "+orientation); bitmap = Bitmap.createScaledBitmap(bitmap, finalWidth, finalHeight, true); */ bitmap = rotate(bitmap, orientation); return bitmap; } /** * Obtains a scaled down image from the file system. * The scaling uses the size parameters to calculate the inSampleSize and, therefore, it will have at least that size. * * @param path The path to the image file. * @param reqWidth Minimum width px. * @param reqHeight Minimum height px. * @return Scaled image. 
*/
    public static Bitmap decodeSampledBitmapFromFile(String path, int reqWidth, int reqHeight) {
        // First decode with inJustDecodeBounds=true to check dimensions without allocating pixels.
        final BitmapFactory.Options options = new BitmapFactory.Options();
        options.inJustDecodeBounds = true;
        options.inPurgeable = true;
        options.inInputShareable = true;
        BitmapFactory.decodeFile(path, options);

        // Calculate inSampleSize so the decoded bitmap is at least reqWidth x reqHeight.
        options.inSampleSize = calculateInSampleSize(options, reqWidth, reqHeight);

        // Decode bitmap with inSampleSize set.
        options.inJustDecodeBounds = false;
        return BitmapFactory.decodeFile(path, options);
    }

    /**
     * Creates a squared thumbnail image from a source image and saves it to disk.
     * Subsequent requests to the same image with the same size will return the cached image.
     * The thumbnail will be a center-cropped version of the original, scaled to the specified size and rotated according to the
     * EXIF.
     *
     * @param c    A context (not the application one)
     * @param path The path to the image file.
     * @param side The side of the final image.
     * @return Image
     * @see #getSquareThumbnail(android.content.Context, String, int) Non-cached version.
     */
    @Nullable
    @Deprecated
    public static Bitmap getCachedSquareThumbnail(Context c, String path, int side) {
        Bitmap bitmap;
        // Cache filename: original name (extension stripped) + side, e.g. "photo128.png".
        String name = new File(path).getName();
        File cacheFile = new File(cacheDir, name.substring(0, name.length() - 4) + side + ".png");
        if (!cacheFile.exists()) {
            try {
                bitmap = getSquareThumbnail(c, path, side);
                // Persist asynchronously so the next call hits the disk cache.
                new ImageWriter(cacheFile, bitmap).execute();
            } catch (Exception e) {
                Log.e(TAG, "Error generating thumbnail", e);
                return null;
            }
        } else {
            Log.v(TAG, "Loading from cache " + path);
            final BitmapFactory.Options options = new BitmapFactory.Options();
            options.inPurgeable = true;
            options.inInputShareable = true;
            bitmap = BitmapFactory.decodeFile(cacheFile.getAbsolutePath(), options);
        }
        return bitmap;
    }

    /**
     * Creates a thumbnail image from a source image.
* The thumbnail will be a center-cropped version of the original, scaled to the specified size and rotated according to the * EXIF. * * @param c A context (not the application one) * @param path The path to the image file. * @param side The side of the final image. * @see #getCachedSquareThumbnail(android.content.Context, String, int) Disk-cached version. */ private static Bitmap getSquareThumbnail(Context c, String path, int side) { Bitmap bitmap = decodeSampledBitmapFromFile(path, side, side); bitmap = cropSquare(bitmap); bitmap = Bitmap.createScaledBitmap(bitmap, side, side, true); bitmap = rotate(bitmap, path); return bitmap; } /** * Creates a square, cropped image from the center of the source image. * * @param bitmap Source bitmap. * @return New image. */ public static Bitmap cropSquare(Bitmap bitmap) { int originalWidth = bitmap.getWidth(); int originalHeight = bitmap.getHeight(); int x, y, side; if (originalWidth > originalHeight) { x = (originalWidth - originalHeight) / 2; y = 0; side = originalHeight; } else { x = 0; y = (originalHeight - originalWidth) / 2; side = originalWidth; } // Log.v("getThumbnail", "x:" + x + " y:" + y + " side:" + side); bitmap = Bitmap.createBitmap(bitmap, x, y, side, side); return bitmap; } public static Rect getCenterSquare(Bitmap bitmap) { int originalWidth = bitmap.getWidth(); int originalHeight = bitmap.getHeight(); int x, y, side; if (originalWidth > originalHeight) { x = (originalWidth - originalHeight) / 2; y = 0; side = originalHeight; } else { x = 0; y = (originalHeight - originalWidth) / 2; side = originalWidth; } return new Rect(x, y, x + side, y + side); } @Deprecated public static Bitmap getThumbnail(Bitmap bitmap, int side) { int iw = bitmap.getWidth(); int ih = bitmap.getHeight(); int x, y, w, h; if (iw > ih) { x = (iw - ih) / 2; y = 0; w = ih; h = ih; } else { x = 0; y = (ih - iw) / 2; h = iw; w = iw; } bitmap = Bitmap.createBitmap(bitmap, x, y, w, h); bitmap = Bitmap.createScaledBitmap(bitmap, side, side, 
true); Log.i("getThumbnail", "x:" + x + " y:" + y + " w:" + w + " h:" + h + " | scaled: " + bitmap.getWidth() + " x " + bitmap.getHeight()); return bitmap; } /** * Calculates the orientation of an image on disk based on the EXIF information. * * @param imagePath The path to the image. * @return Returns 0, 90, 180 or 270. */ public static int getCameraPhotoOrientation(String imagePath) { int rotate = 0; try { ExifInterface exif = new ExifInterface(imagePath); int orientation = exif.getAttributeInt(ExifInterface.TAG_ORIENTATION, ExifInterface.ORIENTATION_NORMAL); switch (orientation) { case ExifInterface.ORIENTATION_ROTATE_270: rotate = 270; break; case ExifInterface.ORIENTATION_ROTATE_180: rotate = 180; break; case ExifInterface.ORIENTATION_ROTATE_90: rotate = 90; break; } // Log.v(TAG, "Exif orientation: " + orientation); } catch (Exception e) { Log.e(TAG, "Error getCameraPhotoOrientation", e); } return rotate; } /** * Creates a rotated copy of an image according to the EXIF information. * * @param bitmap The source image. * @param path The path on disk. * @return A rotated bitmap. */ public static Bitmap rotate(Bitmap bitmap, String path) { int orientation = getCameraPhotoOrientation(path); bitmap = rotate(bitmap, orientation); return bitmap; } /** * Rotates an image by a given number of degrees. * * @param bitmap The source bitmap. * @param orientation The amount of degrees to rotate. * @return Rotated bitmap. */ public static Bitmap rotate(Bitmap bitmap, int orientation) { if (orientation != 0) { Matrix matrix = new Matrix(); matrix.postRotate(orientation); // create a new bitmap from the original using the matrix to transform the result Bitmap bitmap1 = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), matrix, true); bitmap.recycle(); return bitmap1; } return bitmap; } /** * Creates a new bitmap from the original resource and paints the visible parts with the given color. 
* The alpha channel is the only part of the original resource that is used for the painting. * <p/> * Note: I tried saving a painted file on disk and loading that one each time. Got 1ms improvement but more used memory for * repeated calls. * * @param c A context. * @param resId The original image with alpha channel. * @param color The new Color * @return Bitmap with new color. */ public static Bitmap createRecoloredBitmap(Context c, int resId, int color) { BitmapFactory.Options opts = new BitmapFactory.Options(); opts.inPreferredConfig = Bitmap.Config.ARGB_8888; Bitmap source = BitmapFactory.decodeResource(c.getResources(), resId); return createRecoloredBitmap(source, color); } /** * Wraps {@link #createRecoloredBitmap(android.content.Context, int, int)} in a {@link * android.graphics.drawable.BitmapDrawable}. * * @param context A context. * @param resId The original image with alpha channel. * @param color The new Color * @return Bitmap with new color. */ public static BitmapDrawable createRecoloredDrawable(Context context, int resId, int color) { return new BitmapDrawable(context.getResources(), createRecoloredBitmap(context, resId, color)); } /** * Wraps {@link #createRecoloredBitmap(android.graphics.Bitmap, int)} in a {@link * android.graphics.drawable.BitmapDrawable}. * * @param bitmap The original image with alpha channel. * @param color The new Color * @return Bitmap with new color. */ public static BitmapDrawable createRecoloredDrawable(Context context, Bitmap bitmap, int color) { return new BitmapDrawable(context.getResources(), createRecoloredBitmap(bitmap, color)); } /** * Creates a new bitmap from the original resource and paints the visible parts with the given color. * The alpha channel is the only part of the original resource that is used for the painting. * * @param source A context. * @param color The original image with alpha channel. * @return Bitmap with new color. 
*/ public static Bitmap createRecoloredBitmap(Bitmap source, int color) { Bitmap mask = source.extractAlpha(); Bitmap targetBitmap = Bitmap.createBitmap(mask.getWidth(), mask.getHeight(), Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(targetBitmap); Paint paint = new Paint(); paint.setColor(color); canvas.drawBitmap(mask, 0, 0, paint); return targetBitmap; } /** * Creates a new bitmap from the original resource and paints the visible parts with the given color. * The alpha channel is the only part of the original resource that is used for the painting. * <p/> * Note: I tried saving a painted file on disk and loading that one each time. Got 1ms improvement but more used memory for * repeated calls. * * @param c A context. * @param resId The original image with alpha channel. * @param color The new Color * @return Bitmap with new color. */ public static Bitmap createRecoloredBitmapMultiply(Context c, int resId, int color) { BitmapFactory.Options opts = new BitmapFactory.Options(); opts.inPreferredConfig = Bitmap.Config.ARGB_8888; Bitmap source = BitmapFactory.decodeResource(c.getResources(), resId); return createRecoloredBitmapMultiply(source, color); } /** * Creates a new bitmap from the original resource and paints the visible parts with the given color. * The alpha channel is the only part of the original resource that is used for the painting. * * @param source A context. * @param color The original image with alpha channel. * @return Bitmap with new color. 
*/ public static Bitmap createRecoloredBitmapMultiply(Bitmap source, int color) { final Bitmap blend = createRecoloredBitmap(source, color); Paint p = new Paint(); p.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.MULTIPLY)); p.setShader(new BitmapShader(blend, Shader.TileMode.CLAMP, Shader.TileMode.CLAMP)); Canvas c = new Canvas(); final Bitmap targetBitmap = source.copy(Bitmap.Config.ARGB_8888, true); c.setBitmap(targetBitmap); c.drawBitmap(source, 0, 0, null); c.drawRect(0, 0, source.getWidth(), source.getHeight(), p); return targetBitmap; } /** * Draws text on the canvas. Tries to make the text as big as possible so it fills the canvas but not more. * <p/> * This method starts on size 1 and goes up. Maybe a binary search or something could be better, but it works for simple * stuff. * * @param canvas The canvas to draw the text on. * @param text The text. * @param textColor The color for the text. * @param typeface The typeface for the text. */ public static void placeTextFillingCanvas(Canvas canvas, String text, int textColor, Typeface typeface) { int canvasWidth = canvas.getWidth(); int canvasHeight = canvas.getHeight(); int text_size = 1; TextPaint paint = new TextPaint(); paint.setColor(textColor); paint.setTypeface(null); paint.setSubpixelText(true); paint.setAntiAlias(true); paint.setTypeface(typeface); StaticLayout sl1 = null; StaticLayout sl2; while (true) { paint.setTextSize(text_size); sl2 = new StaticLayout(text, paint, canvasWidth, Layout.Alignment.ALIGN_CENTER, 1, 0, false); text_size += 1; if (sl1 == null || sl2.getHeight() <= canvasHeight && sl2.getWidth() <= canvasWidth) { sl1 = sl2; } else { break; } } canvas.save(); canvas.translate(0, (float) (1.0 * (canvasHeight - sl1.getHeight()) / 2)); sl1.draw(canvas); canvas.restore(); } /** * Creates a bitmap with a circle of a certain size and color. * * @param iconSizePx The size of the bitmap/diameter of the circle, in pixels. * @param color The color of the bitmap. 
*/ public static Bitmap getCircleBitmap(int iconSizePx, int color) { Bitmap bitmap = Bitmap.createBitmap(iconSizePx, iconSizePx, Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(bitmap); drawCircle(canvas, color, iconSizePx); return bitmap; } /** * Wraps {@link #getCircleBitmap(int, int)} in a {@link android.graphics.drawable.BitmapDrawable}. * * @param res A resources. * @param iconSizeDp The size of the bitmap/diameter of the circle, in DP. * @param color The color of the bitmap. */ public static BitmapDrawable getCircleBitmapDrawable(Resources res, int iconSizeDp, int color) { return new BitmapDrawable(res, getCircleBitmap(dp2px(iconSizeDp), color)); } /** * Draws a circle of a certain size and color in the top left of the given canvas. * * @param canvas The canvas to draw the circle on. * @param color The color for the circle. * @param iconSizePx The diameter of the circle, in pixels. */ public static void drawCircle(Canvas canvas, int color, int iconSizePx) {// DRAW COLORED CIRCLE Paint p = new Paint(); p.setAntiAlias(true); p.setColor(color); canvas.drawCircle(iconSizePx / 2f, iconSizePx / 2f, iconSizePx / 2f, p); } /** * Draws a bitmap on top of another, in the middle. Modifies the bottom bitmap. * * @param bottom The bitmap that will be bellow. * @param top The bitmap that will be on top. */ public static Bitmap drawOnMiddle(Bitmap bottom, Bitmap top) { if (bottom == null) throw new InvalidParameterException("Bottom is null!"); if (top == null) throw new InvalidParameterException("Top is null!"); if (!bottom.isMutable()) { bottom = bottom.copy(Bitmap.Config.ARGB_8888, true); assert bottom != null; } final Canvas canvas = new Canvas(bottom); canvas.drawBitmap(top, (bottom.getWidth() - top.getWidth()) / 2f, (bottom.getHeight() - top.getHeight()) / 2f, null); return bottom; } /** * Asynchronously obtains a bitmap from disk and places it on an ImageView with a quick fade-in effect. 
* <p/> * You must use an {@link android.support.v4.util.LruCache} that implements {@link android.support.v4.util.LruCache#create(Object)} * so it will obtain * the bitmap. * * @param file A reference to the file that will be passed to the LruCache. * @param view The view that will receive the bitmap. * @param cache The LruCache implementation (see {@link com.carlosefonseca.common.utils.ImageUtils.SizedImageCache} for * an * example). * @param placeholder A placeholder that will be set in case there is no bitmap to place. */ @Nullable public static AsyncTask<Void, Void, Bitmap> setImageAsyncFadeIn(final String file, final ImageView view, final LruCache<String, Bitmap> cache, final int placeholder) { return setImageAsyncFadeIn(file, view, cache, placeholder, false); } /** * Asynchronously obtains a bitmap from disk and places it on an ImageView with a quick fade-in effect. * <p/> * You must use an {@link LruCache} that implements {@link android.support.v4.util.LruCache#create(Object)} so it will obtain * the bitmap. * * @param file A reference to the file that will be passed to the LruCache. * @param view The view that will receive the bitmap. * @param cache The LruCache implementation (see {@link com.carlosefonseca.common.utils.ImageUtils.SizedImageCache} * for * an * example). * @param placeholder A placeholder that will be set in case there is no bitmap to place. */ @Nullable public static AsyncTask<Void, Void, Bitmap> setImageAsyncFadeIn(final String file, final View view, final LruCache<String, Bitmap> cache, final int placeholder, final boolean toBackground) { if (file != null && file.equals(view.getTag())) { return null; } view.setVisibility(View.INVISIBLE); return new AsyncTask<Void, Void, Bitmap>() { @Nullable @Override protected Bitmap doInBackground(Void... params) { return file != null ? 
cache.get(file) : null; } @Override protected void onPostExecute(Bitmap bmp) { if (bmp != null) { if (toBackground) { ResourceUtils.setBackground(view, new BitmapDrawable(view.getResources(), bmp)); } else { ((ImageView) view).setImageBitmap(bmp); } view.setTag(file); } else { if (toBackground) { view.setBackgroundResource(placeholder); } else { ((ImageView) view).setImageResource(placeholder); } } if (SDK_INT < ICE_CREAM_SANDWICH) { view.setVisibility(View.VISIBLE); return; } AlphaAnimation alphaAnimation = new AlphaAnimation(0, 1); alphaAnimation.setDuration(100); Animation.AnimationListener listener = new Animation.AnimationListener() { @Override public void onAnimationStart(Animation animation) { } @Override public void onAnimationEnd(Animation animation) { view.setVisibility(View.VISIBLE); } @Override public void onAnimationRepeat(Animation animation) { } }; alphaAnimation.setAnimationListener(listener); view.startAnimation(alphaAnimation); } }.execute(); } /** * Image cache that loads resized photos from disk. Specify the desired size of the images in the constructor and perform * {@link #get(Object)} with a filename or a */ public static class SizedImageCache extends LruCache<String, Bitmap> { protected final int width; protected final int height; public SizedImageCache(int maxSize, int widthDP, int heightDP) { super(maxSize); this.width = widthDP; this.height = heightDP; } public SizedImageCache(int maxSize, int imageSizeDP) { this(maxSize, imageSizeDP, imageSizeDP); } @Nullable @Override protected Bitmap create(String key) { return getCachedPhoto(key, width, height); } } /** * Note: In this example, one eighth of the application memory is allocated for our cache. On a normal/hdpi device this is a * minimum of around 4MB (32/8). A full screen GridView filled with images on a device with 800x480 resolution would use * around 1.5MB (800*480*4 bytes), so this would cache a minimum of around 2.5 pages of images in memory. 
*/ public static class BitmapCache extends LruCache<String, Bitmap> { public BitmapCache() { // Get max available VM memory, exceeding this amount will throw an // OutOfMemory exception. Stored in kilobytes as LruCache takes an // int in its constructor. // Use 1/4th of the available memory for this memory cache. super((int) (Runtime.getRuntime().maxMemory() / 4)); } @Override protected int sizeOf(String key, Bitmap bitmap) { // The cache size will be measured in kilobytes rather than // number of items. return sizeBitmap(bitmap); } public static int sizeBitmap(Bitmap bitmap) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) return bitmap.getAllocationByteCount(); return bitmap.getRowBytes() * bitmap.getHeight(); } } /** * Redraws the bitmap in a new size. Scales the bitmap keeping the aspect ratio. */ public static Bitmap resizeBitmapCanvas(Bitmap bitmap, int desiredWidth, int desiredHeight) { if (bitmap.getWidth() == desiredWidth && bitmap.getHeight() == desiredHeight) return bitmap; float r = 1f * bitmap.getWidth() / bitmap.getHeight(); int newBitmapWidth; int newBitmapHeight = (int) (desiredWidth * 1f / r); if (newBitmapHeight > desiredHeight) { newBitmapHeight = desiredHeight; newBitmapWidth = (int) (desiredHeight * 1f * r); } else { newBitmapWidth = desiredWidth; } Bitmap newBitmap = Bitmap.createBitmap(desiredWidth, desiredHeight, Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(newBitmap); Paint paint = new Paint(/*Paint.FILTER_BITMAP_FLAG*/); paint.setAntiAlias(true); int hMargin = (desiredWidth - newBitmapWidth) / 2; int vMargin = (desiredHeight - newBitmapHeight) / 2; Rect dst = new Rect(hMargin, vMargin, hMargin + newBitmapWidth, vMargin + bitmap.getHeight()); canvas.drawBitmap(bitmap, null, dst, paint); return newBitmap; } static boolean isImage(String path) { return path != null && Pattern.compile(".+\\.(jpe?g|png|gif)", Pattern.CASE_INSENSITIVE).matcher(path).matches(); } }
Possible fix to BitmapCache
src/main/java/com/carlosefonseca/common/utils/ImageUtils.java
Possible fix to BitmapCache
<ide><path>src/main/java/com/carlosefonseca/common/utils/ImageUtils.java <ide> } <ide> <ide> public static int sizeBitmap(Bitmap bitmap) { <del> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) return bitmap.getAllocationByteCount(); <del> return bitmap.getRowBytes() * bitmap.getHeight(); <add> if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) { <add> return bitmap.getAllocationByteCount(); <add> } else if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR1) { <add> return bitmap.getByteCount(); <add> } else { <add> return bitmap.getRowBytes() * bitmap.getHeight(); <add> } <add> } <add> <add> @Override <add> protected void entryRemoved(boolean evicted, String key, Bitmap oldValue, Bitmap newValue) { <add> oldValue.recycle(); <add> super.entryRemoved(evicted, key, oldValue, newValue); <ide> } <ide> }