Dataset schema:

  column            dtype           min      max
  repo              stringlengths   2        99
  file              stringlengths   13       225
  code              stringlengths   0        18.3M
  file_length       int64           0        18.3M
  avg_line_length   float64         0        1.36M
  max_line_length   int64           0        4.26M
  extension_type    stringclasses   1 value
adanet
adanet-master/adanet/experimental/phases/repeat_phase.py
# Lint as: python3
# Copyright 2020 The AdaNet Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A phase that repeats its inner phases."""

from typing import Callable, Iterable, Iterator, List

from adanet.experimental.phases.phase import DatasetProvider
from adanet.experimental.phases.phase import ModelProvider
from adanet.experimental.phases.phase import Phase
from adanet.experimental.work_units.work_unit import WorkUnit
import tensorflow.compat.v2 as tf


class RepeatPhase(DatasetProvider, ModelProvider):
  """A phase that repeats its inner phases."""

  def __init__(self,
               phase_factory: List[Callable[..., Phase]],
               repetitions: int):
    """Initializes a RepeatPhase.

    Args:
      phase_factory: A list of callables that return `Phase` instances.
      repetitions: Number of times to repeat the phases in the phase factory.
    """
    self._phase_factory = phase_factory
    self._repetitions = repetitions
    self._final_phase = None

  def work_units(self, previous_phase: DatasetProvider) -> Iterator[WorkUnit]:
    for _ in range(self._repetitions):
      # Each repetition, the "first" previous phase is the one preceding the
      # repeat phase itself.
      prev_phase = previous_phase
      for phase in self._phase_factory:
        phase = phase()
        for work_unit in phase.work_units(prev_phase):
          yield work_unit
        prev_phase = phase
    self._final_phase = prev_phase

  def get_train_dataset(self) -> tf.data.Dataset:
    if not isinstance(self._final_phase, DatasetProvider):
      raise NotImplementedError(
          'The last phase in repetition does not provide datasets.')
    return self._final_phase.get_train_dataset()

  def get_eval_dataset(self) -> tf.data.Dataset:
    if not isinstance(self._final_phase, DatasetProvider):
      raise NotImplementedError(
          'The last phase in repetition does not provide datasets.')
    return self._final_phase.get_eval_dataset()

  def get_models(self) -> Iterable[tf.keras.Model]:
    if not isinstance(self._final_phase, ModelProvider):
      raise NotImplementedError(
          'The last phase in repetition does not provide models.')
    return self._final_phase.get_models()

  def get_best_models(self, num_models=1) -> Iterable[tf.keras.Model]:
    if not isinstance(self._final_phase, ModelProvider):
      raise NotImplementedError(
          'The last phase in repetition does not provide models.')
    return self._final_phase.get_best_models(num_models)
3,035
38.947368
78
py
adanet
adanet-master/adanet/experimental/phases/keras_tuner_phase.py
# Lint as: python3 # Copyright 2019 The AdaNet Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A phase in the AdaNet workflow.""" import sys from typing import Callable, Iterable, Iterator, Union from adanet.experimental.phases.phase import DatasetProvider from adanet.experimental.phases.phase import ModelProvider from adanet.experimental.work_units.keras_tuner_work_unit import KerasTunerWorkUnit from adanet.experimental.work_units.work_unit import WorkUnit from kerastuner.engine.tuner import Tuner import tensorflow.compat.v2 as tf class KerasTunerPhase(DatasetProvider, ModelProvider): """Tunes Keras Model hyperparameters using the Keras Tuner.""" def __init__(self, tuner: Union[Callable[..., Tuner], Tuner], *search_args, **search_kwargs): """Initializes a KerasTunerPhase. Args: tuner: A `kerastuner.tuners.tuner.Tuner` instance or a callable that returns a `kerastuner.tuners.tuner.Tuner` instance. *search_args: Arguments to pass to the tuner search method. **search_kwargs: Keyword arguments to pass to the tuner search method. """ if callable(tuner): self._tuner = tuner() else: self._tuner = tuner self._search_args = search_args self._search_kwargs = search_kwargs def work_units(self, previous_phase: DatasetProvider) -> Iterator[WorkUnit]: self._train_dataset = previous_phase.get_train_dataset() self._eval_dataset = previous_phase.get_eval_dataset() yield KerasTunerWorkUnit( self._tuner, x=self._train_dataset, validation_data=self._eval_dataset, *self._search_args, **self._search_kwargs) # TODO: Find a better way to get all models than to pass in a # large number. def get_models(self) -> Iterable[tf.keras.Model]: return self._tuner.get_best_models(num_models=sys.maxsize) def get_best_models(self, num_models) -> Iterable[tf.keras.Model]: return self._tuner.get_best_models(num_models=num_models) def get_train_dataset(self) -> tf.data.Dataset: return self._train_dataset def get_eval_dataset(self) -> tf.data.Dataset: return self._eval_dataset
2,683
36.277778
83
py
adanet
adanet-master/adanet/experimental/phases/phase.py
# Lint as: python3 # Copyright 2019 The AdaNet Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A phase in the AdaNet workflow.""" import abc from typing import Iterable, Iterator, Optional from adanet.experimental.storages.in_memory_storage import InMemoryStorage from adanet.experimental.storages.storage import Storage from adanet.experimental.work_units.work_unit import WorkUnit import tensorflow.compat.v2 as tf class Phase(abc.ABC): """A stage in a linear workflow.""" def __init__(self, storage: Storage = InMemoryStorage()): self._storage = storage # TODO: Find a better way to ensure work_units only gets called # once per phase. @abc.abstractmethod def work_units(self, previous_phase: Optional['Phase']) -> Iterator[WorkUnit]: pass class DatasetProvider(Phase, abc.ABC): """An interface for a phase that produces datasets.""" def __init__(self, storage: Storage = InMemoryStorage()): """Initializes a Phase. Args: storage: A `Storage` instance. """ super().__init__(storage) self._train_dataset = None self._eval_dataset = None @abc.abstractmethod def get_train_dataset(self) -> tf.data.Dataset: """Returns the dataset for train data.""" pass @abc.abstractmethod def get_eval_dataset(self) -> tf.data.Dataset: """Returns the dataset for eval data.""" pass class ModelProvider(Phase, abc.ABC): """An interface for a phase that produces models.""" @abc.abstractmethod def get_models(self) -> Iterable[tf.keras.Model]: """Returns the models produced by this phase.""" pass @abc.abstractmethod def get_best_models(self, num_models: int = 1) -> Iterable[tf.keras.Model]: """Returns the `k` best models produced by this phase.""" pass
2,294
28.805195
80
py
adanet
adanet-master/adanet/experimental/phases/__init__.py
# Lint as: python3 # Copyright 2020 The AdaNet Authors. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """AdaNet ModelFlow phases.""" from adanet.experimental.phases.autoensemble_phase import AutoEnsemblePhase from adanet.experimental.phases.input_phase import InputPhase from adanet.experimental.phases.keras_trainer_phase import KerasTrainerPhase from adanet.experimental.phases.keras_tuner_phase import KerasTunerPhase from adanet.experimental.phases.repeat_phase import RepeatPhase __all__ = [ "AutoEnsemblePhase", "InputPhase", "KerasTrainerPhase", "KerasTunerPhase", "RepeatPhase", ]
1,131
35.516129
76
py
adanet
adanet-master/adanet/experimental/phases/keras_trainer_phase.py
# Lint as: python3
# Copyright 2019 The AdaNet Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A phase in the AdaNet workflow."""

from typing import Callable, Iterable, Iterator, Union

from adanet.experimental.phases.phase import DatasetProvider
from adanet.experimental.phases.phase import ModelProvider
from adanet.experimental.storages.in_memory_storage import InMemoryStorage
from adanet.experimental.storages.storage import Storage
from adanet.experimental.work_units.keras_trainer_work_unit import KerasTrainerWorkUnit
from adanet.experimental.work_units.work_unit import WorkUnit
import tensorflow.compat.v2 as tf


class KerasTrainerPhase(DatasetProvider, ModelProvider):
  """Trains Keras models."""

  def __init__(self,
               models: Union[Iterable[tf.keras.Model],
                             Callable[[], Iterable[tf.keras.Model]]],
               storage: Storage = InMemoryStorage()):
    """Initializes a KerasTrainerPhase.

    Args:
      models: A list of `tf.keras.Model` instances or a list of callables that
        return `tf.keras.Model` instances.
      storage: A `Storage` instance.
    """
    # TODO: Consume arbitrary fit inputs.
    # Dataset should be wrapped inside a work unit.
    # For instance, when you create a KerasTrainer work unit, the dataset is
    # encapsulated inside that work unit.
    # What if you want to run on different (parts of the) datasets?
    # What if a work unit consumes numpy arrays?
    super().__init__(storage)
    self._models = models

  def work_units(self, previous_phase: DatasetProvider) -> Iterator[WorkUnit]:
    self._train_dataset = previous_phase.get_train_dataset()
    self._eval_dataset = previous_phase.get_eval_dataset()
    models = self._models
    if callable(models):
      models = models()
    for model in models:
      yield KerasTrainerWorkUnit(model, self._train_dataset,
                                 self._eval_dataset, self._storage)

  def get_models(self) -> Iterable[tf.keras.Model]:
    return self._storage.get_models()

  def get_best_models(self, num_models) -> Iterable[tf.keras.Model]:
    return self._storage.get_best_models(num_models)

  def get_train_dataset(self) -> tf.data.Dataset:
    return self._train_dataset

  def get_eval_dataset(self) -> tf.data.Dataset:
    return self._eval_dataset
2,844
39.070423
87
py
adanet
adanet-master/adanet/experimental/phases/input_phase.py
# Lint as: python3 # Copyright 2019 The AdaNet Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A phase that provides datasets.""" from typing import Optional from adanet.experimental.phases.phase import DatasetProvider from adanet.experimental.phases.phase import Phase import tensorflow.compat.v2 as tf class InputPhase(DatasetProvider): """A phase that simply relays train and eval datasets.""" def __init__(self, train_dataset: tf.data.Dataset, eval_dataset: tf.data.Dataset): """Initializes an InputPhase. Args: train_dataset: A `tf.data.Dataset` for training. eval_dataset: A `tf.data.Dataset` for evaluation. """ self._train_dataset = train_dataset self._eval_dataset = eval_dataset def get_train_dataset(self) -> tf.data.Dataset: return self._train_dataset def get_eval_dataset(self) -> tf.data.Dataset: return self._eval_dataset def work_units(self, previous_phase: Optional[Phase]): return []
1,512
31.891304
74
py
adanet
adanet-master/adanet/experimental/keras/ensemble_model.py
# Lint as: python3 # Copyright 2019 The AdaNet Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """An AdaNet ensemble implementation.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from typing import Sequence import tensorflow.compat.v2 as tf class EnsembleModel(tf.keras.Model): """An ensemble of Keras models.""" def __init__(self, submodels: Sequence[tf.keras.Model], freeze_submodels: bool = True): """Initializes an EnsembleModel. Args: submodels: A list of `tf.keras.Model` that compose the ensemble. freeze_submodels: Whether to freeze the weights of submodels. """ super().__init__() if freeze_submodels: for submodel in submodels: for layer in submodel.layers: layer.trainable = False self._submodels = submodels @property def submodels(self) -> Sequence[tf.keras.Model]: return self._submodels def call(self, inputs): raise NotImplementedError class MeanEnsemble(EnsembleModel): """An ensemble that averages submodel outputs.""" def call(self, inputs): if len(self._submodels) == 1: return self._submodels[0](inputs) submodel_outputs = [] for submodel in self._submodels: submodel_outputs.append(submodel(inputs)) return tf.keras.layers.average(submodel_outputs) class WeightedEnsemble(EnsembleModel): """An ensemble that linearly combines submodel outputs.""" # TODO: Extract output shapes from submodels instead of passing in # as argument. def __init__(self, submodels: Sequence[tf.keras.Model], output_units: int): """Initializes a WeightedEnsemble. Args: submodels: A list of `adanet.keras.SubModel` that compose the ensemble. output_units: The output size of the last layer of each submodel. """ super().__init__(submodels) self.dense = tf.keras.layers.Dense(units=output_units) def call(self, inputs): submodel_outputs = [] for submodel in self.submodels: submodel_outputs.append(submodel(inputs)) return self.dense(tf.stack(submodel_outputs))
2,663
29.62069
79
py
adanet
adanet-master/adanet/experimental/keras/testing_utils.py
# Lint as: python3
# Copyright 2019 The AdaNet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for unit-testing AdaNet Keras."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from typing import Optional, Tuple
import numpy as np
import tensorflow.compat.v2 as tf


# TODO: Add ability to choose the problem type: regression,
# classification, multi-class etc.
def get_holdout_data(
    train_samples: int,
    test_samples: int,
    input_shape: Tuple[int],
    num_classes: int,
    random_seed: Optional[int] = None
) -> Tuple[tf.data.Dataset, tf.data.Dataset]:
  """Generates training and test data.

  Args:
    train_samples: Number of training samples to generate.
    test_samples: Number of test samples to generate.
    input_shape: Shape of the inputs.
    num_classes: Number of classes for the data and targets.
    random_seed: A random seed for numpy to use.

  Returns:
    A tuple of `tf.data.Datasets`.
  """
  if random_seed:
    np.random.seed(random_seed)

  num_sample = train_samples + test_samples
  templates = 2 * num_classes * np.random.random((num_classes,) + input_shape)
  y = np.random.randint(0, num_classes, size=(num_sample,))
  x = np.zeros((num_sample,) + input_shape, dtype=np.float32)
  for i in range(num_sample):
    x[i] = templates[y[i]] + np.random.normal(loc=0, scale=1., size=input_shape)
  train_dataset = tf.data.Dataset.from_tensor_slices(
      (x[:train_samples], y[:train_samples]))
  test_dataset = tf.data.Dataset.from_tensor_slices(
      (x[train_samples:], y[train_samples:]))
  return train_dataset, test_dataset
2,182
33.650794
80
py
adanet
adanet-master/adanet/experimental/keras/ensemble_model_test.py
# Lint as: python3 # Copyright 2019 The AdaNet Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for adanet.experimental.keras.EnsembleModel.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl.testing import parameterized from adanet.experimental.keras import testing_utils from adanet.experimental.keras.ensemble_model import MeanEnsemble from adanet.experimental.keras.ensemble_model import WeightedEnsemble import tensorflow.compat.v2 as tf class EnsembleModelTest(parameterized.TestCase, tf.test.TestCase): @parameterized.named_parameters( { 'testcase_name': 'mean_ensemble', 'ensemble': MeanEnsemble, 'want_results': [0.07671691, 0.20448962], }, { 'testcase_name': 'weighted_ensemble', 'ensemble': WeightedEnsemble, 'output_units': 2, 'want_results': [0.42579408, 0.53439462], }) def test_lifecycle(self, ensemble, want_results, output_units=None): train_dataset, test_dataset = testing_utils.get_holdout_data( train_samples=128, test_samples=64, input_shape=(10,), num_classes=2, random_seed=42) # TODO: Consider performing `tf.data.Dataset` transformations # within get_test_data function. train_dataset = train_dataset.batch(32).repeat(10) test_dataset = test_dataset.batch(32).repeat(10) model1 = tf.keras.Sequential([ tf.keras.layers.Dense(64, activation='relu'), tf.keras.layers.Dense(64, activation='relu'), tf.keras.layers.Dense(2), ]) model1.compile( optimizer=tf.keras.optimizers.Adam(0.01), loss='mse') model1.fit(train_dataset) model1.trainable = False # Since models inside ensemble should be trained. model1_pre_train_weights = model1.get_weights() model2 = tf.keras.Sequential([ tf.keras.layers.Dense(64, activation='relu'), tf.keras.layers.Dense(64, activation='relu'), tf.keras.layers.Dense(2), ]) model2.compile( optimizer=tf.keras.optimizers.Adam(0.01), loss='mse') model2.fit(train_dataset) model2.trainable = False # Since models inside ensemble should be trained. model2_pre_train_weights = model2.get_weights() if output_units: ensemble = ensemble(submodels=[model1, model2], output_units=output_units) else: ensemble = ensemble(submodels=[model1, model2]) ensemble.compile( optimizer=tf.keras.optimizers.Adam(0.01), loss='mse', metrics=['mae']) ensemble.fit(train_dataset) # Make sure submodel weights were not altered during ensemble training. model1_post_train_weights = model1.get_weights() model2_post_train_weights = model2.get_weights() self.assertAllClose(model1_pre_train_weights, model1_post_train_weights) self.assertAllClose(model2_pre_train_weights, model2_post_train_weights) eval_results = ensemble.evaluate(test_dataset) self.assertAllClose(eval_results, want_results) if __name__ == '__main__': tf.enable_v2_behavior() tf.test.main()
3,693
34.519231
79
py
adanet
adanet-master/adanet/experimental/keras/model_search_test.py
# Lint as: python3 # Copyright 2019 The AdaNet Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for adanet.experimental.keras.ModelSearch.""" import os import shutil import sys import time from absl import flags from absl.testing import parameterized from adanet.experimental.controllers.sequential_controller import SequentialController from adanet.experimental.keras import testing_utils from adanet.experimental.keras.ensemble_model import MeanEnsemble from adanet.experimental.keras.model_search import ModelSearch from adanet.experimental.phases.autoensemble_phase import AutoEnsemblePhase from adanet.experimental.phases.autoensemble_phase import GrowStrategy from adanet.experimental.phases.autoensemble_phase import MeanEnsembler from adanet.experimental.phases.input_phase import InputPhase from adanet.experimental.phases.keras_trainer_phase import KerasTrainerPhase from adanet.experimental.phases.keras_tuner_phase import KerasTunerPhase from adanet.experimental.phases.repeat_phase import RepeatPhase from adanet.experimental.storages.in_memory_storage import InMemoryStorage from kerastuner import tuners import tensorflow.compat.v2 as tf class ModelSearchTest(parameterized.TestCase, tf.test.TestCase): def setUp(self): super(ModelSearchTest, self).setUp() # Setup and cleanup test directory. # Flags are not automatically parsed at this point. flags.FLAGS(sys.argv) self.test_subdirectory = os.path.join(flags.FLAGS.test_tmpdir, self.id()) shutil.rmtree(self.test_subdirectory, ignore_errors=True) os.makedirs(self.test_subdirectory) def tearDown(self): super(ModelSearchTest, self).tearDown() shutil.rmtree(self.test_subdirectory, ignore_errors=True) def test_phases_end_to_end(self): train_dataset, test_dataset = testing_utils.get_holdout_data( train_samples=128, test_samples=64, input_shape=(10,), num_classes=10, random_seed=42) # TODO: Consider performing `tf.data.Dataset` transformations # within get_test_data function. train_dataset = train_dataset.batch(32) test_dataset = test_dataset.batch(32) model1 = tf.keras.Sequential([ tf.keras.layers.Dense(64, activation='relu'), tf.keras.layers.Dense(10), ]) model1.compile( optimizer=tf.keras.optimizers.Adam(0.01), loss='mse', metrics=['mae']) model2 = tf.keras.Sequential([ tf.keras.layers.Dense(64, activation='relu'), tf.keras.layers.Dense(64, activation='relu'), tf.keras.layers.Dense(10), ]) model2.compile( optimizer=tf.keras.optimizers.Adam(0.01), loss='mse', metrics=['mae']) # TODO: This test could potentially have the best model be # a non-ensemble Keras model. Therefore, need to address this issue and # remove the freeze_submodels flag. 
ensemble = MeanEnsemble(submodels=[model1, model2], freeze_submodels=False) ensemble.compile( optimizer=tf.keras.optimizers.Adam(0.01), loss='mse', metrics=['mae']) controller = SequentialController(phases=[ InputPhase(train_dataset, test_dataset), KerasTrainerPhase([model1, model2]), KerasTrainerPhase([ensemble]), ]) model_search = ModelSearch(controller) model_search.run() self.assertIsInstance( model_search.get_best_models(num_models=1)[0], MeanEnsemble) def test_tuner_end_to_end(self): train_dataset, test_dataset = testing_utils.get_holdout_data( train_samples=128, test_samples=64, input_shape=(10,), num_classes=10, random_seed=42) # TODO: Consider performing `tf.data.Dataset` transformations # within get_holdout_data function. train_dataset = train_dataset.batch(32) test_dataset = test_dataset.batch(32) def build_model(hp): model = tf.keras.Sequential() model.add( tf.keras.layers.Dense( units=hp.Int('units', min_value=32, max_value=512, step=32), activation='relu')) model.add(tf.keras.layers.Dense(10, activation='softmax')) model.compile( optimizer=tf.keras.optimizers.Adam( hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])), loss='sparse_categorical_crossentropy', metrics=['accuracy']) return model # Define phases. tuner = tuners.RandomSearch( build_model, objective='val_accuracy', max_trials=3, executions_per_trial=1, directory=self.test_subdirectory, project_name='helloworld_tuner', overwrite=True) tuner_phase = KerasTunerPhase(tuner) def build_ensemble(): ensemble = MeanEnsemble( submodels=tuner_phase.get_best_models(num_models=2)) ensemble.compile( optimizer=tf.keras.optimizers.Adam(0.01), loss='mse', metrics=['mae']) return [ensemble] ensemble_phase = KerasTrainerPhase(build_ensemble) input_phase = InputPhase(train_dataset, test_dataset) controller = SequentialController(phases=[input_phase, tuner_phase, ensemble_phase]) # Execute phases. model_search = ModelSearch(controller) model_search.run() self.assertIsInstance( model_search.get_best_models(num_models=1)[0], MeanEnsemble) def test_autoensemble_end_to_end(self): train_dataset, test_dataset = testing_utils.get_holdout_data( train_samples=128, test_samples=64, input_shape=(10,), num_classes=10, random_seed=42) # TODO: Consider performing `tf.data.Dataset` transformations # within get_holdout_data function. train_dataset = train_dataset.batch(32) test_dataset = test_dataset.batch(32) def build_model(hp): model = tf.keras.Sequential() model.add( tf.keras.layers.Dense( units=hp.Int('units', min_value=32, max_value=512, step=32), activation='relu')) model.add(tf.keras.layers.Dense(10, activation='softmax')) model.compile( optimizer=tf.keras.optimizers.Adam( hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])), loss='sparse_categorical_crossentropy', metrics=['accuracy']) return model # This allows us to have a shared storage for all the autoensemble phases # that occur in the repeat phase. 
autoensemble_storage = InMemoryStorage() input_phase = InputPhase(train_dataset, test_dataset) # pylint: disable=g-long-lambda repeat_phase = RepeatPhase( [ lambda: KerasTunerPhase( tuners.RandomSearch( build_model, objective='val_accuracy', max_trials=3, executions_per_trial=1, directory=self.test_subdirectory, project_name='helloworld_' + str(int(time.time())), overwrite=True)), lambda: AutoEnsemblePhase( ensemblers=[ MeanEnsembler('sparse_categorical_crossentropy', 'adam', ['accuracy']) ], ensemble_strategies=[GrowStrategy()], storage=autoensemble_storage) ], repetitions=3) # pylint: enable=g-long-lambda controller = SequentialController(phases=[input_phase, repeat_phase]) model_search = ModelSearch(controller) model_search.run() self.assertIsInstance( model_search.get_best_models(num_models=1)[0], MeanEnsemble) if __name__ == '__main__': tf.enable_v2_behavior() tf.test.main()
8,219
35.533333
86
py
adanet
adanet-master/adanet/experimental/keras/model_search.py
# Lint as: python3 # Copyright 2019 The AdaNet Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """An AdaNet interface for model search.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from typing import Sequence from adanet.experimental.controllers.controller import Controller from adanet.experimental.schedulers.in_process_scheduler import InProcessScheduler from adanet.experimental.schedulers.scheduler import Scheduler import tensorflow.compat.v2 as tf class ModelSearch(object): """An AutoML pipeline manager.""" def __init__(self, controller: Controller, scheduler: Scheduler = InProcessScheduler()): """Initializes a ModelSearch. Args: controller: A `Controller` instance. scheduler: A `Scheduler` instance. """ self._controller = controller self._scheduler = scheduler def run(self): """Executes the training workflow to generate models.""" self._scheduler.schedule(self._controller.work_units()) def get_best_models(self, num_models) -> Sequence[tf.keras.Model]: """Returns the top models from the run.""" return self._controller.get_best_models(num_models)
1,757
32.807692
82
py
adanet
adanet-master/adanet/experimental/keras/__init__.py
# Lint as: python3 # Copyright 2020 The AdaNet Authors. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """AdaNet Keras models.""" from adanet.experimental.keras.ensemble_model import EnsembleModel from adanet.experimental.keras.ensemble_model import MeanEnsemble from adanet.experimental.keras.ensemble_model import WeightedEnsemble from adanet.experimental.keras.model_search import ModelSearch __all__ = [ "EnsembleModel", "MeanEnsemble", "WeightedEnsemble", "ModelSearch", ]
1,015
34.034483
74
py
adanet
adanet-master/adanet/experimental/schedulers/scheduler.py
# Lint as: python3 # Copyright 2019 The AdaNet Authors. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A scheduler for managing AdaNet phases.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc from typing import Iterator from adanet.experimental.work_units.work_unit import WorkUnit class Scheduler(abc.ABC): """Abstract interface for a scheduler to be used in ModelFlow pipelines.""" @abc.abstractmethod def schedule(self, work_units: Iterator[WorkUnit]): """Schedules and executes work units. Args: work_units: An iterator that yields `WorkUnit` instances. """ pass
1,197
30.526316
77
py
adanet
adanet-master/adanet/experimental/schedulers/in_process_scheduler.py
# Lint as: python3 # Copyright 2019 The AdaNet Authors. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """An in process scheduler for managing AdaNet phases.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from typing import Iterator from adanet.experimental.schedulers import scheduler from adanet.experimental.work_units.work_unit import WorkUnit class InProcessScheduler(scheduler.Scheduler): """A scheduler that executes in a single process.""" def schedule(self, work_units: Iterator[WorkUnit]): """Schedules and execute work units in a single process. Args: work_units: An iterator that yields `WorkUnit` instances. """ for work_unit in work_units: work_unit.execute()
1,297
32.282051
74
py
adanet
adanet-master/adanet/experimental/schedulers/__init__.py
# Lint as: python3 # Copyright 2020 The AdaNet Authors. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """AdaNet ModelFlow schedulers.""" from adanet.experimental.schedulers.in_process_scheduler import InProcessScheduler __all__ = [ "InProcessScheduler", ]
782
33.043478
82
py
adanet
adanet-master/adanet/experimental/controllers/sequential_controller.py
# Lint as: python3
# Copyright 2019 The AdaNet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A manual controller for model search."""

from typing import Iterator, Sequence

from adanet.experimental.controllers.controller import Controller
from adanet.experimental.phases.phase import ModelProvider
from adanet.experimental.phases.phase import Phase
from adanet.experimental.work_units.work_unit import WorkUnit
import tensorflow.compat.v2 as tf


class SequentialController(Controller):
  """A controller where the user specifies the sequence of phases to execute."""

  # TODO: Add checks to make sure phases are valid.
  def __init__(self, phases: Sequence[Phase]):
    """Initializes a SequentialController.

    Args:
      phases: A list of `Phase` instances.
    """
    self._phases = phases

  def work_units(self) -> Iterator[WorkUnit]:
    previous_phase = None
    for phase in self._phases:
      for work_unit in phase.work_units(previous_phase):
        yield work_unit
      previous_phase = phase

  def get_best_models(self, num_models: int) -> Sequence[tf.keras.Model]:
    final_phase = self._phases[-1]
    if isinstance(final_phase, ModelProvider):
      return self._phases[-1].get_best_models(num_models)
    raise RuntimeError('Final phase does not provide models.')
1,826
35.54
80
py
adanet
adanet-master/adanet/experimental/controllers/controller.py
# Lint as: python3 # Copyright 2019 The AdaNet Authors. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """The AutoML controller for AdaNet.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc from typing import Iterator, Sequence from adanet.experimental.work_units.work_unit import WorkUnit import tensorflow.compat.v2 as tf class Controller(abc.ABC): """Defines the machine learning workflow to produce high-quality models.""" @abc.abstractmethod def work_units(self) -> Iterator[WorkUnit]: """Yields `WorkUnit` instances.""" pass @abc.abstractmethod def get_best_models(self, num_models) -> Sequence[tf.keras.Model]: """Returns the top models produced from executing the controller.""" pass
1,316
31.925
77
py
adanet
adanet-master/adanet/experimental/controllers/__init__.py
# Lint as: python3 # Copyright 2020 The AdaNet Authors. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """AdaNet ModelFlow controllers.""" from adanet.experimental.controllers.sequential_controller import SequentialController __all__ = [ "SequentialController", ]
789
33.347826
86
py
adanet
adanet-master/adanet/tf_compat/__init__.py
# Copyright 2018 The AdaNet Authors. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """TensorFlow major version compatibility code.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from distutils.version import LooseVersion import tensorflow.compat.v1 as tf import tensorflow.compat.v2 as tf_v2 # pylint: disable=unused-import # pylint: disable=g-direct-tensorflow-import from tensorflow.python import tf2 from tensorflow.python.keras.metrics import Metric from tensorflow.python.tpu import tpu_function from tensorflow_estimator.python.estimator.head import regression_head # pylint: enable=g-direct-tensorflow-import # pylint: enable=unused-import DatasetV1 = tf.compat.v1.data.Dataset DatasetV2 = tf.compat.v2.data.Dataset v1 = tf.compat.v1 v2 = tf.compat.v2 try: SessionRunHook = tf.estimator.SessionRunHook except AttributeError: SessionRunHook = tf.train.SessionRunHook try: SessionRunArgs = tf.estimator.SessionRunArgs except AttributeError: SessionRunArgs = tf.train.SessionRunArgs try: SummarySaverHook = tf.estimator.SummarySaverHook except AttributeError: SummarySaverHook = tf.train.SummarySaverHook try: CheckpointSaverHook = tf.estimator.CheckpointSaverHook except AttributeError: CheckpointSaverHook = tf.train.CheckpointSaverHook try: # Loss reduction strings change between TF 1.13 and TF 1.14, which causes # Heads to raise errors. regression_head.RegressionHead( loss_reduction=tf.losses.Reduction.SUM_OVER_BATCH_SIZE) SUM_OVER_BATCH_SIZE = tf.losses.Reduction.SUM_OVER_BATCH_SIZE SUM = tf.losses.Reduction.SUM except ValueError: SUM_OVER_BATCH_SIZE = "sum_over_batch_size" SUM = "sum" def tensor_name(tensor): """Returns the Tensor's name. Tensor names always have the structure <op_name>:<int>. This method returns the portion before the ':'. Args: tensor: Tensor. Returns: String name of the Tensor. """ return tensor.name.split(":")[-2] def version_greater_or_equal(semver): """Returns whether the current TF version is >= to semver string.""" try: tf_version = tf.version.VERSION except AttributeError: tf_version = tf.VERSION return LooseVersion(tf_version) >= LooseVersion(semver) def make_one_shot_iterator(dataset): """Returns a dataset's one-shot iterator.""" try: return v1.data.make_one_shot_iterator(dataset) except AttributeError: return dataset.make_one_shot_iterator() def random_normal(*args, **kwargs): """Returns a random normal distribution Tensor.""" try: return tf.random.normal(*args, **kwargs) except AttributeError: return tf.random_normal(*args, **kwargs) def metric_op(metric): """Converts Keras metrics into a metric op tuple. NOTE: If this method is called in for loop, the runtime is O(n^2). However the number of eval metrics at any given time should be small enough that this does not affect performance. Any impact is only during graph construction time, and therefore has no effect on steps/s. Args: metric: Either a `tf.keras.metric.Metric` instance or a tuple of Tensor value and update op. 
Returns: A tuple of metric Tensor value and update op. """ if not isinstance(metric, tf.keras.metrics.Metric): return metric vars_to_add = {} for var in metric.variables: vars_to_add[_hashable_var_key(var)] = var metric = (metric.result(), metric.updates[0]) _update_variable_collection(v1.GraphKeys.LOCAL_VARIABLES, vars_to_add) _update_variable_collection(v1.GraphKeys.METRIC_VARIABLES, vars_to_add) return metric def _hashable_var_key(var): """Returns a hashable key to identify the given Variable.""" # In TF 2, Variables themselves are not hashable, so cannot be dict keys. # Error is "Tensor is unhashable if Tensor equality is enabled. Instead, use # tensor.experimental_ref() as the key". For a related issue, see: # https://github.com/tensorflow/tensorflow/issues/32139 ref_op = getattr(var, "experimental_ref", None) if callable(ref_op): return ref_op() return var def _update_variable_collection(collection_name, vars_to_add): """Add variables to collection.""" collection = {} for var in v1.get_collection(collection_name): collection[_hashable_var_key(var)] = var # Skip variables that are in the collection already: O(n) runtime. for var_ref in vars_to_add: if var_ref in collection: continue v1.add_to_collection(collection_name, vars_to_add[var_ref]) def skip_for_tf2(f): """Decorator that skips tests when using TensorFlow 2.""" def test_wrapper(*args, **kwargs): """Wraps the decorated function to determine whether to skip.""" # Extract test case instance from args. self = args[0] try: # If tf.contrib doesn't exist, we are in TF 2.0. _ = tf.contrib _ = tf.contrib.estimator.regression_head( loss_reduction=tf.compat.v1.losses.Reduction.SUM_OVER_BATCH_SIZE) except (AttributeError, ImportError): self.skipTest("Skipping test in TF 2.0.") return f(*args, **kwargs) return test_wrapper def skip_for_tf1(f): """Decorator that skips tests when using TensorFlow 1.""" def test_wrapper(*args, **kwargs): """Wraps the decorated function to determine whether to skip.""" # Extract test case instance from args. self = args[0] try: # If tf.contrib doesn't exist, we are in TF 2.0. _ = tf_v2.contrib except (AttributeError, ImportError): return f(*args, **kwargs) self.skipTest("Skipping test in TF 1.0.") return f(*args, **kwargs) return test_wrapper def is_v2_behavior_enabled(): """Returns if user called tf.enable_v2_behavior.""" # Since there is no actual tf.is_v2_behavior enabled, check that the # settings were enabled. return tf2.enabled() def load_variable(checkpoint_path, var_name, shape, dtype): """Loads a variable from a given checkpoint.""" with tf.Graph().as_default(): variable = v1.get_variable( var_name, shape=shape, dtype=dtype, initializer=v1.zeros_initializer(), trainable=False) trackable_vars = {var_name: variable} checkpoint = v2.train.Checkpoint(**trackable_vars) status = checkpoint.restore(checkpoint_path) status.expect_partial() with v1.Session() as session: status.initialize_or_restore(session) return session.run(variable)
6,952
29.362445
80
py
adanet
adanet-master/adanet/subnetwork/generator_test.py
"""Test AdaNet single graph subnetwork implementation. Copyright 2018 The AdaNet Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl.testing import parameterized from adanet import tf_compat from adanet.subnetwork.generator import Builder from adanet.subnetwork.generator import Subnetwork import tensorflow.compat.v2 as tf def dummy_tensor(shape=(), random_seed=42): """Returns a randomly initialized tensor.""" return tf.Variable( tf_compat.random_normal(shape=shape, seed=random_seed), trainable=False).read_value() class FakeSubnetwork(Builder): """Fake subnetwork builder.""" @property def name(self): return "fake_subnetwork" def build_subnetwork(self, features, logits_dimension, training, iteration_step, summary, previous_ensemble=None): return def build_subnetwork_train_op(self, subnetwork, loss, var_list, labels, iteration_step, summary, previous_ensemble): return def build_mixture_weights_train_op(self, loss, var_list, logits, labels, iteration_step, summary): return class SubnetworkTest(parameterized.TestCase, tf.test.TestCase): @parameterized.named_parameters( { "testcase_name": "no_persisted_tensors_nor_shared", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), }, { "testcase_name": "empty_persisted_tensors", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), "persisted_tensors": {}, }, { "testcase_name": "dict_logits_and_last_layer", "last_layer": { "head1": dummy_tensor() }, "logits": { "head1": dummy_tensor() }, "complexity": dummy_tensor(), "persisted_tensors": {}, }, { "testcase_name": "persisted_tensors", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), "persisted_tensors": { "hidden_layer": dummy_tensor(), }, }, { "testcase_name": "nested_persisted_tensors", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), "persisted_tensors": { "hidden_layer": dummy_tensor(), "nested": { "foo": dummy_tensor(), "nested": { "foo": dummy_tensor(), }, }, }, }, { "testcase_name": "shared_primitive", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), "shared": 1, }, { "testcase_name": "shared_dict", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), "shared": {}, }, { "testcase_name": "shared_lambda", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), "shared": lambda x: x, }, { "testcase_name": "shared_object", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), "shared": dummy_tensor(), }) def test_new(self, last_layer, logits, complexity, persisted_tensors=None, shared=None): with self.test_session(): got = Subnetwork(last_layer, logits, complexity, persisted_tensors, shared) self.assertEqual(got.last_layer, last_layer) self.assertEqual(got.logits, logits) self.assertEqual(got.complexity, complexity) 
self.assertEqual(got.persisted_tensors, persisted_tensors) self.assertEqual(got.shared, shared) @parameterized.named_parameters( { "testcase_name": "none_last_layer", "last_layer": None, "logits": dummy_tensor(), "complexity": dummy_tensor(), "persisted_tensors": {}, }, { "testcase_name": "none_logits", "last_layer": dummy_tensor(), "logits": None, "complexity": dummy_tensor(), "persisted_tensors": {}, }, { "testcase_name": "none_complexity", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": None, "persisted_tensors": {}, }, { "testcase_name": "empty_list_persisted_tensors", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), "persisted_tensors": [], }, { "testcase_name": "list_persisted_tensors", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), "persisted_tensors": [1.], }, { "testcase_name": "empty_nested_persisted_tensors", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), "persisted_tensors": { "value": dummy_tensor(), "nested": {}, }, }, { "testcase_name": "empty_nested_persisted_tensors_recursive", "last_layer": dummy_tensor(), "logits": dummy_tensor(), "complexity": dummy_tensor(), "persisted_tensors": { "value": dummy_tensor(), "nested": { "value": dummy_tensor(), "nested": { "value": dummy_tensor(), "nested": {}, }, }, }, }, { "testcase_name": "only_dict_logits", "last_layer": dummy_tensor(), "logits": { "head": dummy_tensor() }, "complexity": dummy_tensor(), "persisted_tensors": {}, }, { "testcase_name": "only_dict_last_layer", "last_layer": { "head": dummy_tensor() }, "logits": dummy_tensor(), "complexity": dummy_tensor(), "persisted_tensors": {}, }) def test_new_errors(self, last_layer, logits, complexity, persisted_tensors): with self.test_session(): with self.assertRaises(ValueError): Subnetwork(last_layer, logits, complexity, persisted_tensors) if __name__ == "__main__": tf.test.main()
7,392
31.425439
79
py
adanet
adanet-master/adanet/subnetwork/report.py
"""Container for an `adanet.Subnetwork`'s attributes and metrics. Copyright 2018 The AdaNet Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections from adanet import tf_compat import six import tensorflow.compat.v1 as tf class Report( collections.namedtuple("Report", ["hparams", "attributes", "metrics"])): # pyformat: disable """A container for data to be collected about a :class:`Subnetwork`. Args: hparams: A dict mapping strings to python strings, ints, bools, or floats. It is meant to contain the constants that define the :class:`adanet.subnetwork.Builder`, such as dropout, number of layers, or initial learning rate. attributes: A dict mapping strings to rank 0 Tensors of dtype string, int32, or float32. It is meant to contain properties that may or may not change over the course of training the :class:`adanet.subnetwork.Subnetwork`, such as the number of parameters, the Lipschitz constant, the :math:`L2` norm of the weights, or learning rate at materialization time. metrics: Dict of metric results keyed by name. The values of the dict are the results of calling a metric function, namely a `(metric_tensor, update_op)` tuple. `metric_tensor` should be evaluated without any impact on state (typically is a pure computation results based on variables.). For example, it should not trigger the :code:`update_op` or requires any input fetching. This is meant to contain metrics of interest, such as the training loss, complexity regularized loss, or standard deviation of the last layer outputs. Returns: A validated :class:`adanet.subnetwork.Report` object. Raises: ValueError: If validation fails. """ # pyformat: enable def __new__(cls, hparams, attributes, metrics): def _is_scalar(tensor): """Returns True iff tensor is scalar.""" return tensor.shape.ndims == 0 def _is_accepted_dtype(tensor): """Returns True iff tensor has the dtype we can handle.""" return tensor.dtype.base_dtype in (tf.bool, tf.int32, tf.float32, tf.float64, tf.string) # Validate hparams for key, value in hparams.items(): if not isinstance(value, (bool, int, float, six.string_types)): raise ValueError( "hparam '{}' refers to invalid value {}, type {}. type must be " "python primitive int, float, bool, or string.".format( key, value, type(value))) # Validate attributes for key, value in attributes.items(): if not isinstance(value, tf.Tensor): raise ValueError("attribute '{}' refers to invalid value: {}, type: {}." "type must be Tensor.".format(key, value, type(value))) if not (_is_scalar(value) and _is_accepted_dtype(value)): raise ValueError( "attribute '{}' refers to invalid tensor {}. Shape: {}".format( key, value, value.get_shape())) # Validate metrics metrics_copy = {} for key, value in metrics.items(): value = tf_compat.metric_op(value) if not isinstance(value, tuple): raise ValueError( "metric '{}' has invalid type {}. 
Must be a tuple.".format( key, type(value))) if len(value) < 2: raise ValueError( "metric tuple '{}' has fewer than 2 elements".format(key)) if not isinstance(value[0], (tf.Tensor, tf.Variable)): raise ValueError( "First element of metric tuple '{}' has value {} and type {}. " "Must be a Tensor or Variable.".format(key, value[0], type(value[0]))) if not _is_accepted_dtype(value[0]): raise ValueError( "First element of metric '{}' refers to Tensor of the wrong " "dtype {}. Must be one of tf.bool, tf.int32, tf.float32, " "tf.float64 or tf.string.".format(key, value[0].dtype)) if not _is_scalar(value[0]): tf.logging.warn( "First element of metric '{}' refers to Tensor of rank > 0. " "AdaNet is currently unable to store metrics of rank > 0 -- this " "metric will be dropped from the report. " "value: {}".format(key, value[0])) continue if not isinstance(value[1], (tf.Tensor, tf.Operation, tf.Variable)): raise ValueError( "Second element of metric tuple '{}' has value {} and type {}. " "Must be a Tensor, Operation, or Variable.".format( key, value[1], type(value[1]))) metrics_copy[key] = value return super(Report, cls).__new__( cls, hparams=hparams, attributes=attributes, metrics=metrics_copy) class MaterializedReport( collections.namedtuple("MaterializedReport", [ "iteration_number", "name", "hparams", "attributes", "metrics", "included_in_final_ensemble" ])): # pyformat: disable """Data collected about a :class:`adanet.subnetwork.Subnetwork`. Args: iteration_number: A python integer for the AdaNet iteration number, starting from 0. name: A string, which is either the name of the corresponding Builder, or "previous_ensemble" if it refers to the previous_ensemble. hparams: A dict mapping strings to python strings, ints, or floats. These are constants passed from the author of the :class:`adanet.subnetwork.Builder` that was used to construct this :class:`adanet.subnetwork.Subnetwork`. It is meant to contain the arguments that defined the :class:`adanet.subnetwork.Builder`, such as dropout, number of layers, or initial learning rate. attributes: A dict mapping strings to python strings, ints, bools, or floats. These are python primitives that come from materialized Tensors; these Tensors were defined by the author of the :class:`adanet.subnetwork.Builder` that was used to construct this :class:`adanet.subnetwork.Subnetwork`. It is meant to contain properties that may or may not change over the course of training the :class:`adanet.subnetwork.Subnetwork`, such as the number of parameters, the Lipschitz constant, or the :math:`L2` norm of the weights. metrics: A dict mapping strings to python strings, ints, or floats. These are python primitives that come from metrics that were evaluated on the trained :class:`adanet.subnetwork.Subnetwork` over some dataset; these metrics were defined by the author of the :class:`adanet.subnetwork.Builder` that was used to construct this :class:`adanet.subnetwork.Subnetwork`. It is meant to contain performance metrics or measures that could predict generalization, such as the training loss, complexity regularized loss, or standard deviation of the last layer outputs. included_in_final_ensemble: A boolean denoting whether the associated :class:`adanet.subnetwork.Subnetwork` was included in the ensemble at the end of the AdaNet iteration. Returns: An :class:`adanet.subnetwork.MaterializedReport` object. 
""" # pyformat: enable def __new__(cls, iteration_number, name, hparams, attributes, metrics, included_in_final_ensemble=False): return super(MaterializedReport, cls).__new__( cls, iteration_number=iteration_number, name=name, hparams=hparams, attributes=attributes, metrics=metrics, included_in_final_ensemble=included_in_final_ensemble)
8,294
41.106599
80
py
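A minimal sketch of how a `Builder.build_subnetwork_report` implementation might populate the `Report` container validated above; the hyperparameter names, parameter count, and loss value are illustrative assumptions, not taken from the AdaNet sources.

import tensorflow.compat.v1 as tf

from adanet.subnetwork import Report


def build_subnetwork_report():
  # hparams must be python primitives, attributes scalar Tensors, and
  # each metric a (value_tensor, update_op) tuple, per Report.__new__.
  return Report(
      hparams={"num_layers": 2, "learning_rate": 0.01},  # assumed values
      attributes={"num_params": tf.constant(12345)},  # assumed value
      metrics={"mean_loss": tf.metrics.mean(tf.constant(0.5))})

Per the validation above, non-primitive hparams or non-tuple metric values raise `ValueError`, and metric tensors of rank > 0 are dropped with a warning.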
adanet
adanet-master/adanet/subnetwork/__init__.py
# Copyright 2018 The AdaNet Authors. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # https://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Low-level APIs for defining custom subnetworks and search spaces.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from adanet.subnetwork.generator import Builder from adanet.subnetwork.generator import Generator from adanet.subnetwork.generator import SimpleGenerator from adanet.subnetwork.generator import Subnetwork from adanet.subnetwork.generator import TrainOpSpec from adanet.subnetwork.report import MaterializedReport from adanet.subnetwork.report import Report __all__ = [ "Builder", "Generator", "MaterializedReport", "Report", "SimpleGenerator", "Subnetwork", "TrainOpSpec", ]
1,290
32.973684
74
py
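Because this `__init__.py` re-exports the generator and report symbols, downstream code can import the whole search-space API from `adanet.subnetwork` directly, for example:

from adanet.subnetwork import Builder, Generator, SimpleGenerator
from adanet.subnetwork import MaterializedReport, Report, Subnetwork, TrainOpSpec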
adanet
adanet-master/adanet/subnetwork/generator.py
"""An AdaNet subnetwork definition in Tensorflow using a single graph. Copyright 2018 The AdaNet Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc import collections import six def _validate_nested_persisted_tensors(persisted_tensors): """Raises a ValueError when a nested dict is empty in persisted_tensors.""" for key, entry in persisted_tensors.items(): if not isinstance(entry, dict): continue if not entry: raise ValueError("Got empty nested dictionary for key: '{}'".format(key)) _validate_nested_persisted_tensors(entry) class TrainOpSpec( collections.namedtuple("TrainOpSpec", ["train_op", "chief_hooks", "hooks"])): """A data structure for specifying training operations. Args: train_op: Op for the training step. chief_hooks: Iterable of :class:`tf.train.SessionRunHook` objects to run on the chief worker during training. hooks: Iterable of :class:`tf.train.SessionRunHook` objects to run on all workers during training. Returns: A :class:`adanet.subnetwork.TrainOpSpec` object. """ def __new__(cls, train_op, chief_hooks=None, hooks=None): # Make hooks immutable. chief_hooks = tuple(chief_hooks) if chief_hooks else () hooks = tuple(hooks) if hooks else () return super(TrainOpSpec, cls).__new__(cls, train_op, chief_hooks, hooks) class Subnetwork( collections.namedtuple("Subnetwork", [ "last_layer", "logits", "complexity", "persisted_tensors", "shared", "local_init_ops" ])): # pyformat: disable """An AdaNet subnetwork. In the AdaNet paper, an :class:`adanet.subnetwork.Subnetwork` is are called a *subnetwork*, and indicated by *h*. A collection of weighted subnetworks form an AdaNet ensemble. Args: last_layer: :class:`tf.Tensor` output or dict of string to :class:`tf.Tensor` outputs (for multi-head) of the last layer of the subnetwork, i.e the layer before the logits layer. When the mixture weight type is :class:`MATRIX`, the AdaNet algorithm takes care of computing ensemble mixture weights matrices (one per subnetwork) that multiply the various last layers of the ensemble's subnetworks, and regularize them using their subnetwork's complexity. This field is represented by *h* in the AdaNet paper. logits: :class:`tf.Tensor` logits or dict of string to :class:`tf.Tensor` logits (for multi-head) for training the subnetwork. These logits are not used in the ensemble's outputs if the mixture weight type is :class:`MATRIX`, instead AdaNet learns its own logits (mixture weights) from the subnetwork's `last_layers` with complexity regularization. The logits are used in the ensemble only when the mixture weights type is :class:`SCALAR` or :class:`VECTOR`. Even though the logits are not used in the ensemble in some cases, they should always be supplied as adanet uses the logits to train the subnetworks. complexity: A scalar :class:`tf.Tensor` representing the complexity of the subnetwork's architecture. 
It is used for choosing the best subnetwork at each iteration, and for regularizing the weighted outputs of more complex subnetworks. persisted_tensors: DEPRECATED. See `shared`. Optional nested dictionary of string to :class:`tf.Tensor` to persist across iterations. At the end of an iteration, the :class:`tf.Tensor` instances will be available to subnetworks in the next iterations, whereas others that are not part of the `Subnetwork` will be pruned. This allows later :class:`adanet.subnetwork.Subnetwork` instances to dynamically build upon arbitrary :class:`tf.Tensors` from previous :class:`adanet.subnetwork.Subnetwork` instances. shared: Optional Python object(s), primitive(s), or function(s) to share with subnetworks within the same iteration or in future iterations. local_init_ops: Iterable of :class:`tf.Operation` objects to run to initialize local variables. Returns: A validated :class:`adanet.subnetwork.Subnetwork` object. Raises: ValueError: If last_layer is None. ValueError: If logits is None. ValueError: If logits is a dict but last_layer is not. ValueError: If last_layer is a dict but logits is not. ValueError: If complexity is None. ValueError: If persisted_tensors is present but not a dictionary. ValueError: If persisted_tensors contains an empty nested dictionary. """ # pyformat: enable # Import here to avoid strict BUILD deps check. from tensorflow.python.util import deprecation # pylint: disable=g-direct-tensorflow-import,g-import-not-at-top @deprecation.deprecated_args( None, "`persisted_tensors` is deprecated, please use `shared` instead.", "persisted_tensors") def __new__(cls, last_layer, logits, complexity, persisted_tensors=None, shared=None, local_init_ops=None): if last_layer is None: raise ValueError("last_layer not provided") if logits is None: raise ValueError("logits not provided") if isinstance(logits, dict) and not isinstance(last_layer, dict): raise ValueError("if logits is a dict last_layer must also be a dict") if isinstance(last_layer, dict) and not isinstance(logits, dict): raise ValueError("if last_layer is a dict logits must also be a dict") if complexity is None: raise ValueError("complexity not provided") if persisted_tensors is not None: if not isinstance(persisted_tensors, dict): raise ValueError("persisted_tensors must be a dict") _validate_nested_persisted_tensors(persisted_tensors) local_init_ops = tuple(local_init_ops) if local_init_ops else () return super(Subnetwork, cls).__new__( cls, last_layer=last_layer, logits=logits, complexity=complexity, persisted_tensors=persisted_tensors, shared=shared, local_init_ops=local_init_ops) @six.add_metaclass(abc.ABCMeta) class Builder(object): """Interface for a subnetwork builder. Given features, labels, and the best ensemble of subnetworks at iteration t-1, a `Builder` creates a `Subnetwork` to add to a candidate ensemble at iteration t. These candidate ensembles are evaluated against one another at the end of the iteration, and the best one is selected based on its complexity-regularized loss. """ @abc.abstractproperty def name(self): r"""Returns the unique name of this subnetwork within an iteration. Returns: String name of this subnetwork. """ # TODO: Validate name matches ^[A-Za-z0-9_.\\-/]*$ @abc.abstractmethod def build_subnetwork(self, features, labels, logits_dimension, training, iteration_step, summary, previous_ensemble=None): # pyformat: disable """Returns the candidate `Subnetwork` to add to the ensemble. This method will be called only once before :meth:`build_subnetwork_train_op`. 
This method should construct the candidate subnetwork's graph operations and variables. Accessing the global step via :meth:`tf.train.get_or_create_global_step()` or :meth:`tf.train.get_global_step()` within this scope will return an incrementable iteration step since the beginning of the iteration. Args: features: Input `dict` of :class:`tf.Tensor` objects. labels: Labels :class:`tf.Tensor` or a dictionary of string label name to :class:`tf.Tensor` (for multi-head). Can be `None`. logits_dimension: Size of the last dimension of the logits :class:`tf.Tensor`. Typically, logits have for shape `[batch_size, logits_dimension]`. training: A python boolean indicating whether the graph is in training mode or prediction mode. iteration_step: Integer :class:`tf.Tensor` representing the step since the beginning of the current iteration, as opposed to the global step. summary: An :class:`adanet.Summary` for scoping summaries to individual subnetworks in Tensorboard. Using :meth:`tf.summary` within this scope will use this :class:`adanet.Summary` under the hood. previous_ensemble: The best :class:`adanet.Ensemble` from iteration t-1. The created subnetwork will extend the previous ensemble to form the :class:`adanet.Ensemble` at iteration t. Returns: An :class:`adanet.subnetwork.Subnetwork` instance. """ # pyformat: enable @abc.abstractmethod def build_subnetwork_train_op(self, subnetwork, loss, var_list, labels, iteration_step, summary, previous_ensemble): """Returns an op for training a new subnetwork. This method will be called once after :meth:`build_subnetwork`. Accessing the global step via :meth:`tf.train.get_or_create_global_step()` or :meth:`tf.train.get_global_step()` within this scope will return an incrementable iteration step since the beginning of the iteration. Args: subnetwork: Newest subnetwork, that is not part of the `previous_ensemble`. loss: A :class:`tf.Tensor` containing the subnetwork's loss to minimize. var_list: List of subnetwork :class:`tf.Variable` parameters to update as part of the training operation. labels: Labels :class:`tf.Tensor` or a dictionary of string label name to :class:`tf.Tensor` (for multi-head). iteration_step: Integer :class:`tf.Tensor` representing the step since the beginning of the current iteration, as opposed to the global step. summary: An :class:`adanet.Summary` for scoping summaries to individual subnetworks in Tensorboard. Using `tf.summary` within this scope will use this :class:`adanet.Summary` under the hood. previous_ensemble: The best `Ensemble` from iteration t-1. The created subnetwork will extend the previous ensemble to form the `Ensemble` at iteration t. Is None for iteration 0. Returns: Either a train op or an :class:`adanet.subnetwork.TrainOpSpec`. """ def build_subnetwork_report(self): """Returns a `subnetwork.Report` to materialize and record. This method will be called once after :meth:`build_subnetwork`. Do NOT depend on variables created in :meth:`build_subnetwork_train_op`, because they are not called before :meth:`build_subnetwork_report` is called. If it returns None, AdaNet records the name and standard eval metrics. """ return None @six.add_metaclass(abc.ABCMeta) class Generator(object): """Interface for a candidate subnetwork generator. Given the ensemble of subnetworks at iteration t-1, this object is responsible for generating the set of candidate subnetworks for iteration t that minimize the objective as part of an ensemble. 
""" @abc.abstractmethod def generate_candidates(self, previous_ensemble, iteration_number, previous_ensemble_reports, all_reports, config): # pyformat: disable """Generates :class:`adanet.subnetwork.Builder` instances for an iteration. NOTE: Every call to :meth:`generate_candidates` must be deterministic for the given arguments. Args: previous_ensemble: The best :class:`adanet.Ensemble` from iteration t-1. DEPRECATED. We are transitioning away from the use of previous_ensemble in generate_candidates. New Generators should *not* use previous_ensemble in their implementation of generate_candidates -- please only use iteration_number, previous_ensemble_reports and all_reports. iteration_number: Python integer AdaNet iteration t, starting from 0. previous_ensemble_reports: List of :class:`adanet.subnetwork.MaterializedReport` instances corresponding to the Builders composing :class:`adanet.Ensemble` from iteration t-1. The first element in the list corresponds to the Builder added in the first iteration. If a :class:`adanet.subnetwork.MaterializedReport` is not supplied to the estimator, previous_ensemble_report is `None`. all_reports: List of :class:`adanet.subnetwork.MaterializedReport` instances. If an :class:`adanet.subnetwork.ReportMaterializer` is not supplied to the estimator, `all_reports` is `None`. If :class:`adanet.subnetwork.ReportMaterializer` is supplied to the estimator and t=0, `all_reports` is an empty List. Otherwise, `all_reports` is a sequence of Lists. Each element of the sequence is a List containing all the :class:`adanet.subnetwork.MaterializedReport` instances in an AdaNet iteration, starting from iteration 0, and ending at iteration t-1. config: The current :class:`tf.estimator.RunConfig` object to configure the runtime settings. Returns: A list of :class:`adanet.subnetwork.Builder` instances. """ # pyformat: enable class SimpleGenerator(Generator): """Always generates the given :class:`adanet.subnetwork.Builder` instances. Args: subnetwork_builders: List of :class:`adanet.subnetwork.Builder` instances to return at each iteration when `generate_candidates` is called. Returns: A :class:`adanet.SimpleGenerator` instance. """ def __init__(self, subnetwork_builders): self._subnetwork_builders = subnetwork_builders def generate_candidates(self, previous_ensemble, iteration_number, previous_ensemble_reports, all_reports): return self._subnetwork_builders
14,469
41.558824
114
py
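A hedged sketch of the `Builder`/`SimpleGenerator` pattern defined above. The single dense layer, the feature key "x", the constant complexity, and the optimizer settings are assumptions made for illustration; they are not code from the repository.

import adanet
import tensorflow.compat.v1 as tf


class LinearBuilder(adanet.subnetwork.Builder):
  """One dense layer per subnetwork; purely illustrative."""

  @property
  def name(self):
    return "linear"

  def build_subnetwork(self, features, labels, logits_dimension, training,
                       iteration_step, summary, previous_ensemble=None):
    # Assumes a single rank-2 numeric feature named "x" (an assumption).
    last_layer = tf.keras.layers.Dense(8, activation="relu")(features["x"])
    logits = tf.keras.layers.Dense(logits_dimension)(last_layer)
    return adanet.subnetwork.Subnetwork(
        last_layer=last_layer,
        logits=logits,
        # A constant stands in for a real complexity measure.
        complexity=tf.constant(1.0))

  def build_subnetwork_train_op(self, subnetwork, loss, var_list, labels,
                                iteration_step, summary, previous_ensemble):
    optimizer = tf.train.AdamOptimizer(learning_rate=0.001)
    return optimizer.minimize(loss, var_list=var_list)


# The generator proposes the same candidate at every AdaNet iteration.
generator = adanet.subnetwork.SimpleGenerator([LinearBuilder()])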
adanet
adanet-master/adanet/subnetwork/report_test.py
"""Test AdaNet single graph subnetwork implementation. Copyright 2018 The AdaNet Authors. All Rights Reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at https://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from absl.testing import parameterized from adanet.subnetwork.report import Report import tensorflow.compat.v2 as tf # pylint: disable=g-direct-tensorflow-import from tensorflow.python.eager import context from tensorflow.python.framework import test_util # pylint: enable=g-direct-tensorflow-import class ReportTest(parameterized.TestCase, tf.test.TestCase): # pylint: disable=g-long-lambda @parameterized.named_parameters( { "testcase_name": "empty", "hparams": {}, "attributes": lambda: {}, "metrics": lambda: {}, }, { "testcase_name": "non_empty", "hparams": { "hoo": 1 }, "attributes": lambda: { "aoo": tf.constant(1) }, "metrics": lambda: { "moo": (tf.constant(1), tf.constant(1)) }, }, { "testcase_name": "non_tensor_update_op", "hparams": { "hoo": 1 }, "attributes": lambda: { "aoo": tf.constant(1) }, "metrics": lambda: { "moo": (tf.constant(1), tf.no_op()) }, }) # pylint: enable=g-long-lambda @test_util.run_in_graph_and_eager_modes def test_new(self, hparams, attributes, metrics): with context.graph_mode(): _ = tf.constant(0) # Just to have a non-empty graph. report = Report( hparams=hparams, attributes=attributes(), metrics=metrics()) self.assertEqual(hparams, report.hparams) self.assertEqual( self.evaluate(attributes()), self.evaluate(report.attributes)) self.assertEqual(self.evaluate(metrics()), self.evaluate(report.metrics)) @test_util.run_in_graph_and_eager_modes def test_drop_non_scalar_metric(self): """Tests b/118632346.""" hparams = {"hoo": 1} attributes = {"aoo": tf.constant(1)} metrics = { "moo1": (tf.constant(1), tf.constant(1)), "moo2": (tf.constant([1, 1]), tf.constant([1, 1])), } want_metrics = metrics.copy() del want_metrics["moo2"] with self.test_session(): report = Report(hparams=hparams, attributes=attributes, metrics=metrics) self.assertEqual(hparams, report.hparams) self.assertEqual(attributes, report.attributes) self.assertEqual(want_metrics, report.metrics) @parameterized.named_parameters( { "testcase_name": "tensor_hparams", "hparams": { "hoo": tf.constant(1) }, "attributes": {}, "metrics": {}, }, { "testcase_name": "non_tensor_attributes", "hparams": {}, "attributes": { "aoo": 1, }, "metrics": {}, }, { "testcase_name": "non_tuple_metrics", "hparams": {}, "attributes": {}, "metrics": { "moo": tf.constant(1) }, }, { "testcase_name": "one_item_tuple_metrics", "hparams": {}, "attributes": {}, "metrics": { "moo": (tf.constant(1),) }, }) @test_util.run_in_graph_and_eager_modes def test_new_errors(self, hparams, attributes, metrics): with self.assertRaises(ValueError): Report(hparams=hparams, attributes=attributes, metrics=metrics) if __name__ == "__main__": tf.test.main()
4,130
30.295455
79
py
polytope
polytope-main/setup.py
#!/usr/bin/env python """Installation script.""" # import imp # inline # import importlib # inline import shlex as _sh import os import setuptools as _stp import subprocess import sys classifiers = [ 'Development Status :: 2 - Pre-Alpha', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', 'Topic :: Scientific/Engineering', 'Topic :: Software Development'] def retrieve_git_info(): """Return commit hash of HEAD, or "release", or None if failure. If the git command fails, then return None. If HEAD has tag with prefix "vM" where M is an integer, then return 'release'. Tags with such names are regarded as version or release tags. Otherwise, return the commit hash as str. """ # Is Git installed? cmd = _sh.split(''' git --version ''') try: subprocess.call( cmd, stdout=subprocess.PIPE) except OSError: return None # Decide whether this is a release cmd = _sh.split(''' git describe --tags --candidates=0 HEAD ''') p = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) p.wait() if p.returncode == 0: tag = p.stdout.read().decode('utf-8') if len(tag) >= 2 and tag.startswith('v'): try: int(tag[1]) return 'release' except ValueError: pass # Otherwise, return commit hash cmd = _sh.split(''' git log -1 --format=%H ''') p = subprocess.Popen( cmd, stdout=subprocess.PIPE) p.wait() sha1 = p.stdout.read().decode('utf-8') return sha1 def run_setup(): """Get version from git, then install.""" # load long description from README.rst readme_file = 'README.rst' if os.path.exists(readme_file): with open(readme_file) as f: long_description = f.read() else: print( 'Could not find readme file to ' 'extract long_description.') long_description = '' # If .git directory is present, # create commit_hash.txt accordingly # to indicate version information if os.path.exists('.git'): # Provide commit hash or # empty file to indicate release sha1 = retrieve_git_info() if sha1 is None: sha1 = 'unknown-commit' elif sha1 == 'release': sha1 = '' commit_hash_header = ( '# DO NOT EDIT! 
' 'This file was automatically generated by ' 'setup.py of polytope') with open('polytope/commit_hash.txt', 'w') as f: f.write(commit_hash_header + '\n') f.write(sha1 + '\n') # Import polytope/version.py # without importing polytope if sys.version_info.major == 2: import imp version = imp.load_module( 'version', *imp.find_module('version', ['polytope'])) else: import importlib.util spec = importlib.util.spec_from_file_location( 'version', 'polytope/version.py') version = importlib.util.module_from_spec(spec) sys.modules['version'] = version spec.loader.exec_module(version) polytope_version = version.version _stp.setup( name='polytope', version=polytope_version, description='Polytope Toolbox', long_description=long_description, author='Caltech Control and Dynamical Systems', author_email='[email protected]', url='https://tulip-control.org', project_urls={ 'Bug Tracker': 'https://github.com/tulip-control/polytope/issues', 'Documentation': 'https://tulip-control.github.io/polytope/', 'Source Code': 'https://github.com/tulip-control/polytope'}, license='BSD', python_requires='>=3.8', setup_requires=[ 'setuptools >= 65.5.1'], install_requires=[ 'networkx >= 3.0', 'numpy >= 1.24.1', 'scipy >= 1.10.0'], tests_require=[ 'matplotlib >= 3.6.3', 'pytest >= 7.2.1'], packages=[ 'polytope'], package_dir=dict( polytope='polytope'), package_data=dict( polytope=['commit_hash.txt']), classifiers=classifiers) if __name__ == '__main__': run_setup()
4,919
28.818182
68
py
polytope
polytope-main/run_tests.py
#!/usr/bin/env python """ Driver script for testing polytope package. Try calling it with "-h" flag. This script was originally copied from one by the same name in TuLiP (https://tulip-control.org). """ # import imp # inline # import importlib # inline import sys import os.path import nose if __name__ == "__main__": if ("-h" in sys.argv) or ("--help" in sys.argv): print("""Usage: run_tests.py [OPTIONS...] [[-]TESTFILES...] TESTFILES... is space-separated list of test file names, where the suffix "_test.py" is added to each given name. E.g., run_tests.py gr1cint causes the gr1cint_test.py file to be used and no others. If no arguments are given, then default is to run all tests. If TESTFILES... each have a prefix of "-", then all tests *except* those listed will be run. Besides what is below, OPTIONS... are passed on to nose. --fast exclude tests that are marked as slow --cover generate a coverage report --outofsource import polytope from outside the current directory --where=DIR search for tests in directory DIR; default is "tests" (this is exactly the "-w" or "--where" option of nose)""") exit(1) if "--fast" in sys.argv: skip_slow = True sys.argv.remove("--fast") else: skip_slow = False if "--cover" in sys.argv: measure_coverage = True sys.argv.remove("--cover") else: measure_coverage = False if "--outofsource" in sys.argv: require_nonlocaldir_polytope = True sys.argv.remove("--outofsource") else: require_nonlocaldir_polytope = False # Try to find test directory among command-line arguments given_tests_dir = False for i in range(len(sys.argv[1:])): if sys.argv[i+1] == "-w": given_tests_dir = True tests_dir = sys.argv[i+2] break if sys.argv[i+1].startswith("--where="): given_tests_dir = True tests_dir = sys.argv[i+1][len("--where="):] break if not given_tests_dir: tests_dir = "tests" if require_nonlocaldir_polytope: # Scrub local directory from search path for modules import os try: while True: sys.path.remove("") except ValueError: pass try: while True: sys.path.remove(os.path.abspath(os.curdir)) except ValueError: pass try: if sys.version_info.major == 2: import imp modtuple = imp.find_module("polytope", sys.path) imp.load_module("polytope", *modtuple) else: import importlib importlib.import_module('polytope') except ImportError: if require_nonlocaldir_polytope: raise ImportError( "polytope package not found, " "besides in the local directory") else: raise argv = ["nosetests"] if skip_slow: argv.append("--attr=!slow") if measure_coverage: argv.extend([ "--with-coverage", "--cover-html", "--cover-package=polytope"]) testfiles = [] excludefiles = [] for basename in sys.argv[1:]: # Only add extant file names if os.path.exists(os.path.join(tests_dir, basename+"_test.py")): testfiles.append(basename+"_test.py") elif basename[0] == "-": if os.path.exists( os.path.join(tests_dir, basename[1:]+"_test.py")): excludefiles.append(basename[1:]+"_test.py") else: argv.append(basename) else: argv.append(basename) if len(testfiles) > 0 and len(excludefiles) > 0: print("You can specify files to exclude or include, but not both.") print('Try calling it with "-h" flag.') exit(1) if len(excludefiles) > 0: argv.append("--exclude="+"|".join(excludefiles)) argv.extend(testfiles) if not given_tests_dir: argv += ["--where="+tests_dir] nose.main(argv=argv+["--verbosity=3", "--exe"])
4,212
31.658915
79
py
polytope
polytope-main/examples/randplot.py
#!/usr/bin/env python
"""
Sample N points in the unit square, compute hull and plot.

Usage: randplot.py [N]

The default value of N is 3.

Note that plotting requires matplotlib (https://matplotlib.org),
which is an optional dependency.
"""
import sys

import numpy as np
import matplotlib.pyplot as plt

import polytope


if __name__ == "__main__":
    if len(sys.argv) < 2:
        N = 3
    else:
        N = int(sys.argv[1])

    V = np.random.rand(N, 2)
    print("Sampled "+str(N)+" points:")
    print(V)

    P = polytope.qhull(V)
    print("Computed the convex hull:")
    print(P)

    V_min = polytope.extreme(P)
    print("which has extreme points:")
    print(V_min)

    P.plot()
    plt.show()
712
17.763158
68
py
polytope
polytope-main/examples/timing_benchmarks.py
"""Execution time measurements for the polytope package."""
import os

import numpy as np

import polytope as pc


if os.name != 'posix':
    raise Exception('works only on POSIX operating systems')

# [0, 1] x [0, 1]
A0 = np.array([
    [0.0, 1.0],
    [0.0, -1.0],
    [1.0, 0.0],
    [-1.0, 0.0]])
b0 = np.array([[1.0, 0.0, 1.0, 0.0]])
# [0, 0.5] x [0, 0.5]
A1 = np.array([
    [0.0, 2.0],
    [0.0, -1.0],
    [2.0, 0.0],
    [-1.0, 0.0]])
b1 = np.array([1.0, 0.0, 1.0, 0.0])

N = 10**4
print('starting timing measurements...')

# instance creation
start = os.times()[4]
for i in range(N):
    p0 = pc.Polytope(A0, b0)
end = os.times()[4]
print('instantiation: ' + str(end - start))

# intersection
p0 = pc.Polytope(A0, b0)
p1 = pc.Polytope(A1, b1)
start = os.times()[4]
for i in range(N):
    result = p0.intersect(p1)
end = os.times()[4]
print('intersection: ' + str(end - start))

# union
start = os.times()[4]
for i in range(N):
    result = p0.union(p1)
end = os.times()[4]
print('union: ' + str(end - start))

# set difference
start = os.times()[4]
for i in range(N):
    result = p0.diff(p1)
end = os.times()[4]
print('difference: ' + str(end - start))

print('end of timing measurements.')
1,180
16.893939
60
py
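The benchmark above reads wall-clock time from `os.times()[4]` (elapsed real time) and refuses to run on non-POSIX systems. A portable sketch of the same style of measurement using `time.perf_counter`; the polytopes and repetition count are arbitrary.

import time

import polytope as pc

p0 = pc.box2poly([[0.0, 1.0], [0.0, 1.0]])   # unit square
p1 = pc.box2poly([[0.0, 0.5], [0.0, 0.5]])

start = time.perf_counter()
for _ in range(10**3):
    p0.intersect(p1)
print('intersection: ' + str(time.perf_counter() - start))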
polytope
polytope-main/tests/polytope_test.py
#!/usr/bin/env python """Tests for the polytope subpackage.""" import logging import numpy as np from numpy.testing import assert_allclose from numpy.testing import assert_array_equal import pytest import scipy.optimize import polytope as pc import polytope.polytope as alg from polytope import solvers log = logging.getLogger('polytope.polytope') log.setLevel(logging.INFO) def test_polytope_str(): # 1 constaint (so uniline) A = np.array([[1]]) b = np.array([1]) p = pc.Polytope(A, b) s = str(p) s_ = 'Single polytope \n [[1.]] x <= [[1.]]\n' assert s == s_, (s, s_) # > 1 constraints (so multiline) polys = dict( p1d=[[0, 1]], p2d=[[0, 1], [0, 2]], p3d=[[0, 1], [0, 2], [0, 3]]) strings = dict( p1d='Single polytope \n [[ 1.] x <= [[1.]\n [-1.]]| [0.]]\n', p2d=( 'Single polytope \n [[ 1. 0.] | [[1.]\n [ 0. 1.] ' 'x <= [2.]\n [-1. -0.] | [0.]\n [-0. -1.]]|' ' [0.]]\n'), p3d=( 'Single polytope \n [[ 1. 0. 0.] | [[1.]\n ' '[ 0. 1. 0.] | [2.]\n [ 0. 0. 1.] x <= [3.]\n' ' [-1. -0. -0.] | [0.]\n [-0. -1. -0.] |' ' [0.]\n [-0. -0. -1.]]| [0.]]\n')) for name, poly in polys.items(): p = pc.Polytope.from_box(poly) s = str(p) s_ = strings[name] assert s == s_, (s, s_) class operations_test(object): def setUp(self): # unit square in first quadrant self.Ab = np.array([[0.0, 1.0, 1.0], [0.0, -1.0, 0.0], [1.0, 0.0, 1.0], [-1.0, 0.0, 0.0]]) # unit square in second quadrant self.Ab2 = np.array([[-1.0, 0.0, 1.0], [1.0, 0.0, 0.0], [0.0, 1.0, 1.0], [0.0, -1.0, 0.0]]) # unit square in third quadrant self.Ab3 = np.array([[0.0, 1.0, 0.0], [0.0, -1.0, 1.0], [1.0, 0.0, 0.0], [-1.0, 0.0, 1.0]]) # unit square in fourth quadrant self.Ab4 = np.array([[0.0, 1.0, 0.0], [0.0, -1.0, 1.0], [1.0, 0.0, 1.0], [-1.0, 0.0, 0.0]]) self.A = self.Ab[:, 0:2] self.b = self.Ab[:, 2] def tearDown(self): pass def comparison_test(self): p = pc.Polytope(self.A, self.b) p2 = pc.Polytope(self.A, 2*self.b) assert(p <= p2) assert(not p2 <= p) assert(not p2 == p) r = pc.Region([p]) r2 = pc.Region([p2]) assert(r <= r2) assert(not r2 <= r) assert(not r2 == r) # test H-rep -> V-rep -> H-rep v = pc.extreme(p) p3 = pc.qhull(v) assert(p3 == p) # test V-rep -> H-rep with d+1 points p4 = pc.qhull(np.array([[0, 0], [1, 0], [0, 1]])) assert(p4 == pc.Polytope( np.array([[1, 1], [0, -1], [0, -1]]), np.array([1, 0, 0]))) def region_rotation_test(self): p = pc.Region([pc.Polytope(self.A, self.b)]) p1 = pc.Region([pc.Polytope(self.A, self.b)]) p2 = pc.Region([pc.Polytope(self.Ab2[:, 0:2], self.Ab2[:, 2])]) p3 = pc.Region([pc.Polytope(self.Ab3[:, 0:2], self.Ab3[:, 2])]) p4 = pc.Region([pc.Polytope(self.Ab4[:, 0:2], self.Ab4[:, 2])]) p = p.rotation(0, 1, np.pi/2) print(p.bounding_box) assert(p == p2) assert(not p == p3) assert(not p == p4) assert(not p == p1) assert_allclose(p.chebXc, [-0.5, 0.5]) p = p.rotation(0, 1, np.pi/2) assert(p == p3) assert_allclose(p.chebXc, [-0.5, -0.5]) p = p.rotation(0, 1, np.pi/2) assert(p == p4) assert_allclose(p.chebXc, [0.5, -0.5]) p = p.rotation(0, 1, np.pi/2) assert(p == p1) assert_allclose(p.chebXc, [0.5, 0.5]) def polytope_rotation_test(self): p = pc.Polytope(self.A, self.b) p1 = pc.Polytope(self.A, self.b) p2 = pc.Polytope(self.Ab2[:, 0:2], self.Ab2[:, 2]) p3 = pc.Polytope(self.Ab3[:, 0:2], self.Ab3[:, 2]) p4 = pc.Polytope(self.Ab4[:, 0:2], self.Ab4[:, 2]) p = p.rotation(0, 1, np.pi/2) print(p.bounding_box) assert(p == p2) assert(not p == p3) assert(not p == p4) assert(not p == p1) assert_allclose(p.chebXc, [-0.5, 0.5]) p = p.rotation(0, 1, np.pi/2) assert(p == p3) assert_allclose(p.chebXc, [-0.5, -0.5]) p = p.rotation(0, 1, 
np.pi/2) assert(p == p4) assert_allclose(p.chebXc, [0.5, -0.5]) p = p.rotation(0, 1, np.pi/2) assert(p == p1) assert_allclose(p.chebXc, [0.5, 0.5]) def region_translation_test(self): p = pc.Region([pc.Polytope(self.A, self.b)]) p1 = pc.Region([pc.Polytope(self.A, self.b)]) p2 = pc.Region([pc.Polytope(self.Ab2[:, 0:2], self.Ab2[:, 2])]) p = p.translation([-1, 0]) assert(p == p2) assert(not p == p1) p = p.translation([1, 0]) assert(p == p1) def polytope_translation_test(self): p = pc.Polytope(self.A, self.b) p1 = pc.Polytope(self.A, self.b) p2 = pc.Polytope(self.Ab2[:, 0:2], self.Ab2[:, 2]) p = p.translation([-1, 0]) assert(p == p2) assert(not p == p1) p = p.translation([1, 0]) assert(p == p1) def region_empty_test(self): # Note that as of commit a037b555758ed9ee736fa7cb324d300b8d622fb4 # Region.__init__ deletes empty polytopes from # the given list of polytopes at instantiation. reg = pc.Region() reg.list_poly = [pc.Polytope(), pc.Polytope()] assert len(reg) > 0 assert pc.is_empty(reg) def polytope_full_dim_test(self): assert pc.is_fulldim(pc.Polytope(self.A, self.b)) assert pc.is_fulldim(pc.Polytope(self.Ab2[:, 0:2], self.Ab2[:, 2])) assert not pc.is_fulldim(pc.Polytope()) assert not pc.is_fulldim(pc.Polytope(self.A, self.b - 1e3)) def region_full_dim_test(self): assert not pc.is_fulldim(pc.Region()) p1 = pc.Polytope(self.A, self.b) p2 = pc.Polytope(self.Ab2[:, 0:2], self.Ab2[:, 2]) reg = pc.Region([p1, p2]) assert pc.is_fulldim(reg) # Adding empty polytopes should not affect the # full-dimensional status of this region. reg.list_poly.append(pc.Polytope()) assert pc.is_fulldim(reg) reg.list_poly.append(pc.Polytope(self.A, self.b - 1e3)) assert pc.is_fulldim(reg) def polytope_intersect_test(self): p1 = pc.Polytope(self.A, self.b) p2 = pc.Polytope(self.Ab2[:, 0:2], self.Ab2[:, 2]) p3 = p1.intersect(p2) assert pc.is_fulldim(p1) assert pc.is_fulldim(p2) assert not pc.is_fulldim(p3) # p4 is the unit square with center at the origin. 
p4 = pc.Polytope(np.array([[ 1., 0.], [ 0., 1.], [-1., 0.], [ 0., -1.]]), np.array([0.5, 0.5, 0.5, 0.5])) p5 = p2.intersect(p4) assert pc.is_fulldim(p4) assert pc.is_fulldim(p5) def polytope_contains_test(self): p = pc.Polytope(self.A, self.b) # single point point_i = [0.1, 0.3] point_o = [2, 0] assert point_i in p assert point_o not in p # multiple points many_points_i = np.random.random((2, 8)) many_points_0 = np.random.random((2, 8)) - np.array([[0], [1]]) many_points = np.concatenate([many_points_0, many_points_i], axis=1) truth = np.array([False] * 8 + [True] * 8, dtype=bool) assert_array_equal(p.contains(many_points), truth) def region_contains_test(self): A = np.array([[1.0], [-1.0]]) b = np.array([1.0, 0.0]) poly = pc.Polytope(A, b) polys = [poly] reg = pc.Region(polys) assert 0.5 in reg # small positive tolerance (includes boundary) points = np.array([[-1.0, 0.0, 0.5, 1.0, 2.0]]) c = reg.contains(points) c_ = np.array( [[False, True, True, True, False]], dtype=bool) # zero tolerance (excludes boundary) points = np.array([[-1.0, 0.0, 0.5, 1.0, 2.0]]) c = reg.contains(points, abs_tol=0) c_ = np.array( [[False, False, True, False, False]], dtype=bool) assert np.all(c == c_), c def is_inside_test(self): box = [[0.0, 1.0], [0.0, 2.0]] p = pc.Polytope.from_box(box) point = np.array([0.0, 1.0]) abs_tol = 0.01 assert pc.is_inside(p, point) assert pc.is_inside(p, point, abs_tol) region = pc.Region([p]) assert pc.is_inside(region, point) assert pc.is_inside(region, point, abs_tol) point = np.array([2.0, 0.0]) assert not pc.is_inside(p, point) assert not pc.is_inside(p, point, abs_tol) region = pc.Region([p]) assert not pc.is_inside(region, point) assert not pc.is_inside(region, point, abs_tol) abs_tol = 1.2 assert pc.is_inside(p, point, abs_tol) assert pc.is_inside(region, point, abs_tol) def test_bounding_box_to_polytope(): boxes = [ [[0, 1]], [[0, 1], [0, 2]], [[-1, 2], [3, 5], [-5, -3]], ] for intervals in boxes: _check_bbox_to_poly(intervals) def _check_bbox_to_poly(intervals): poly = pc.box2poly(intervals) bbox = alg._bounding_box_to_polytope(*poly.bounding_box) assert poly == bbox, bbox def solve_rotation_test_090(atol=1e-15): g1 = np.array([0, 1, 1, 0]) g2 = np.array([0, 1, 0, 0]) R = alg.solve_rotation_ap(g1, g2) e0 = np.array([0, 1, 1, 1]) e1 = np.array([0, 0, -1, 0]) e2 = np.array([0, 0, 0, 0]) t0 = np.array([0, 1, -1, 1]) t1 = np.array([0, -1, 0, 0]) t2 = np.array([0, 0, 0, 0]) assert_allclose(R.dot(e0), t0, atol=atol) assert_allclose(R.dot(e1), t1, atol=atol) assert_allclose(R.dot(e2), t2, atol=atol) def solve_rotation_test_180(atol=1e-15): g1 = np.array([0, 1, 0, 0]) g2 = np.array([0, 0, 1, 0]) R = alg.solve_rotation_ap(g1, g2) e0 = np.array([0, 1, 1, 1]) e1 = np.array([0, 0, -1, 0]) e2 = np.array([0, 0, 0, 0]) t0 = np.array([0, -1, -1, 1]) t1 = np.array([0, 0, 1, 0]) t2 = np.array([0, 0, 0, 0]) assert_allclose(R.dot(e0), t0, atol=atol) assert_allclose(R.dot(e1), t1, atol=atol) assert_allclose(R.dot(e2), t2, atol=atol) def solve_rotation_test_270R(atol=1e-15): g1 = np.array([0, -1, 0, 0]) g2 = np.array([0, 1, 1, 0]) R = alg.solve_rotation_ap(g1, g2) e0 = np.array([0, 1, 1, 1]) e1 = np.array([0, 0, -1, 0]) e2 = np.array([0, 0, 0, 0]) t0 = np.array([0, -1, 1, 1]) t1 = np.array([0, 1, 0, 0]) t2 = np.array([0, 0, 0, 0]) assert_allclose(R.dot(e0), t0, atol=atol) assert_allclose(R.dot(e1), t1, atol=atol) assert_allclose(R.dot(e2), t2, atol=atol) def solve_rotation_test_270L(atol=1e-15): g1 = np.array([0, -1, 0, 0]) g2 = np.array([0, 1, -1, 0]) R = alg.solve_rotation_ap(g1, g2) e0 = 
np.array([0, 1, 1, 1]) e1 = np.array([0, 0, -1, 0]) e2 = np.array([0, 0, 0, 0]) t0 = np.array([0, 1, -1, 1]) t1 = np.array([0, -1, 0, 0]) t2 = np.array([0, 0, 0, 0]) assert_allclose(R.dot(e0), t0, atol=atol) assert_allclose(R.dot(e1), t1, atol=atol) assert_allclose(R.dot(e2), t2, atol=atol) def givens_rotation_test_180(atol=1e-15): R = alg.givens_rotation_matrix(1, 2, np.pi, 4) e0 = np.array([0, 1, 1, 1]) e1 = np.array([0, 0, -1, 0]) e2 = np.array([0, 0, 0, 0]) t0 = np.array([0, -1, -1, 1]) t1 = np.array([0, 0, 1, 0]) t2 = np.array([0, 0, 0, 0]) assert_allclose(R.dot(e0), t0, atol=atol) assert_allclose(R.dot(e1), t1, atol=atol) assert_allclose(R.dot(e2), t2, atol=atol) def givens_rotation_test_270L(atol=1e-15): g1 = np.array([0, -1, 0, 0]) g2 = np.array([0, 1, -1, 0]) R = alg.givens_rotation_matrix(1, 2, 3*np.pi/2, 4) e0 = np.array([0, 1, 1, 1]) e1 = np.array([0, 0, -1, 0]) e2 = np.array([0, 0, 0, 0]) t0 = np.array([0, 1, -1, 1]) t1 = np.array([0, -1, 0, 0]) t2 = np.array([0, 0, 0, 0]) assert_allclose(R.dot(e0), t0, atol=atol) assert_allclose(R.dot(e1), t1, atol=atol) assert_allclose(R.dot(e2), t2, atol=atol) def test_enumerate_integral_points(): """Test the computation of integral points.""" # convex polytope vertices = np.array([[0.5, 1.5], [0.5, 1.5]]) hull = pc.box2poly(vertices) integral_points = alg.enumerate_integral_points(hull) integral_points_ = np.array([[1.0], [1.0]]) assert_allclose( _lexsort(integral_points), _lexsort(integral_points_)), integral_points # # nonconvex polytope vertices = np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 1.0]]) hull_1 = pc.qhull(vertices) hull_2 = pc.box2poly([[1.0, 2.0], [1.0, 2.0]]) nonconvex = hull_1.union(hull_2) integral_points = alg.enumerate_integral_points(nonconvex) integral_points_ = np.array([ [0.0, 1.0, 2.0, 1.0, 2.0], [0.0, 1.0, 1.0, 2.0, 2.0] ]) assert_allclose( _lexsort(integral_points), _lexsort(integral_points_)), integral_points # # 3-dimensional polytope vertices = np.array([ [0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]]) hull = pc.qhull(vertices) integral_points = alg.enumerate_integral_points(hull) integral_points_ = np.array([ [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0] ]) assert_allclose( _lexsort(integral_points), _lexsort(integral_points_)), integral_points def _lexsort(x): return x[:, np.lexsort(x)] def test_grid_region(): # 8 points in [0, 1] poly = pc.box2poly([[0, 1]]) points, res = pc.grid_region(poly) assert res == [8], res _check_grid(points, poly, res) # 100 points in [0, 2] poly = pc.box2poly([[0, 2]]) points, res = pc.grid_region(poly, res=[100]) assert res == [100], res _check_grid(points, poly, res) # 8 * 8 points in a square poly = pc.box2poly([[0, 10], [5, 20]]) points, res = pc.grid_region(poly) assert res == [80, 120], res _check_grid(points, poly, res) # 20 * 20 points in a square poly = pc.box2poly([[-3, 50], [1, 4]]) points, res = pc.grid_region(poly, res=[20, 21]) assert res == [20, 21], res _check_grid(points, poly, res) with pytest.raises(ValueError): pc.grid_region(poly, res=[20]) with pytest.raises(ValueError): pc.grid_region(poly, res=[20, 10, 20]) with pytest.raises(ValueError): pc.grid_region(poly, res=[20, -1]) with pytest.raises(ValueError): pc.grid_region(poly, res=[0, 2]) res = [1, 1] points, res_ = pc.grid_region(poly, res=res) assert res == res_, res_ _check_grid(points, poly, res) def _check_grid(points, poly, res): assert points.shape == (poly.dim, np.prod(res)), (points.shape, res) bbox = alg._bounding_box_to_polytope(*poly.bounding_box) c = 
bbox.contains(points) assert np.all(c), points[:, c] def test_lpsolve(): # Ensure same API for both `scipy` and `cvxopt`. # Ensured by the different testing configurations. # Could change `polytope.polytope.default_solver` to # achieve the same result, when `cvxopt.glpk` is present. # # 2-D example c = np.array([1, 1], dtype=float) A = np.array([[-1, 0], [0, -1]], dtype=float) b = np.array([1, 1], dtype=float) res = solvers.lpsolve(c, A, b) x = res['x'] assert x.ndim == 1, x.ndim assert x.shape == (2,), x.shape # # 1-D example c, A, b = example_1d() res = solvers.lpsolve(c, A, b) x = res['x'] assert x.ndim == 1, x.ndim assert x.shape == (1,), x.shape def example_1d(): c = np.array([1], dtype=float) A = np.array([[-1]], dtype=float) b = np.array([1], dtype=float) return c, A, b def test_lpsolve_solver_selection_scipy(): # should always work, because `polytope` requires `scipy` c, A, b = example_1d() r_ = np.array([-1.0]) # call directly to isolate from selection within `lpsolve` r = solvers._solve_lp_using_scipy(c, A, b) assert r['x'] == r_, r['x'] r = solvers.lpsolve(c, A, b, solver='scipy') assert r['x'] == r_, r['x'] def test_lpsolve_solver_selection_glpk_present(): c, A, b = example_1d() have_glpk = is_glpk_present() # skip if GLPK fails to import if not have_glpk: log.info( 'Skipping GLPK test of `lpsolve` ' 'because GLPK failed to import, ' 'so assume not installed.') return r = solvers.lpsolve(c, A, b, solver='glpk') assert r['x'] == np.array([-1.0]), r['x'] def test_lpsolve_solver_selection_glpk_absent(): c, A, b = example_1d() have_glpk = is_glpk_present() # skip if GLPK imports if have_glpk: log.info( 'Skipping GLPK failure test, ' 'because GLPK is present.') return with pytest.raises(RuntimeError): solvers.lpsolve(c, A, b, solver='glpk') def test_request_glpk_after_changing_default_to_scipy(): c, A, b = example_1d() have_glpk = is_glpk_present() if not have_glpk: return assert solvers.default_solver != 'scipy' solvers.default_solver = 'scipy' solvers.lpsolve(c, A, b, solver='glpk') def is_glpk_present(): """Return `True` if `cvxopt.glpk` imports.""" try: import cvxopt.glpk assert 'glpk' in solvers.installed_solvers, ( solvers.installed_solvers) return True except ImportError: assert 'glpk' not in solvers.installed_solvers, ( solvers.installed_solvers) return False def test_fourier_motzkin_square(): # Setup a square and project it on the x and y axis a = np.array([ [-1.0, 0.0], [1.0, 0.0], [0.0, -1.0], [0.0, 1.0], ]) b = np.array([ -1.0, 2.0, -1.0, 2.0, ]) poly = pc.Polytope(a, b) project_dim_0 = pc.polytope.projection_fm(poly, None, np.array([1])) project_dim_1 = pc.polytope.projection_fm(poly, None, np.array([0])) expected_a = np.array([[-1.0], [1.0]]) expected_b = np.array([-1.0, 2.0]) ind_0 = np.argsort(project_dim_0.A, axis=0).flatten() ind_1 = np.argsort(project_dim_1.A, axis=0).flatten() assert np.allclose( project_dim_0.A[ind_0], expected_a, pc.polytope.ABS_TOL),\ (project_dim_0.A[ind_0], expected_a) assert np.allclose( project_dim_0.b[ind_0], expected_b, pc.polytope.ABS_TOL),\ (project_dim_0.b[ind_0], expected_b) assert np.allclose( project_dim_1.A[ind_1], expected_a, pc.polytope.ABS_TOL),\ (project_dim_1.A[ind_1], expected_a) assert np.allclose( project_dim_1.b[ind_1], expected_b, pc.polytope.ABS_TOL),\ (project_dim_1.b[ind_1], expected_b) def test_fourier_motzkin_triangle(): # Setup a triangle and project it on the x and y axis. 
a = np.array([ [0.0, -1.0], [1.0, 1.0], [-1.0, 1.0], ]) b = np.array([ -1.0, 4.0, 0.0, ]) poly = pc.Polytope(a, b) project_dim_0 = pc.polytope.projection_fm(poly, None, np.array([1])) project_dim_1 = pc.polytope.projection_fm(poly, None, np.array([0])) expected_a_0 = np.array([[-1.0], [1.0]]) expected_b_0 = np.array([-1.0, 3.0]) ind_0 = np.argsort(project_dim_0.A, axis=0).flatten() expected_a_1 = np.array([[-1.0], [1.0]]) expected_b_1 = np.array([-1.0, 2.0]) ind_1 = np.argsort(project_dim_1.A, axis=0).flatten() assert np.allclose( project_dim_0.A[ind_0], expected_a_0, pc.polytope.ABS_TOL), \ (project_dim_0.A[ind_0], expected_a_0) assert np.allclose( project_dim_0.b[ind_0], expected_b_0, pc.polytope.ABS_TOL), \ (project_dim_0.b[ind_0], expected_b_0) assert np.allclose( project_dim_1.A[ind_1], expected_a_1, pc.polytope.ABS_TOL), \ (project_dim_1.A[ind_1], expected_a_1) assert np.allclose( project_dim_1.b[ind_1], expected_b_1, pc.polytope.ABS_TOL), \ (project_dim_1.b[ind_1], expected_b_1) def test_reduce(): a = np.array([ [1.0, 0.1], [1.0, 0.1], [-1., 0.], [0., 1.], [0., -1.] ]) b = np.array([ 50., 50.5, -40., 1., 0. ]) poly = pc.Polytope(a, b) poly2 = pc.reduce(poly) l, u = poly2.bounding_box assert_allclose(l, np.array([[40.], [0.]]), rtol=1e-07, atol=1e-07) assert_allclose(u, np.array([[50.], [1.]]), rtol=1e-07, atol=1e-07) if __name__ == '__main__': pass
21,440
29.326733
76
py
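For readers skimming the tests above, a compact sketch of the core API calls they exercise (the boxes are chosen arbitrarily):

import polytope as pc

p = pc.box2poly([[0.0, 1.0], [0.0, 2.0]])    # box -> H-representation
v = pc.extreme(p)                            # extreme points (V-representation)
q = pc.qhull(v)                              # convex hull -> back to H-rep
print(q == p)                                # polytope equality
print([0.5, 1.0] in p)                       # point membership
r = p.union(pc.box2poly([[1.0, 2.0], [0.0, 2.0]]))   # union of two boxes
print(pc.is_fulldim(r))                      # full-dimensionality check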
polytope
polytope-main/tests/plot_test.py
#!/usr/bin/env python
"""Tests for plotting."""
import unittest

try:
    import matplotlib as mpl
    import matplotlib.patches
except ImportError:
    mpl = None

import polytope as pc
from polytope import plot


class Axes(object):
    """Mock class."""

    def add_patch(self, x):
        pass


@unittest.skipIf(
    mpl is None,
    '`matplotlib` is not installed')
def test_plot_transition_arrow():
    p0 = pc.box2poly([[0.0, 1.0], [0.0, 2.0]])
    p1 = pc.box2poly([[0.1, 2.0], [0.0, 2.0]])
    # matplotlib.patches is loaded also by matplotlib.pyplot
    # and .figures, so instantiating real Axes w/o
    # loading patches is impossible
    ax = Axes()
    arrow = plot.plot_transition_arrow(p0, p1, ax=ax)
    assert isinstance(arrow, matplotlib.patches.Arrow)
775
21.823529
60
py
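Outside the mocked `Axes` used by this test, the same call is normally made against a real matplotlib axes. A sketch, assuming matplotlib is installed and that `Polytope.plot` accepts an `ax` keyword (an assumption, not shown in the files above):

import matplotlib.pyplot as plt

import polytope as pc
from polytope import plot

p0 = pc.box2poly([[0.0, 1.0], [0.0, 2.0]])
p1 = pc.box2poly([[0.1, 2.0], [0.0, 2.0]])
fig, ax = plt.subplots()
p0.plot(ax=ax)   # `ax=` is assumed to be supported here
p1.plot(ax=ax)
plot.plot_transition_arrow(p0, p1, ax=ax)
plt.show()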
polytope
polytope-main/polytope/esp.py
# Copyright (c) 2011-2014 by California Institute of Technology # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the California Institute of Technology nor # the names of its contributors may be used to endorse or promote # products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH # OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF # USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT # OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. r"""Equality Set Projection (ESP). Non-vertex polytope projection method from - https://web.archive.org/web/20150103142532/ https://www-control.eng.cam.ac.uk/~cnj22/research/projection.html - https://infoscience.epfl.ch/record/169768 Very unstable, can not handle complex polytopes. Reference ========= \cite{Jones04} """ # Created by P. Nilsson, 8/2/11 import pickle import numpy as np from scipy import io as sio from scipy import linalg from polytope import solvers class Ridge(object): """A ridge. Attributes: - `E_r`: Equality set of a facet - `ar, br`: Affine hull of the facet s.t. P_{E_0} = P intersection {x | ar x = br}. """ def __init__(self, E, a, b): self.E_r = E self.ar = a self.br = b class Ridge_Facet(object): """A ridge facet. Attributes: - `E_r`: Equality set of a ridge - `ar,br`: Affine hull of the ridge s.t. P_{E_f} intersection {x | ar x = br} defines the ridge, where E_f is the equality set of the facet. - `E_0`: Equality set of a facet - `af,bf`: Affine hull of the facet. """ def __init__(self, E_r, ar, br, E_0, af, bf): self.E_r = E_r self.ar = ar self.br = br self.E_0 = E_0 self.af = af self.bf = bf def esp(CC, DD, bb, centered=False, abs_tol=1e-10, verbose=0): """Project polytope [C D] x <= b onto C coordinates. Projects the polytope [C D] x <= b onto the coordinates that correspond to C. 
The projection of the polytope P = {[C D]x <= b} where C is M x D and D is M x K is defined as proj(P) = {x in R^d | exist y in R^k s.t Cx + Dy < b} """ if 'glpk' not in solvers.installed_solvers: raise Exception( "projection_esp error:" " Equality set projection requires `cvxopt.glpk` to run.") # Remove zero columns and rows nonzerorows = np.nonzero( np.sum(np.abs(np.hstack([CC, DD])), axis=1) > abs_tol)[0] nonzeroxcols = np.nonzero(np.sum(np.abs(CC), axis=0) > abs_tol)[0] nonzeroycols = np.nonzero(np.sum(np.abs(DD), axis=0) > abs_tol)[0] C = CC[nonzerorows, :].copy() D = DD[nonzerorows, :].copy() C = C[:, nonzeroxcols] D = D[:, nonzeroycols] b = bb[nonzerorows].copy() # Make sure origo is inside polytope if not centered: xc0, yc0, trans = cheby_center(C, D, b) if trans: b = b - np.dot(C, xc0).flatten() - np.dot(D, yc0).flatten() else: b = b else: trans = False d = C.shape[1] k = D.shape[1] if verbose > 0: print("Projecting from dim " + str(d + k) + " to " + str(d)) if k == 0: # Not projecting return C, bb, [] if d == 1: # Projection to 1D c = np.zeros(d + k) c[0] = 1 G = np.hstack([C, D]) sol = solvers.lpsolve(c, G, b, solver='glpk') if sol['status'] != "optimal": raise Exception( "esp: projection to 1D is not full-dimensional, " "LP returned status " + str(sol['status'])) min_sol = np.array(sol['x']).flatten() min_dual_sol = np.array(sol['z']).flatten() sol = solvers.lpsolve(-c, G, b, solver='glpk') if sol['status'] != "optimal": raise Exception( "esp: projection to 1D is not full-dimensional, " + "LP returned status " + str(sol['status'])) max_sol = np.array(sol['x']).flatten() max_dual_sol = np.array(sol['z']).flatten() # min, max x_min = min_sol[0] x_max = max_sol[0] y_min = min_sol[range(1, k + 1)] y_max = max_sol[range(1, k + 1)] if is_dual_degenerate(c, G, b, None, None, min_sol, min_dual_sol): # Min case, relax constraint a little to avoid infeasibility E_min = unique_equalityset( C, D, b, np.array([1.]), x_min + abs_tol / 3, abs_tol=abs_tol) else: E_min = np.nonzero(np.abs(np.dot(G, min_sol) - b) < abs_tol)[0] if is_dual_degenerate(c, G, b, None, None, max_sol, max_dual_sol): # Max case, relax constraint a little to avoid infeasibility E_max = unique_equalityset( C, D, b, np.array([1.]), x_max - abs_tol / 3, abs_tol=abs_tol) else: E_max = np.nonzero(np.abs(np.dot(G, max_sol) - b) < abs_tol)[0] G = np.array([[1.], [-1.]]) g = np.array([x_max, -x_min]) # Relocate if trans: g = g + np.dot(G, xc0) # Return zero cols/rows E_max = nonzerorows[E_max] E_min = nonzerorows[E_min] if verbose > 0: print( "Returning projection from dim " + str(d + k) + " to dim 1 \n") return G, g, [E_max, E_min] E = [] L = [] E_0, af, bf = shoot(C, D, b, abs_tol=abs_tol) ridge_list = ridge(C, D, b, E_0, af, bf, abs_tol=abs_tol, verbose=verbose) for i in range(len(ridge_list)): r = ridge_list[i] L.append(Ridge_Facet(r.E_r, r.ar, r.br, E_0, af, bf)) G = af.T g = bf if verbose > 0: print("\nStarting eq set " + str(E_0) + "\nStarting ridges ") for rr in L: print(str(rr.E_r)) E.append(E_0) while len(L) > 0: rid_fac1 = L[0] if verbose > 0: print("\nLooking for neighbors to " + str(rid_fac1.E_0) + " and " + str(rid_fac1.E_r) + " ..") E_adj, a_adj, b_adj = adjacent(C, D, b, rid_fac1, abs_tol=abs_tol) if verbose > 0: print("found neighbor " + str(E_adj) + ". 
\n\nLooking for ridges of neighbor..") ridge_list = ridge( C, D, b, E_adj, a_adj, b_adj, abs_tol=abs_tol, verbose=verbose) if verbose > 0: print("found " + str(len(ridge_list)) + " ridges\n") found_org = False for i in range(len(ridge_list)): r = ridge_list[i] E_r = r.E_r ar = r.ar br = r.br found = False for j in range(len(L)): rid_fac2 = L[j] A_r = rid_fac2.E_r if len(A_r) != len(E_r): continue t1 = np.sort(np.array(A_r)) t2 = np.sort(np.array(E_r)) if np.sum(np.abs(t1 - t2)) < abs_tol: found = True break if found: if verbose > 0: print("Ridge " + str(E_r) + " already visited, removing from L..") if rid_fac2 == rid_fac1: found_org = True L.remove(rid_fac2) else: if verbose > 0: print("Adding ridge-facet " + str(E_adj) + " " + str(E_r) + "") L.append(Ridge_Facet(E_r, ar, br, E_adj, a_adj, b_adj)) if not found_org: print("Expected ridge " + str(rid_fac1.E_r)) print("but got ridges ") for rid in ridge_list: print(rid.E_r) raise Exception( "esp: ridge did not return neighboring ridge as expected") G = np.vstack([G, a_adj]) g = np.hstack([g, b_adj]) E.append(E_adj) # Restore center if trans: g = g + np.dot(G, xc0) # Return zero rows for Ef in E: Ef = nonzerorows[Ef] return G, g, E def shoot(C, D, b, maxiter=1000, abs_tol=1e-7): """Return random equality set of P that projects on a projection facet. Returns randomly selected equality set E_0 of P such that the projection of the equality set is a facet of the projection. @param C: Matrix defining the polytope Cx+Dy <= b @param D: Matrix defining the polytope Cx+Dy <= b @param b: Vector defining the polytope Cx+Dy <= b @return: `E_0,af,bf`: Equality set and affine hull """ d = C.shape[1] k = D.shape[1] iter = 0 while True: if iter > maxiter: raise Exception( "shoot: could not find starting equality set") gamma = np.random.rand(d) - 0.5 c = np.zeros(k + 1) c[0] = -1 G = np.hstack([np.array([np.dot(C, gamma)]).T, D]) sol = solvers.lpsolve(c, G, b, solver='glpk') opt_sol = np.array(sol['x']).flatten() opt_dual = np.array(sol['z']).flatten() r_opt = opt_sol[0] y_opt = np.array(opt_sol[range(1, len(opt_sol))]).flatten() x_opt = r_opt * gamma E_0 = np.nonzero( np.abs(np.dot(C, x_opt) + np.dot(D, y_opt) - b) < abs_tol)[0] DE0 = D[E_0, :] CE0 = C[E_0, :] b0 = b[E_0] if rank(np.dot(null_space(DE0.T).T, CE0)) == 1: break iter += 1 af, bf = proj_aff(CE0, DE0, b0, abs_tol=abs_tol) if is_dual_degenerate(c, G, b, None, None, opt_sol, opt_dual, abs_tol=abs_tol): E_0 = unique_equalityset(C, D, b, af, bf, abs_tol=abs_tol) af, bf = proj_aff(C[E_0, :], D[E_0, :], b[E_0]) if len(bf) > 1: raise Exception("shoot: wrong dimension of affine hull") return E_0, af.flatten(), bf def ridge(C, D, b, E, af, bf, abs_tol=1e-7, verbose=0): """Compute all ridges of a facet in the projection. 
Input: `C,D,b`: Original polytope data `E,af,bf`: Equality set and affine hull of a facet in the projection Output: `ridge_list`: A list containing all the ridges of the facet as Ridge objects """ d = C.shape[1] k = D.shape[1] Er_list = [] q = C.shape[0] E_c = np.setdiff1d(range(q), E) # E slices C_E = C[E, :] D_E = D[E, :] b_E = b[E, :] # E_c slices C_Ec = C[E_c, :] D_Ec = D[E_c, :] b_Ec = b[E_c] # dots S = C_Ec - np.dot(np.dot(D_Ec, linalg.pinv(D_E)), C_E) L = np.dot(D_Ec, null_space(D_E)) t = b_Ec - np.dot(D_Ec, np.dot(linalg.pinv(D_E), b_E)) if rank(np.hstack([C_E, D_E])) < k + 1: if verbose > 1: print("Doing recursive ESP call") u, s, v = linalg.svd(np.array([af]), full_matrices=1) sigma = s[0] v = v.T * u[0, 0] # Correct sign V_hat = v[:, [0]] V_tilde = v[:, range(1, v.shape[1])] Cnew = np.dot(S, V_tilde) Dnew = L bnew = t - np.dot(S, V_hat).flatten() * bf / sigma Anew = np.hstack([Cnew, Dnew]) xc2, yc2, cen2 = cheby_center(Cnew, Dnew, bnew) bnew = bnew - np.dot(Cnew, xc2).flatten() - np.dot(Dnew, yc2).flatten() Gt, gt, E_t = esp( Cnew, Dnew, bnew, centered=True, abs_tol=abs_tol, verbose=0) if (len(E_t[0]) == 0) or (len(E_t[1]) == 0): raise Exception( "ridge: recursive call did not return any equality sets") for i in range(len(E_t)): E_f = E_t[i] er = np.sort(np.hstack([E, E_c[E_f]])) ar = np.dot(Gt[i, :], V_tilde.T).flatten() br0 = gt[i].flatten() # Make orthogonal to facet ar = ar - af * np.dot(af.flatten(), ar.flatten()) br = br0 - bf * np.dot(af.flatten(), ar.flatten()) # Normalize and make ridge equation point outwards norm = np.sqrt(np.sum(ar * ar)) ar = ar * np.sign(br) / norm br = br * np.sign(br) / norm # Restore center br = br + np.dot(Gt[i, :], xc2) / norm if len(ar) > d: raise Exception("ridge: wrong length of new ridge!") Er_list.append(Ridge(er, ar, br)) else: if verbose > 0: print("Doing direct calculation of ridges") X = np.arange(S.shape[0]) while len(X) > 0: i = X[0] X = np.setdiff1d(X, i) if np.linalg.norm(S[i, :]) < abs_tol: continue Si = S[i, :] Si = Si / np.linalg.norm(Si) if np.linalg.norm(af - np.dot(Si, af) * Si) > abs_tol: test1 = null_space( np.vstack([ np.hstack([af, bf]), np.hstack([S[i, :], t[i]])]), nonempty=True) test2 = np.hstack([S, np.array([t]).T]) test = np.dot(test1.T, test2.T) test = np.sum(np.abs(test), 0) Q_i = np.nonzero(test > abs_tol)[0] Q = np.nonzero(test < abs_tol)[0] X = np.setdiff1d(X, Q) # Have Q_i Sq = S[Q_i, :] tq = t[Q_i] c = np.zeros(d + 1) c[0] = 1 Gup = np.hstack([-np.ones([Sq.shape[0], 1]), Sq]) Gdo = np.hstack([-1, np.zeros(Sq.shape[1])]) G = np.vstack([Gup, Gdo]) h = np.hstack([tq, 1]) Al = np.zeros([2, 1]) Ar = np.vstack([af, S[i, :]]) A = np.hstack([Al, Ar]) bb = np.hstack([bf, t[i]]) sol = solvers._solve_lp_using_cvxopt( c, G, h, A=A, b=bb) if sol['status'] == 'optimal': tau = sol['x'][0] if tau < -abs_tol: ar = np.array([S[i, :]]).flatten() br = t[i].flatten() # Make orthogonal to facet ar = ar - af * np.dot(af.flatten(), ar.flatten()) br = br - bf * np.dot(af.flatten(), ar.flatten()) # Normalize and make ridge equation point outwards norm = np.sqrt(np.sum(ar * ar)) ar = ar / norm br = br / norm # accumulate Er_list.append( Ridge(np.sort(np.hstack([E, E_c[Q]])), ar, br)) return Er_list def adjacent(C, D, b, rid_fac, abs_tol=1e-7): """Compute the (unique) adjacent facet. @param rid_fac: A Ridge_Facet object containing the parameters for a facet and one of its ridges. 
@return: (E_adj,a_adj,b_adj): The equality set and parameters for the adjacent facet such that:: P_{E_adj} = P intersection {x | a_adj x = b_adj} """ E = rid_fac.E_0 af = rid_fac.af bf = rid_fac.bf # E_r = rid_fac.E_r ar = rid_fac.ar br = rid_fac.br # shape d = C.shape[1] k = D.shape[1] # E_r slices C_er = C[E_r, :] D_er = D[E_r, :] b_er = b[E_r] # stack c = -np.hstack([ar, np.zeros(k)]) G = np.hstack([C_er, D_er]) h = b_er A = np.hstack([af, np.zeros(k)]) sol = solvers._solve_lp_using_cvxopt( c, G, h, A=A.T, b=bf * (1 - 0.01)) if sol['status'] != "optimal": print(G) print(h) print(af) print(bf) print(ar) print(br) print(np.dot(af, ar)) data = {} data["C"] = C data["D"] = D data["b"] = b sio.savemat("matlabdata", data) with open('polytope.p', 'wb') as f: pickle.dump(data, f) raise Exception( "adjacent: Lp returned status " + str(sol['status'])) opt_sol = np.array(sol['x']).flatten() dual_opt_sol = np.array(sol['z']).flatten() x_opt = opt_sol[range(d)] y_opt = opt_sol[range(d, d + k)] if is_dual_degenerate( c.flatten(), G, h, A, bf * (1 - 0.01), opt_sol, dual_opt_sol, abs_tol=abs_tol): # If degenerate, compute affine hull and take preimage E_temp = np.nonzero(np.abs(np.dot(G, opt_sol) - h) < abs_tol)[0] a_temp, b_temp = proj_aff( C_er[E_temp, :], D_er[E_temp, :], b_er[E_temp], expected_dim=1, abs_tol=abs_tol) E_adj = unique_equalityset(C, D, b, a_temp, b_temp, abs_tol=abs_tol) if len(E_adj) == 0: data = {} data["C"] = C data["D"] = D data["b"] = b data["Er"] = E_r + 1 data["ar"] = ar data["br"] = br data["Ef"] = E + 1 data["af"] = af data["bf"] = bf sio.savemat("matlabdata", data) raise Exception( "adjacent: equality set computation returned empty set") else: r = np.abs(np.dot(C, x_opt) + np.dot(D, y_opt) - b) < abs_tol E_adj = np.nonzero(r)[0] C_eadj = C[E_adj, :] D_eadj = D[E_adj, :] b_eadj = b[E_adj] af_adj, bf_adj = proj_aff(C_eadj, D_eadj, b_eadj, abs_tol=abs_tol) return E_adj, af_adj, bf_adj def proj_aff(Ce, De, be, expected_dim=None, abs_tol=1e-7): """Affine projection. Compute the set aff = {x | Ce x + De y = be} on the form aff = ({x | a x = b} intersection {Ce x + De y < be}). Input: Polytope parameters Ce, De and be Output: Constants a and b """ # Remove zero columns ind = np.nonzero(np.sum(np.abs(De), axis=0) > abs_tol)[0] D = De[:, ind] if D.shape[1] == 0: a = Ce b = be a_n, b_n = normalize(a, b) if expected_dim is not None: if expected_dim != b_n.size: raise Exception( "proj_aff: wrong dimension calculated in 1") return a_n.flatten(), b_n sh = np.shape(D.T) m = sh[0] n = sh[1] nDe = null_space(D.T) a = np.dot(nDe.T, Ce) b = np.dot(nDe.T, be) a_n, b_n = normalize(a, b) if expected_dim is not None: if expected_dim != b_n.size: raise Exception("proj_aff: wrong dimension calculated in 2") return a_n, b_n def is_dual_degenerate(c, G, h, A, b, x_opt, z_opt, abs_tol=1e-7): """Return `True` if pair of dual problems is dual degenerate. Checks if the pair of dual problems:: (P): min c'x (D): max h'z + b'y s.t Gx <= h s.t G'z + A'y = c Ax = b z <= 0 is dual degenerate, i.e. if (P) has several optimal solutions. Optimal solutions x* and z* are required. Input: `G,h,A,b`: Parameters of (P) `x_opt`: One optimal solution to (P) `z_opt`: The optimal solution to (D) corresponding to _inequality constraints_ in (P) Output: `dual`: Boolean indicating whether (P) has many optimal solutions. 
""" D = - G d = - h.flatten() mu = - z_opt.flatten() # mu >= 0 # Active constraints I = np.nonzero(np.abs(np.dot(D, x_opt).flatten() - d) < abs_tol)[0] # Positive elements in dual opt J = np.nonzero(mu > abs_tol)[0] # i, j i = mu < abs_tol # Zero elements in dual opt i = i.astype(int) j = np.zeros(len(mu), dtype=int) j[I] = 1 # 1 if active # Indices where active constraints have 0 dual opt L = np.nonzero(i + j == 2)[0] # sizes nI = len(I) nJ = len(J) nL = len(L) # constraints DI = D[I, :] # Active constraints DJ = D[J, :] # Constraints with positive lagrange mult DL = D[L, :] # Active constraints with zero dual opt dual = 0 if A is None: test = DI else: test = np.vstack([DI, A]) if rank(test) < np.amin(DI.shape): return True else: if len(L) > 0: if A is None: Ae = DJ else: Ae = np.vstack([DJ, A]) be = np.zeros(Ae.shape[0]) Ai = - DL bi = np.zeros(nL) sol = solvers._solve_lp_using_cvxopt( c= - np.sum(DL, axis=0), G=Ai, h=bi, A=Ae, b=be) if sol['status'] == "dual infeasible": # Dual infeasible -> primal unbounded -> value>epsilon return True if sol['primal objective'] > abs_tol: return True return False def unique_equalityset(C, D, b, af, bf, abs_tol=1e-7, verbose=0): """Return equality set E with the following property: P_E = {x | af x = bf} intersection P where P is the polytope C x + D y < b The inequalities have to be satisfied with equality everywhere on the face defined by af and bf. """ if D is not None: A = np.hstack([C, D]) a = np.hstack([af, np.zeros(D.shape[1])]) else: A = C a = af E = [] for i in range(A.shape[0]): A_i = np.array(A[i, :]) b_i = b[i] sol = solvers._solve_lp_using_cvxopt( c=A_i, G=A, h=b, A=a.T, b=bf) if sol['status'] != "optimal": raise Exception( "unique_equalityset: LP returned status " + str(sol['status'])) if np.abs(sol['primal objective'] - b_i) < abs_tol: # Constraint is active everywhere E.append(i) if len(E) == 0: raise Exception("unique_equalityset: empty E") return np.array(E) def unique_equalityset2(C, D, b, opt_sol, abs_tol=1e-7): A = np.hstack([C, D]) E0 = np.nonzero(np.abs(np.dot(A, opt_sol) - b) < abs_tol)[0] af, bf = proj_aff(C[E0, :], D[E0, :], b[E0], expected_dim=1) # stack ineq = np.hstack([af, np.zeros(D.shape[1])]) G = np.vstack([A, np.vstack([ineq, -ineq])]) h = np.hstack([b, np.hstack([bf, -bf])]) # shape m = G.shape[0] n = G.shape[1] # ht e = 1e-3 v = np.vstack([np.zeros([1, n]), np.eye(n)]).T v = v - np.array([np.mean(v, axis=1)]).T v = v * e ht = h + np.amin(-np.dot(G, v), axis=1) # stack H1 = np.hstack([G, -np.eye(m)]) H2 = np.hstack([G, np.zeros([m, m])]) H3 = np.hstack([np.zeros([m, n]), -np.eye(m)]) H = np.vstack([H1, np.vstack([H2, H3])]) h = np.hstack([ht, np.hstack([h, np.zeros(m)])]) c = np.hstack([np.zeros(n), np.ones(m)]) sol = solvers.lpsolve(c, H, h, solver='glpk') if not sol['status'] == "optimal": raise Exception( "unique_equalityset: LP returned status " + str(sol['status'])) opt_sol2 = np.array(sol['x']).flatten() x = opt_sol2[range(n)] s = opt_sol2[range(n, len(opt_sol2))] E = np.nonzero(s > abs_tol)[0] print(E) E = np.sort(E[np.nonzero(E < C.shape[0])]) # Check that they define the same projection at, bt = proj_aff(C[E, :], D[E, :], b[E]) if bt.size != 1 or np.sum(np.abs(at - af)) + np.abs(bt - bf) > abs_tol: raise Exception("unique_equalityset2: affine hulls not the same") return E def cheby_center(C, D, b): """Calculate Chebyshev center for the polytope `C x + D y <= b`. Input: `C, D, b`: Polytope parameters Output: `x_0, y_0`: The chebyshev centra `boolean`: True if a point could be found, False otherwise. 
""" d = C.shape[1] k = D.shape[1] A = np.hstack([C, D]) dim = np.shape(A)[1] c = - np.r_[np.zeros(dim), 1] norm2 = np.sqrt(np.sum(A * A, axis=1)) G = np.c_[A, norm2] sol = solvers.lpsolve(c, G, h=b, solver='glpk') if sol['status'] == "optimal": opt = np.array(sol['x'][0:-1]).flatten() return opt[range(d)], opt[range(d, d + k)], True else: return np.zeros(d), np.zeros(k), False def normalize(AA, bb, abs_tol=1e-7): """Normalize `A x = b` such that `A'A = 1` and `b > 0`. Also, remove duplicate lines. """ if AA.size == 0: return AA, bb dim = AA.size / bb.size A = AA.copy().reshape(bb.size, dim) b = bb.copy().reshape(bb.size, 1) # Remove zero lines keepind = np.nonzero( np.sum(np.abs(np.hstack([A, b])), axis=1) > abs_tol)[0] A = A[keepind, :] b = b[keepind] # Normalize anorm = np.sqrt(np.sum(A * A, axis=1)) for i in range(len(anorm)): A[i, :] = A[i, :] * np.sign(b[i, 0]) / anorm[i] b[i, 0] = np.sign(b[i, 0]) * b[i, 0] / anorm[i] # Remove duplicate rows keep_row = [] for i in range(len(anorm)): unique = True for j in range(i + 1, len(anorm)): test = (np.sum(np.abs(A[i, :] - A[j, :])) + np.abs(b[i, 0] - b[j, 0])) if test < abs_tol: unique = False break if unique: keep_row.append(i) A_n = A[keep_row, :] b_n = b[keep_row, 0] # Return flat A if only one row if A_n.size == dim: A_n = A_n.flatten() return A_n, b_n.flatten() def rank(A, eps=1e-15): u, s, vh = linalg.svd(A) m = A.shape[0] n = A.shape[1] tol = np.amax([m, n]) * np.amax(s) * eps return np.sum(s > tol) def null_space(A, eps=1e-15, nonempty=False): """Returns the null space N_A to matrix A such that A N_A = 0.""" u, s, v = linalg.svd(A, full_matrices=1) m = A.shape[0] n = A.shape[1] tol = np.amax([m, n]) * np.amax(s) * eps rank = np.sum(s > tol) N_space = v[range(rank, n), :].T if nonempty and (len(N_space) == 0): N_space = v[range(np.amax(n - 1, 1), n), :] return N_space
26,643
32.984694
79
py
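The esp.py source above closes with the rank() and null_space() numerical helpers that the projection routines (shoot, ridge, adjacent) rely on. A minimal sketch of their behavior, assuming the installed package exposes them as polytope.esp (illustrative only, not part of the source file itself):

import numpy as np
from polytope.esp import null_space, rank

# A rank-deficient matrix: the second row is twice the first.
A = np.array([[1.0, 2.0, 3.0],
              [2.0, 4.0, 6.0]])

print(rank(A))                    # 1: singular values below the tolerance are dropped
N = null_space(A)                 # columns span {x | A x = 0}
print(N.shape)                    # (3, 2): the null space is 2-dimensional in R^3
print(np.allclose(A.dot(N), 0))   # True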
polytope
polytope-main/polytope/prop2partition.py
# Copyright (c) 2011-2014 by California Institute of Technology # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the California Institute of Technology nor # the names of its contributors may be used to endorse or promote # products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH # OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF # USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT # OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. """Proposition preserving partition module.""" import logging import warnings import numpy as np from scipy import sparse as sp import polytope as pc logger = logging.getLogger(__name__) _hl = 40 * '-' def find_adjacent_regions(partition): """Return region pairs that are spatially adjacent. @type partition: iterable container of L{Region} @rtype: lil_matrix """ n = len(partition) adj = sp.lil_matrix((n, n), dtype=np.int8) s = partition.regions for i, a in enumerate(s): adj[i, i] = 1 for j, b in enumerate(s[0:i]): adj[i, j] = adj[j, i] = pc.is_adjacent(a, b) return adj ################################ class Partition(object): """Partition of a set. A C{Partition} is an iterable container of sets over C{Partition.set} and these must implement the methods: - union, __add__ - difference - intersection - __le__ so the builtin class C{set} can be used for discrete sets, or custom classes (e.g. polytopes) can be used for sets equipped with more structure. To utilize additional structure, see L{MetricPartition}. """ def __init__(self, domain=None): """Partition over C{domain}. C{domain} is used to avoid conflicts with the python builtin set function. """ self.set = domain def __len__(self): return len(self.regions) def __iter__(self): return iter(self.regions) def __getitem__(self, key): return self.regions[key] def is_partition(self): """Return True if Regions are pairwise disjoint and cover domain. """ return self.is_cover() and self.are_disjoint() def is_cover(self): """Return True if Regions cover domain """ union = pc.Region() for region in self.regions: union += region if not self.domain <= union: msg = 'partition does not cover domain.' logger.Error(msg) warnings.warn(msg) return False else: return True def are_disjoint(self, check_all=False, fname=None): """Return True if all Regions are disjoint. 
Print: - the offending Regions and their - their intersection (mean) volume ratio - their difference (mean) volume ratio Optionally save numbered figures of: - offending Regions - their intersection - their difference @param check_all: don't return when first offending regions found, continue and check all pairs @type check_all: bool @param fname: path prefix where to save the debugging figures By default no figures are saved. @type fname: str """ logger.info('checking if PPP is a partition.') l, u = self.set.bounding_box ok = True for i, region in enumerate(self.regions): for j, other in enumerate(self.regions[0:i]): if pc.is_fulldim(region.intersect(other)): msg = 'PPP is not a partition, regions: ' msg += str(i) + ' and: ' + str(j) msg += ' intersect each other.\n' msg += 'Offending regions are:\n' + 10 * '-' + '\n' msg += str(region) + 10 * '-' + '\n' msg += str(other) + 10 * '-' + '\n' isect = region.intersect(other) diff = region.diff(other) mean_volume = (region.volume + other.volume) / 2.0 overlap = 100 * isect.volume / mean_volume non_overlap = 100 * diff.volume / mean_volume msg += '|cap| = ' + str(overlap) + ' %\n' msg += '|diff| = ' + str(non_overlap) + '\n' logger.error(msg) if fname: print('saving') fname1 = fname + 'region' + str(i) + '.pdf' fname2 = fname + 'region' + str(j) + '.pdf' fname3 = ( fname + 'isect_' + str(i) + '_' + str(j) + '.pdf') fname4 = ( fname + 'diff_' + str(i) + '_' + str(j) + '.pdf') _save_region_plot(region, fname1, l, u) _save_region_plot(other, fname2, l, u) _save_region_plot(isect, fname3, l, u) _save_region_plot(diff, fname4, l, u) ok = False if not check_all: break return ok def refines(self, other): """Return True if each element is a subset of other. @type other: PropPreservingPartition """ for small in self: found_superset = False for big in other: if small <= big: found_superset = True break if not found_superset: return False return True def preserves(self, other): """Return True if it refines closure of C{other} under complement. Closure under complement is the union of C{other} with the collection of complements of its elements. This method checks the annotation of elements in C{self} with elements fro C{other}. """ for item in self._elements: # item subset of these sets for superset in item.supersets: if not item <= superset: return False # item subset of the complements of these sets for other_set in set(other).difference(item.supersets): if item.intersect(other_set): return False return True class MetricPartition(Partition): """Partition of a metric space. Includes adjacency information which abstracts the topology induced by the metric. Two subsets in the partition are called adjacent if the intersection of their closure is non-empty. If the space is also a measure space, then volume information is used for diagnostic purposes. """ def compute_adj(self): """Update the adjacency matrix by checking all region pairs. Uses L{polytope.is_adjacent}. 
""" n = len(self.regions) adj = sp.lil_matrix((n, n)) logger.info('computing adjacency from scratch...') for i, region0 in enumerate(self.regions): for j, region1 in enumerate(self.regions): if i == j: adj[i, j] = 1 continue if pc.is_adjacent(region0, region1): adj[i, j] = 1 adj[j, i] = 1 logger.info('regions: ' + str(i) + ', ' + str(j) + ', are adjacent.') logger.info('...done computing adjacency.') # check previous one to unmask errors if self.adj is not None: logger.info('checking previous adjacency...') ok = True row, col = adj.nonzero() for i, j in zip(row, col): assert(adj[i, j]) if adj[i, j] != self.adj[i, j]: ok = False msg = 'PPP adjacency matrix is incomplete, ' msg += 'missing: (' + str(i) + ', ' + str(j) + ')' logger.error(msg) row, col = self.adj.nonzero() for i, j in zip(row, col): assert(self.adj[i, j]) if adj[i, j] != self.adj[i, j]: ok = False msg = 'PPP adjacency matrix is incorrect, ' msg += 'has 1 at: (' + str(i) + ', ' + str(j) + ')' logger.error(msg) if not ok: logging.error('PPP had incorrect adjacency matrix.') logger.info('done checking previous adjacency.') else: ok = True logger.info('no previous adjacency found: ' + 'skip verification.') # update adjacency self.adj = adj return ok def _save_region_plot(region, fname, l, u): ax = region.plot() ax.set_xlim(l[0, 0], u[0, 0]) ax.set_ylim(l[1, 0], u[1, 0]) ax.figure.savefig(fname)
10,171
31.394904
74
py
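find_adjacent_regions() in prop2partition.py above only needs pc.is_adjacent on each pair of regions. A self-contained sketch of that same pairwise loop on three axis-aligned boxes, assuming the polytope package is installed (box2poly builds a hyperrectangle from a list of intervals):

import numpy as np
from scipy import sparse as sp
import polytope as pc

# Two unit boxes sharing the facet x = 1, plus one box that is separated.
regions = [
    pc.Region([pc.box2poly([[0.0, 1.0], [0.0, 1.0]])]),
    pc.Region([pc.box2poly([[1.0, 2.0], [0.0, 1.0]])]),
    pc.Region([pc.box2poly([[3.0, 4.0], [0.0, 1.0]])]),
]

n = len(regions)
adj = sp.lil_matrix((n, n), dtype=np.int8)
for i, a in enumerate(regions):
    adj[i, i] = 1
    for j, b in enumerate(regions[:i]):
        adj[i, j] = adj[j, i] = pc.is_adjacent(a, b)

print(adj.toarray())   # boxes 0 and 1 touch along x = 1; box 2 is isolated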
polytope
polytope-main/polytope/version.py
# Copyright (c) 2014 by California Institute of Technology # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the California Institute of Technology nor # the names of its contributors may be used to endorse or promote # products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH # OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF # USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT # OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. """polytope package version""" import os.path version_info = (0, 2, 4) version = '.'.join([str(x) for x in version_info]) # Append annotation to version string to indicate development versions. # # An empty (modulo comments and blank lines) commit_hash.txt is used # to indicate a release, in which case nothing is appended to version # string as defined above. path_to_hashfile = os.path.join(os.path.dirname(__file__), "commit_hash.txt") if os.path.exists(path_to_hashfile): commit_hash = "" with open(path_to_hashfile, "r") as f: for line in f: line = line.strip() if len(line) == 0 or line[0] == '#': # Ignore blank lines and comments, the latter being # any line that begins with #. continue # First non-blank line is assumed to be the commit hash commit_hash = line break if len(commit_hash) > 0: version += ".dev0+" + commit_hash else: version += ".dev0+unknown.commit"
2,615
39.875
77
py
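version.py above appends a development suffix to the base version string depending on the contents of commit_hash.txt. A condensed, hypothetical restatement of that rule (the function name and arguments below are illustrative, not part of the module):

def annotate(version, hashfile_exists, commit_hash):
    """Build the version string the way version.py describes."""
    if not hashfile_exists:
        return version + ".dev0+unknown.commit"
    if commit_hash:                         # first non-blank, non-comment line
        return version + ".dev0+" + commit_hash
    return version                          # empty file marks a release

print(annotate("0.2.4", False, ""))         # 0.2.4.dev0+unknown.commit
print(annotate("0.2.4", True, ""))          # 0.2.4  (a release)
print(annotate("0.2.4", True, "abc1234"))   # 0.2.4.dev0+abc1234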
polytope
polytope-main/polytope/polytope.py
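polytope.py, whose source follows, defines the core Polytope and Region classes and the geometric operations on them. A short usage sketch of that public surface, assuming the package and one of its supported LP solvers are installed:

import numpy as np
import polytope as pc

# Half-space form A x <= b of the unit square [0, 1] x [0, 1].
A = np.array([[ 1.0,  0.0],
              [-1.0,  0.0],
              [ 0.0,  1.0],
              [ 0.0, -1.0]])
b = np.array([1.0, 0.0, 1.0, 0.0])
p = pc.Polytope(A, b)

print([0.5, 0.5] in p)              # True: membership test via __contains__
print(p.chebXc, p.chebR)            # Chebyshev center (0.5, 0.5) and radius 0.5
q = p.translation(np.array([2.0, 0.0]))
r = p.union(q)                      # a Region holding both polytopes (nonconvex union)
print(len(r))                       # 2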
# -*- coding: utf-8 -*- # # Copyright (c) 2011-2014 by California Institute of Technology # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the California Institute of Technology nor # the names of its contributors may be used to endorse or promote # products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH # OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF # USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT # OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. # # # # Acknowledgement: # The overall structure of this library and the functions in the list # below are taken with permission from: # # M. Kvasnica, P. Grieder and M. Baotić, # Multi-Parametric Toolbox (MPT), # https://people.ee.ethz.ch/~mpt/2/ (Multi-Parametric Toolbox version 2.*) # https://web.archive.org/web/20121011103905/http://control.ee.ethz.ch/~mpt/ # # mldivide # region_diff # extreme # envelope # is_convex # bounding_box # intersect2 # projection_interhull # projection_exthull # r"""Computational geometry module for polytope computations. For linear programming the fastest installed solver is selected. To change this choice, see the module `polytope.solvers`. The structure of this module is based on \cite{MPT04}. """ import math import logging import warnings import numpy as np from polytope.solvers import lpsolve from polytope.esp import esp from polytope.quickhull import quickhull logger = logging.getLogger(__name__) # Nicer numpy output np.set_printoptions(precision=5, suppress=True) # global default absolute tolerance, # to enable changing it code w/o passing arguments, # so that magic methods can still be used ABS_TOL = 1e-7 # inline imports: # # import matplotlib as mpl # from matplotlib import pyplot as plt class Polytope(object): """A convex polytope, in half-space representation. The minimal vertex representation can be computed with the function `extreme`. A minimal half-space representation can be computed from a vertex representation with the function `qhull`. 
Attributes: - `A`: a numpy array for the hyperplane normals in hyperplane representation of a polytope - `b`: a numpy array for the hyperplane offsets in hyperplane representation of a polytope - `chebXc`: coordinates of chebyshev center (if calculated) - `chebR`: chebyshev radius (if calculated) - `bbox`: bounding box (if calculated) - `minrep`: if polytope is in minimal representation (after running reduce) - `normalize`: if True (default), normalize given A and b arrays; else, use A and b without modification. - `dim`: dimension - `volume`: volume, computed on first call Reference ========= https://en.wikipedia.org/wiki/Convex_polytope See Also ======== L{Region}, L{extreme}, L{qhull} """ def __init__( self, A=np.array([]), b=np.array([]), minrep=False, chebR=0, chebX=None, fulldim=None, volume=None, vertices=None, normalize=True): self.A = A.astype(float) self.b = b.astype(float).flatten() if A.size > 0 and normalize: # Normalize Anorm = np.sqrt(np.sum(A * A, 1)).flatten() pos = np.nonzero(Anorm > 1e-10)[0] self.A = self.A[pos, :] self.b = self.b[pos] Anorm = Anorm[pos] mult = 1 / Anorm for i in range(self.A.shape[0]): self.A[i, :] = self.A[i, :] * mult[i] self.b = self.b.flatten() * mult self.minrep = minrep self._chebXc = chebX self._chebR = chebR self.bbox = None self.fulldim = fulldim if volume is not None: self._set_volume(volume) else: self._volume = None self.vertices = vertices def __str__(self): """Return pretty-formatted H-representation of polytope.""" A, b = self.A, self.b A_rows = str(A).split('\n') n_rows = len(A_rows) # column vector from `b`, if not already one b_col = b.reshape(b.shape[0], 1) if len(b.shape) == 1 else b b_rows = str(b_col).split('\n') # place an "x" somewhere near the middle x_row = int((n_rows - 1) / 2) # where "x" is shown above = x_row below = (n_rows - x_row - 2) spacer = ' | ' last_middle = [spacer[1:]] if n_rows > 1 else [] middle = ( above * [spacer] + [' x <= '] + below * [spacer] + last_middle) assert len(middle) == n_rows, (middle, n_rows) # format lines lines = [A_rows[k] + middle[k] + b_rows[k] for k in range(n_rows)] output = 'Single polytope \n {lines}\n'.format( lines='\n '.join(lines)) return output def __len__(self): return 0 def __copy__(self): A = self.A.copy() b = self.b.copy() P = Polytope(A, b) P._chebXc = self._chebXc P._chebR = self._chebR P.minrep = self.minrep P.bbox = self.bbox P.fulldim = self.fulldim return P def __contains__(self, point): """Return `True` if `self` contains `point`. Boundary points are included. @param point: column vector, e.g., as `numpy.ndarray` @rtype: bool For multiple points, see the method `self.contains`. """ if not isinstance(point, np.ndarray): point = np.array(point) test = self.A.dot(point.flatten()) - self.b < ABS_TOL return np.all(test) def contains(self, points, abs_tol=ABS_TOL): """Return Boolean array of whether each point in `self`. Any point that satisfies all inequalities is contained in `self`. A tolerance is added, and strict inequality checked (<). Pass `abs_tol=0` to exclude the boundary. 
@param points: column vectors @rtype: bool, 1d array """ test = self.A.dot(points) - self.b[:, np.newaxis] < abs_tol return np.all(test, axis=0) def __eq__(self, other): return self <= other and other <= self def __ne__(self, other): return not self == other def __le__(self, other): return is_subset(self, other) def __ge__(self, other): return is_subset(other, self) def __bool__(self): return bool(self.volume > 0) __nonzero__ = __bool__ def union(self, other, check_convex=False): """Return union with Polytope or Region. For usage see function union. @type other: L{Polytope} or L{Region} @rtype: L{Region} """ return union(self, other, check_convex) def diff(self, other): """Return set difference with Polytope or Region. @type other: L{Polytope} or L{Region} @rtype: L{Region} """ return mldivide(self, other) def intersect(self, other, abs_tol=ABS_TOL): """Return intersection with Polytope or Region. @type other: L{Polytope}. @rtype: L{Polytope} or L{Region} """ if isinstance(other, Region): return other.intersect(self, abs_tol=abs_tol) if not isinstance(other, Polytope): msg = 'Polytope intersection defined only' msg += ' with other Polytope. Got instead: ' msg += str(type(other)) raise Exception(msg) if (not is_fulldim(self)) or (not is_fulldim(other)): return Polytope() if self.dim != other.dim: raise Exception("polytopes have different dimension") iA = np.vstack([self.A, other.A]) ib = np.hstack([self.b, other.b]) return reduce(Polytope(iA, ib), abs_tol=abs_tol) def translation(self, d): """Returns a copy of C{self} translated by the vector C{d}. Consult L{polytope.polytope._translate} for implementation details. @type d: 1d array """ newpoly = self.copy() _translate(newpoly, d) return newpoly def rotation(self, i=None, j=None, theta=None): """Returns a rotated copy of C{self}. Describe the plane of rotation and the angle of rotation (in radians) with i, j, and theta. i and j are the indices 0..N-1 of two of the identity basis vectors, and theta is the angle of rotation. Consult L{polytope.polytope._rotate} for more detail. @type i: int @type j: int @type theta: number """ newpoly = self.copy() _rotate(newpoly, i=i, j=j, theta=theta) return newpoly def copy(self): """Return copy of this Polytope.""" return self.__copy__() @classmethod def from_box(cls, intervals=[]): """Class method for easy construction of hyperrectangles. @param intervals: intervals [xi_min, xi_max], the cross-product of which defines the polytope as an N-dimensional hyperrectangle @type intervals: [ndim x 2] numpy array or list of lists:: [[x0_min, x0_max], [x1_min, x1_max], ... [xN_min, xN_max]] @return: hyperrectangle defined by C{intervals} @rtype: L{Polytope} """ if not isinstance(intervals, np.ndarray): try: intervals = np.array(intervals) except Exception: raise Exception('Polytope.from_box:' + 'intervals must be a numpy ndarray or ' + 'convertible as arg to numpy.array') if intervals.ndim != 2: raise Exception('Polytope.from_box: ' + 'intervals must be 2 dimensional') n = intervals.shape if n[1] != 2: raise Exception('Polytope.from_box: ' + 'intervals must have 2 columns') n = n[0] # a <= b for each interval ? if (intervals[:, 0] > intervals[:, 1]).any(): msg = 'Polytope.from_box: ' msg += 'Invalid interval in from_box method.\n' msg += 'First element of an interval must' msg += ' not be larger than the second.' 
raise Exception(msg) A = np.vstack([np.eye(n), -np.eye(n)]) b = np.hstack([intervals[:, 1], -intervals[:, 0]]) return cls(A, b, minrep=True) def project(self, dim, solver=None, abs_tol=ABS_TOL, verbose=0): """Return Polytope projection on selected subspace. For usage details see function: L{projection}. """ return projection(self, dim, solver, abs_tol, verbose) def scale(self, factor): """Multiply polytope by scalar factor. A x <= b, becomes: A x <= (factor * b) @type factor: float """ self.b = factor * self.b @property def dim(self): """Return Polytope dimension.""" try: return np.shape(self.A)[1] except Exception: return 0.0 @property def volume(self): if self._volume is None: self._volume = volume(self) return self._volume def _set_volume(self, polytope_volume): """Set the attribute `self._volume`. @param polytope_volume: nonnegative number """ if polytope_volume < 0.0: raise ValueError( '`polytope_volume` must be >= 0, given: {v}'.format( v=polytope_volume)) self._volume = float(polytope_volume) @property def chebR(self): r, xc = cheby_ball(self) return self._chebR @property def chebXc(self): r, xc = cheby_ball(self) return self._chebXc @property def cheby(self): return cheby_ball(self) @property def bounding_box(self): """Wrapper of L{polytope.bounding_box}. Computes the bounding box on first call. """ if self.bbox is None: self.bbox = bounding_box(self) return self.bbox def plot(self, ax=None, color=None, hatch=None, alpha=1.0, linestyle=None, linewidth=None, edgecolor=None): if self.dim != 2: raise Exception("Cannot plot polytopes of dimension other than 2") # Setting default values for plotting linestyle = linestyle or "dashed" linewidth = linewidth or 3 edgecolor = edgecolor or "black" ax = _newax(ax) if not is_fulldim(self): logger.error("Cannot plot empty polytope") return None if color is None: color = np.random.rand(3) poly = _get_patch( self, facecolor=color, hatch=hatch, alpha=alpha, linestyle=linestyle, linewidth=linewidth, edgecolor=edgecolor) ax.add_patch(poly) return ax def text(self, txt, ax=None, color='black'): """Plot text at chebyshev center.""" _plot_text(self, txt, ax, color) def _translate(polyreg, d): """Translate C{polyreg} by the vector C{d}. Modifies C{polyreg} in-place. @type d: 1d array """ if isinstance(polyreg, Polytope): # Translate hyperplanes polyreg.b = polyreg.b + np.dot(polyreg.A, d) else: # Translate subregions for poly in polyreg.list_poly: _translate(poly, d) # Translate bbox and cheby if polyreg.bbox is not None: polyreg.bbox = (polyreg.bbox[0] + d, polyreg.bbox[1] + d) if polyreg._chebXc is not None: polyreg._chebXc = polyreg._chebXc + d def _rotate(polyreg, i=None, j=None, u=None, v=None, theta=None, R=None): """Rotate C{polyreg} in-place. Return the rotation matrix. There are two types of rotation: simple and compound. For simple rotations, by definition, all motion can be projected as circles in a single plane; the other N - 2 dimensions are invariant. Therefore any simple rotation can be parameterized by its plane of rotation. Compound rotations are the combination of multiple simple rotations; they have more than one plane of rotation. For N > 3 dimensions, a compound rotation may be necessary to map one orientation to another (Euler's rotation theorem no longer applies). Use one of the following three methods to specify rotation. The first two can only express simple rotation, but simple rotations may be applied in a sequence to achieve a compound rotation. 
(1) Provide the indices 0..N-1 of the identity basis vectors, i and j, which define the plane of rotation and a radian angle of rotation, theta, between them. This method contructs the Givens rotation matrix. The right hand rule defines the positive rotation direction. (2) Provide two vectors, the two vectors define the plane of rotation and angle of rotation is TWICE the angle from the first vector, u, to the second vector, v. (3) Provide an N-by-N rotation matrix, R. WARNING: No checks are made to determine whether the provided transformation matrix is a valid rotation. Further Reading https://en.wikipedia.org/wiki/Plane_of_rotation @param polyreg: The polytope or region to be rotated. @type polyreg: L{Polytope} or L{Region} @param i: The first index describing the plane of rotation. @type i: int @param j: The second index describing the plane of rotation. @type j: int @param u: The first vector describing the plane of rotation. @type u: 1d array @param u: The second vector describing the plane of rotation. @type v: 1d array. @param theta: The radian angle to rotate the polyreg in the plane defined by i and j. @type theta: number @param R: A predefined rotation matrix. @type R: 2d array """ # determine the rotation matrix based on inputs if R is not None: logger.debug("rotate: R=\n{}".format(R)) if i is not None: raise ValueError(i) if j is not None: raise ValueError(j) if theta is not None: raise ValueError(theta) if u is not None: raise ValueError(u) if v is not None: raise ValueError(v) elif i is not None and j is not None and theta is not None: logger.info("rotate via indices and angle.") if R is not None: raise ValueError(R) if u is not None: raise ValueError(u) if v is not None: raise ValueError(v) if i == j: raise ValueError("Must provide two unique basis vectors.") R = givens_rotation_matrix(i, j, theta, polyreg.dim) elif u is not None and v is not None: logger.info("rotate via 2 vectors.") if R is not None: raise ValueError(R) if i is not None: raise ValueError(i) if j is not None: raise ValueError(j) if theta is not None: raise ValueError(theta) R = solve_rotation_ap(u, v) else: raise ValueError("R or (i and j and theta) or (u and v) " "must be defined.") if isinstance(polyreg, Polytope): # Ensure that half space is normalized before rotation n, p = _hessian_normal(polyreg.A, polyreg.b) # Rotate the hyperplane normals polyreg.A = np.inner(n, R) polyreg.b = p else: # Rotate subregions for poly in polyreg.list_poly: _rotate(poly, None, None, R=R) # transform bbox and cheby if polyreg.bbox is not None: polyreg.bbox = (np.inner(polyreg.bbox[0].T, R).T, np.inner(polyreg.bbox[1].T, R).T) if polyreg._chebXc is not None: polyreg._chebXc = np.inner(polyreg._chebXc, R) return R def givens_rotation_matrix(i, j, theta, N): """Return the Givens rotation matrix for an N-dimensional space.""" R = np.identity(N) c = np.cos(theta) s = np.sin(theta) R[i, i] = c R[j, j] = c R[i, j] = -s R[j, i] = s return R def solve_rotation_ap(u, v): r"""Return the rotation matrix for the rotation in the plane defined by the vectors u and v across TWICE the angle between u and v. This algorithm uses the Aguilera-Perez Algorithm \cite{Aguilera} to generate the rotation matrix. The algorithm works basically as follows: Starting with the Nth component of u, rotate u towards the (N-1)th component until the Nth component is zero. Continue until u is parallel to the 0th basis vector. Next do the same with v until it only has none zero components in the first two dimensions. 
The result will be something like this: [[u0, 0, 0 ... 0], [v0, v1, 0 ... 0]] Now it is trivial to align u with v. Apply the inverse rotations to return to the original orientation. NOTE: The precision of this method is limited by sin, cos, and arctan functions. """ # TODO: Assert vectors are non-zero and non-parallel aka exterior # product is non-zero N = u.size # the number of dimensions uv = np.stack([u, v], axis=1) # the plane of rotation M = np.identity(N) # stores the rotations for rorienting reference frame # ensure u has positive basis0 component if uv[0, 0] < 0: M[0, 0] = -1 M[1, 1] = -1 uv = M.dot(uv) # align uv plane with the basis01 plane and u with basis0. for c in range(2): for r in range(N - 1, c, -1): if uv[r, c] != 0: # skip rotations when theta will be zero theta = np.arctan2(uv[r, c], uv[r - 1, c]) Mk = givens_rotation_matrix(r, r - 1, theta, N) uv = Mk.dot(uv) M = Mk.dot(M) # rotate u onto v theta = 2 * np.arctan2(uv[1, 1], uv[0, 1]) logger.debug( "solve_rotation_ap: {d} degree rotation".format( d=180 * theta / np.pi)) R = givens_rotation_matrix(0, 1, theta, N) # perform M rotations in reverse order M_inverse = M.T R = M_inverse.dot(R.dot(M)) return R def _hessian_normal(A, b): """Normalize half space representation according to hessian normal form.""" L2 = np.reshape(np.linalg.norm(A, axis=1), (-1, 1)) # needs to be column if any(L2 == 0): raise ValueError('One of the rows of A is a zero vector.') n = A / L2 # hyperplane normals p = b / L2.flatten() # hyperplane distances from origin return n, p class Region(object): """A polytope, possibly nonconvex. Represented using a `list` of convex polytopes. Is usable as a container of polytopes. Attributes: - `list_poly`: list of Polytope objects - `props`: set of propositions inside region - `bbox`: if calculated, bounding box of region (see bounding_box) - `fulldim`: if calculated, boolean indicating whether region is fully dimensional - `dim`: dimension - `volume`: volume of region, calculated on first call - `chebXc`: coordinates of maximum chebyshev center (if calculated) - `chebR`: maximum chebyshev radius (if calculated) Reference ========= https://en.wikipedia.org/wiki/Polytope See Also ======== L{Polytope} """ def __init__(self, list_poly=None, props=None): if list_poly is None: list_poly = [] if props is None: props = set() if isinstance(list_poly, str): # Hack to be able to use the Region class also for discrete # problems. self.list_poly = list_poly self.props = set(props) else: if isinstance(list_poly, Region): dim = list_poly[0].dim for poly in list_poly: if poly.dim != dim: raise Exception("Region error:" " Polytopes must be of same dimension!") self.list_poly = list_poly[:] for poly in list_poly: if is_empty(poly): self.list_poly.remove(poly) self.props = set(props) self.bbox = None self.fulldim = None self._volume = None self._chebXc = None self._chebR = None def __iter__(self): return iter(self.list_poly) def __getitem__(self, key): return self.list_poly[key] def __str__(self): output = '' for i in range(len(self.list_poly)): output += '\t Polytope number ' + str(i + 1) + ':\n' poly_str = str(self.list_poly[i]) poly_str = poly_str.replace('\n', '\n\t\t') output += '\t ' + poly_str + '\n' output += '\n' return output def __len__(self): return len(self.list_poly) def __contains__(self, point): """Return `True` if `self` contains `point`. See `Polytope.__contains__`. 
""" if not isinstance(point, np.ndarray): point = np.array(point) return any(point in u for u in self.list_poly) def contains(self, points, abs_tol=ABS_TOL): """Return Boolean array of whether each point in `self`. See `Polytope.contains`. """ if not isinstance(points, np.ndarray): points = np.array(points) if points.shape[0] != self.dim: raise ValueError('points should be column vectors') contained = np.full(points.shape[1], False, dtype=bool) for poly in self.list_poly: contained = np.logical_or( poly.contains(points, abs_tol), contained) return contained def __eq__(self, other): return self <= other and other <= self def __ne__(self, other): return not self == other def __le__(self, other): return is_subset(self, other) def __ge__(self, other): return is_subset(other, self) def __add__(self, other): """Return union with Polytope or Region. Applies convex simplification if possible. To turn off this check, use Region.union @type other: L{Polytope} or L{Region} @rtype: L{Region} """ return union(self, other, check_convex=True) def __bool__(self): return bool(self.volume > 0) __nonzero__ = __bool__ def union(self, other, check_convex=False): """Return union with Polytope or Region. For usage see function union. @type other: L{Polytope} or L{Region} @rtype: L{Region} """ return union(self, other, check_convex) def __sub__(self, other): """Return set difference with Polytope or Region. @type other: L{Polytope} or L{Region} @rtype: L{Region} """ return mldivide(self, other) def diff(self, other): """Return set difference with Polytope or Region. @type other: L{Polytope} or L{Region} @rtype: L{Region} """ return mldivide(self, other) def __and__(self, other): """Return intersection with Polytope or Region. Absolute tolerance 1e-7 used. To select the absolute tolerance use method Region.intersect @type other: L{Polytope} or L{Region} @rtype: L{Polytope} or L{Region} """ return intersect(self, other) def intersect(self, other, abs_tol=ABS_TOL): """Return intersection with Polytope or Region. @type other: iterable container of L{Polytope}. @rtype: L{Region} """ if isinstance(other, Polytope): other = [other] P = Region() for poly0 in self: for poly1 in other: isect = poly0.intersect(poly1, abs_tol) rp, xp = isect.cheby if rp > abs_tol: P = union(P, isect, check_convex=True) return P def rotation(self, i=None, j=None, theta=None): """Returns a rotated copy of C{self}. Describe the plane of rotation and the angle of rotation (in radians) with i, j, and theta. i and j are the indices 0..N-1 of two of the identity basis vectors, and theta is the angle of rotation. Consult L{polytope.polytope._rotate} for more detail. @type i: int @type j: int @type theta: number """ newreg = self.copy() _rotate(newreg, i=i, j=j, theta=theta) return newreg def translation(self, d): """Returns a copy of C{self} translated by the vector C{d}. Consult L{polytope.polytope._translate} for implementation details. @type d: 1d array """ newreg = self.copy() _translate(newreg, d) return newreg def __copy__(self): """Return copy of this Region.""" return Region(list_poly=self.list_poly[:], props=self.props.copy()) def copy(self): """Return copy of this Region.""" return self.__copy__() @property def dim(self): """Return Region dimension.""" return np.shape(self.list_poly[0].A)[1] @property def volume(self): if self._volume is None: self._volume = volume(self) return self._volume def _set_volume(self, region_volume): """Set the attribute `self._volume`. 
@param region_volume: nonnegative number """ if region_volume < 0.0: raise ValueError( '`region_volume` must be >= 0, given: {v}'.format( v=region_volume)) self._volume = float(region_volume) @property def chebR(self): r, xc = cheby_ball(self) return self._chebR @property def chebXc(self): r, xc = cheby_ball(self) return self._chebXc @property def cheby(self): return cheby_ball(self) @property def bounding_box(self): """Wrapper of polytope.bounding_box. Computes the bounding box on first call. """ if self.bbox is None: self.bbox = bounding_box(self) return self.bbox def plot(self, ax=None, color=None, hatch=None, alpha=1.0, linestyle=None, linewidth=None, edgecolor=None): """Plot a `polytope` on axes `ax`.""" # TODO optional arg for text label if self.dim != 2: raise Exception("Cannot plot region of dimension other than 2") if not is_fulldim(self): logger.error("Cannot plot empty region") return None ax = _newax(ax) if color is None: color = np.random.rand(3) for poly2 in self.list_poly: # TODO hatched polytopes in same region poly2.plot(ax, color=color, hatch=hatch, alpha=alpha, linestyle=linestyle, linewidth=linewidth, edgecolor=edgecolor) return ax def text(self, txt, ax=None, color='black'): """Plot text at chebyshev center.""" _plot_text(self, txt, ax, color) def is_empty(polyreg): """Check if the description of a polytope is empty @param polyreg: L{Polytope} or L{Region} instance @return: Boolean indicating whether polyreg is empty """ n = len(polyreg) if len(polyreg) == 0: try: return len(polyreg.A) == 0 except Exception: return True else: N = np.zeros(n, dtype=int) for i in range(n): N[i] = is_empty(polyreg.list_poly[i]) if np.all(N): return True else: return False def is_fulldim(polyreg, abs_tol=ABS_TOL): """Check if a polytope or region has inner points. @param polyreg: L{Polytope} or L{Region} instance @return: Boolean that is True if inner points found, False otherwise. """ # logger.debug('is_fulldim') if polyreg.fulldim is not None: return polyreg.fulldim lenP = len(polyreg) if lenP == 0: rc, xc = cheby_ball(polyreg) status = rc > abs_tol else: status = np.zeros(lenP) for ii in range(lenP): rc, xc = cheby_ball(polyreg.list_poly[ii]) status[ii] = rc > abs_tol status = np.sum(status) status = status > 0 polyreg.fulldim = status return status def is_convex(reg, abs_tol=ABS_TOL): """Check if a region is convex. @type reg: L{Region} @return: result,envelope: result indicating if convex. if found to be convex the envelope describing the convex polytope is returned. """ if not is_fulldim(reg): return True if len(reg) == 0: return True outer = envelope(reg) if is_empty(outer): # Probably because input polytopes were so small and ugly.. return False, None Pl, Pu = reg.bounding_box Ol, Ou = outer.bounding_box bboxP = np.hstack([Pl, Pu]) bboxO = np.hstack([Ol, Ou]) if ( sum(abs(bboxP[:, 0] - bboxO[:, 0]) > abs_tol) > 0 or sum(abs(bboxP[:, 1] - bboxO[:, 1]) > abs_tol) > 0): return False, None if is_fulldim(outer.diff(reg)): return False, None else: return True, outer def is_inside(polyreg, point, abs_tol=ABS_TOL): """Return `point in polyreg`. @type point: `collections.abc.Sequence` or `numpy.ndarray` @rtype: bool """ warnings.warn( 'Write `point in polyreg` instead of ' 'calling this function.', DeprecationWarning) if not isinstance(point, np.ndarray): point = np.array(point) return polyreg.contains(point[:, np.newaxis], abs_tol)[0] def is_subset(small, big, abs_tol=ABS_TOL): r"""Return True if small \subseteq big. 
@type small: L{Polytope} or L{Region} @type big: L{Polytope} or L{Region} @rtype: bool """ for x in [small, big]: if not isinstance(x, (Polytope, Region)): msg = 'Not a Polytope or Region, got instead:\n\t' msg += str(type(x)) raise TypeError(msg) diff = small.diff(big) volume = diff.volume if volume < abs_tol: return True else: return False def reduce(poly, nonEmptyBounded=1, abs_tol=ABS_TOL): """Remove redundant inequalities from the hyperplane representation. Uses the algorithm described at [1], by solving one LP for each facet. [1] https://www.cs.mcgill.ca/~fukuda/soft/polyfaq/node24.html Warning: - nonEmptyBounded == 0 case is not tested much. @type poly: L{Polytope} or L{Region} @return: Reduced L{Polytope} or L{Region} object """ if isinstance(poly, Region): lst = [] for poly2 in poly.list_poly: red = reduce(poly2) if is_fulldim(red): lst.append(red) if len(lst) > 0: return Region(lst, poly.props) else: return Polytope() # is `poly` already in minimal representation ? if poly.minrep: return poly if not is_fulldim(poly): return Polytope() # `poly` isn't flat A_arr = poly.A b_arr = poly.b # Remove rows with b = inf keep_row = np.nonzero(poly.b != np.inf) A_arr = A_arr[keep_row] b_arr = b_arr[keep_row] neq = np.shape(A_arr)[0] # first eliminate the linearly dependent rows # corresponding to the same hyperplane # Normalize all rows a_norm = 1 / np.sqrt(np.sum(A_arr.T**2, 0)) a_normed = np.dot(A_arr.T, np.diag(a_norm)).T remove_row = [] for i in range(neq): for j in range(i + 1, neq): # If the product of two vectors are close to 1, # since they are both unit vectors, # they must represent parallel hyperplanes if np.dot(a_normed[i].T, a_normed[j]) > 1 - abs_tol: # Check which inequality that constrains the most b_in = b_arr[i] * a_norm[i] b_jn = b_arr[j] * a_norm[j] if b_in < b_jn: remove_row.append(j) else: remove_row.append(i) keep_row = np.setdiff1d(range(neq), remove_row).tolist() A_arr = A_arr[keep_row] b_arr = b_arr[keep_row] neq, nx = A_arr.shape if nonEmptyBounded: if neq <= nx + 1: return Polytope(A_arr, b_arr) # Now eliminate hyperplanes outside the bounding box if neq > 3 * nx: lb, ub = Polytope(A_arr, b_arr).bounding_box # Do a coordinate system translation such that the lower bound is # moved to the origin # A*(x-lb) <= b - A*lb # Relative to the origin, a row ai in A with only positive coefficients # represents an upper bound. If ai*(x1-lb) <= bi, # the hyperplane is above x1. # Hence, if ai*(ub-lb) <= bi, then the hyperplane at row i # does not intersect the bounding box. # The same holds for rows with negative coefficients multiplied with # the origin. Rows with both negative and positive coefficients # are a mixture of the two extremes. cand = ~ (np.dot((A_arr > 0) * A_arr, ub - lb) - (np.array([b_arr]).T - np.dot(A_arr, lb)) < -1e-4) A_arr = A_arr[cand.squeeze()] b_arr = b_arr[cand.squeeze()] neq, nx = A_arr.shape if nonEmptyBounded: if neq <= nx + 1: return Polytope(A_arr, b_arr) # Check for each inequality whether it is implied by # the other inequalities, i.e., is it redundant? 
del keep_row[:] for k in range(neq): # Setup object function to maximize the linear function # defined as current row of A matrix f = -A_arr[k, :] G = A_arr h = b_arr # Give some slack in the current inequality h[k] += 0.1 sol = lpsolve(f, G, h) h[k] -= 0.1 if sol['status'] == 0: # If the maximum is greater than the constraint of # the inequality, then the inequality constrains solutions # and thus the inequality is non-redundant obj = -sol['fun'] - h[k] if obj > abs_tol: keep_row.append(k) elif sol['status'] == 3: keep_row.append(k) polyOut = Polytope(A_arr[keep_row], b_arr[keep_row]) polyOut.minrep = True return polyOut def union(polyreg1, polyreg2, check_convex=False): """Compute the union of polytopes or regions @type polyreg1: L{Polytope} or L{Region} @type polyreg2: L{Polytope} or L{Region} @param check_convex: if True, look for convex unions and simplify @return: region of non-overlapping polytopes describing the union """ # logger.debug('union') if is_empty(polyreg1): return polyreg2 if is_empty(polyreg2): return polyreg1 if check_convex: s1 = intersect(polyreg1, polyreg2) if is_fulldim(s1): s2 = polyreg2.diff(polyreg1) s3 = polyreg1.diff(polyreg2) else: s2 = polyreg1 s3 = polyreg2 else: s1 = polyreg1 s2 = polyreg2 s3 = None lst = [] if len(s1) == 0: if not is_empty(s1): lst.append(s1) else: for poly in s1.list_poly: if not is_empty(poly): lst.append(poly) if len(s2) == 0: if not is_empty(s2): lst.append(s2) else: for poly in s2.list_poly: if not is_empty(poly): lst.append(poly) if s3 is not None: if len(s3) == 0: if not is_empty(s3): lst.append(s3) else: for poly in s3.list_poly: if not is_empty(poly): lst.append(poly) if check_convex: final = [] N = len(lst) if N > 1: # Check convexity for each pair of polytopes while N > 0: templist = [lst[0]] for ii in range(1, N): templist.append(lst[ii]) is_conv, env = is_convex(Region(templist)) if not is_conv: templist.remove(lst[ii]) for poly in templist: lst.remove(poly) cvxpoly = reduce(envelope(Region(templist))) if not is_empty(cvxpoly): final.append(reduce(cvxpoly)) N = len(lst) else: final = lst ret = Region(final) else: ret = Region(lst) return ret def cheby_ball(poly1): """Calculate Chebyshev radius and center for a polytope. The Chebyshev radius is defined here as the radius of a maximal inscribed ball of the given polytope. The center of a maximal ball is also returned, but note that unlike the radius, it is not necessarily unique. If input is a region, then a largest Chebyshev ball is returned. N.B., this function will return whatever it finds in attributes chebR and chbXc if not None, without (re)computing the Chebyshev ball. 
Example (low dimension): r1,x1 = cheby_ball(P) calculates the center and half the length of the longest line segment along the first coordinate axis inside polytope P @type poly1: L{Polytope} or L{Region} @return: rc,xc: Chebyshev radius rc (float) and center xc (numpy array) """ #logger.debug('cheby ball') if (poly1._chebXc is not None) and (poly1._chebR is not None): # In case chebyshev ball already calculated and stored return poly1._chebR, poly1._chebXc if isinstance(poly1, Region): maxr = 0 maxx = None for poly in poly1.list_poly: rc, xc = cheby_ball(poly) if rc > maxr: maxr = rc maxx = xc poly1._chebXc = maxx poly1._chebR = maxr return maxr, maxx if is_empty(poly1): return 0, None # `poly1` is nonempty r = 0 xc = None A = poly1.A c = np.negative(np.r_[np.zeros(np.shape(A)[1]), 1]) norm2 = np.sqrt(np.sum(A * A, axis=1)) G = np.c_[A, norm2] h = poly1.b sol = lpsolve(c, G, h) if sol['status'] == 0: r = sol['x'][-1] if r < 0: return 0, None xc = sol['x'][0:-1] else: # Polytope is empty poly1 = Polytope(fulldim=False) return 0, None poly1._chebXc = np.array(xc) poly1._chebR = np.double(r) return poly1._chebR, poly1._chebXc def _bounding_box_to_polytope(lower, upper): """Return a `Polytope` that represents the given bounding box. @param lower: corner point of the bounding box @param upper: corner point of the bounding box @rtype: `Polytope` """ intervals = [(a[0], b[0]) for a, b in zip(lower, upper)] return box2poly(intervals) def bounding_box(polyreg): """Return smallest hyperbox containing polytope or region. If polyreg.bbox is not None, then it is returned without update. @type polyreg: L{Polytope} or L{Region} @return: (l, u) where: - l = [x1min, x2min, ... xNmin] - u = [x1max, x2max, ... xNmax] @rtype: - l = 2d array - u = 2d array """ if polyreg.bbox is not None: return polyreg.bbox # For regions, calculate recursively for each # convex polytope and take maximum if isinstance(polyreg, Region): lenP = len(polyreg) dimP = polyreg.dim alllower = np.zeros([lenP, dimP]) allupper = np.zeros([lenP, dimP]) # collect lower and upper bounds for ii in range(lenP): bbox = polyreg.list_poly[ii].bounding_box ll, uu = bbox alllower[ii, :] = ll.T allupper[ii, :] = uu.T l = np.zeros([dimP, 1]) u = np.zeros([dimP, 1]) # compute endpoints for ii in range(dimP): l[ii] = min(alllower[:, ii]) u[ii] = max(allupper[:, ii]) polyreg.bbox = l, u return l, u # For a single convex polytope, solve an optimization problem (m, n) = np.shape(polyreg.A) In = np.eye(n) l = np.zeros([n, 1]) u = np.zeros([n, 1]) # lower corner for i in range(n): c = np.array(In[:, i]) G = polyreg.A h = polyreg.b sol = lpsolve(c, G, h) if sol['status'] == 0: x = sol['x'] l[i] = x[i] else: raise RuntimeError(( '`polytope.solvers.lpsolve` returned: {v}\n' 'its docstring describes return values' ).format( v=sol)) # upper corner for i in range(n): c = np.negative(np.array(In[:, i])) G = polyreg.A h = polyreg.b sol = lpsolve(c, G, h) if sol['status'] == 0: x = sol['x'] u[i] = x[i] else: raise RuntimeError(( '`polytope.solvers.lpsolve` returned: {v}\n' 'its docstring describes return values' ).format( v=sol)) polyreg.bbox = l, u return l, u def envelope(reg, abs_tol=ABS_TOL): """Compute envelope of a region. The envelope is the polytope defined by all "outer" inequalities a x < b such that {x | a x < b} intersection P = P for all polytopes P in the region. In other words we want to find all "outer" equalities of the region. 
If envelope can't be computed an empty polytope is returned @type reg: L{Region} @param abs_tol: Absolute tolerance for calculations @return: Envelope of input """ Ae = None be = None nP = len(reg.list_poly) for i in range(nP): poly1 = reg.list_poly[i] outer_i = np.ones(poly1.A.shape[0]) for ii in range(poly1.A.shape[0]): if outer_i[ii] == 0: # If inequality already discarded continue for j in range(nP): # Check for each polytope # if it intersects with inequality ii if i == j: continue poly2 = reg.list_poly[j] testA = np.vstack([poly2.A, -poly1.A[ii, :]]) testb = np.hstack([poly2.b, -poly1.b[ii]]) testP = Polytope(testA, testb) rc, xc = cheby_ball(testP) if rc > abs_tol: # poly2 intersects with inequality ii -> this inequality # can not be in envelope outer_i[ii] = 0 ind_i = np.nonzero(outer_i)[0] if Ae is None: Ae = poly1.A[ind_i, :] be = poly1.b[ind_i] else: Ae = np.vstack([Ae, poly1.A[ind_i, :]]) be = np.hstack([be, poly1.b[ind_i]]) ret = reduce(Polytope(Ae, be), abs_tol=abs_tol) if is_fulldim(ret): return ret else: return Polytope() count = 0 def mldivide(a, b, save=False): r"""Return set difference a \ b. @param a: L{Polytope} or L{Region} @param b: L{Polytope} to subtract @return: L{Region} describing the set difference """ if isinstance(b, Polytope): b = Region([b]) if isinstance(a, Region): logger.debug('mldivide got Region as minuend') P = Region() for poly in a: #assert(not is_fulldim(P.intersect(poly) ) ) Pdiff = poly for poly1 in b: Pdiff = mldivide(Pdiff, poly1, save=save) P = union(P, Pdiff, check_convex=True) if save: global count count = count + 1 # dump plot of `Pdiff` ax = Pdiff.plot() ax.axis([0.0, 1.0, 0.0, 2.0]) ax.figure.savefig('./img/Pdiff' + str(count) + '.pdf') # dump plot of `P` ax = P.plot() ax.axis([0.0, 1.0, 0.0, 2.0]) ax.figure.savefig('./img/P' + str(count) + '.pdf') elif isinstance(a, Polytope): logger.debug('a is Polytope') P = region_diff(a, b) else: raise Exception('a neither Region nor Polytope') return P def intersect(poly1, poly2, abs_tol=ABS_TOL): """Compute the intersection between two polytopes or regions @type poly1: L{Polytope} or L{Region} @type poly2: L{Polytope} or L{Region} @return: Intersection of poly1 and poly2 described by a polytope """ # raise NotImplementedError('Being removed, # use {Polytope, Region}.intersect instead') if isinstance(poly1, Region): return poly1.intersect(poly2, abs_tol=abs_tol) if isinstance(poly2, Region): return poly2.intersect(poly1, abs_tol=abs_tol) if not isinstance(poly1, Polytope): msg = 'poly1 not Region nor Polytope.' msg += 'Got instead: ' + str(type(poly1)) raise Exception(msg) return poly1.intersect(poly2, abs_tol) def volume(polyreg, nsamples=None, seed=None): """Approximately compute the volume of a Polytope or Region. A randomized algorithm is used. @type polyreg: L{Polytope} or L{Region} @param nsamples: number of samples to generate to use for estimating volume @type nsamples: positive integer @param seed: initialization for the random number generator. Passed as argument to the parameter `seed` of the function `numpy.random.default_rng`, read the docstring of that function for details. The seed can be used for reproducible volume computations. The documentation of the class `numpy.random.SeedSequence` includes useful recommendations for how to initialize a random generator automatically, and record the seed for reusing it. @return: Volume of input """ if not is_fulldim(polyreg): return 0.0 if polyreg._volume is not None: logger.debug('recomputing polytope volume...') # `Region` ? 
if isinstance(polyreg, Region): tot_vol = 0.0 for i in range(len(polyreg)): tot_vol += volume(polyreg.list_poly[i]) polyreg._set_volume(tot_vol) return tot_vol # `polyreg` is a `Polytope` n = polyreg.A.shape[1] if n == 1: N = 50 elif n == 2: N = 500 elif n == 3: N = 3000 else: N = 10000 if nsamples is not None and nsamples < 1: raise ValueError( '`nsamples` must be >= 1, given: {v}'.format( v=nsamples)) if nsamples is not None: N = nsamples if N != int(N): raise ValueError(( 'it appears that a noninteger number of samples ' 'has been given, namely: {v}' ).format( v=nsamples)) l_b, u_b = polyreg.bounding_box x = (np.tile(l_b, (1, N)) + np.random.default_rng(seed).random((n, N)) * np.tile(u_b - l_b, (1, N))) aux = (np.dot(polyreg.A, x) - np.tile(np.array([polyreg.b]).T, (1, N))) aux = np.nonzero(np.all(aux < 0, 0))[0].shape[0] vol = np.prod(u_b - l_b) * aux / N polyreg._set_volume(vol) return vol def extreme(poly1): """Compute the extreme points of a _bounded_ polytope @param poly1: Polytope in dimension d @return: A (N x d) numpy array containing the N vertices of poly1 """ if poly1.vertices is not None: # In case vertices already stored return poly1.vertices V = np.array([]) R = np.array([]) if isinstance(poly1, Region): raise Exception("extreme: not executable for regions") # `poly1` is a `Polytope` poly1 = reduce(poly1) # Need to have polytope non-redundant! if not is_fulldim(poly1): return None # `poly1` isn't flat A = poly1.A.copy() b = poly1.b.copy() sh = np.shape(A) nc = sh[0] nx = sh[1] # distinguish cases by dimension if nx == 1: # Polytope is a 1-dim line for ii in range(nc): V = np.append(V, b[ii] / A[ii]) if len(A) == 1: R = np.append(R, 1) raise Exception("extreme: polytope is unbounded") elif nx == 2: # Polytope is 2D alf = np.angle(A[:, 0] + 1j * A[:, 1]) I = np.argsort(alf) H = np.vstack([A, A[0, :]]) K = np.hstack([b, b[0]]) I = np.hstack([I, I[0]]) for ii in range(nc): HH = np.vstack([H[I[ii], :], H[I[ii + 1], :]]) KK = np.hstack([K[I[ii]], K[I[ii + 1]]]) if np.linalg.cond(HH) == np.inf: R = np.append(R, 1) raise Exception("extreme: polytope is unbounded") else: try: v = np.linalg.solve(HH, KK) except Exception: msg = 'Finding extreme points failed, ' msg += 'Check if any unbounded Polytope ' msg += 'is causing this.' raise Exception(msg) if len(V) == 0: V = np.append(V, v) else: V = np.vstack([V, v]) else: # General nD method, # solve a vertex enumeration problem for # the dual polytope rmid, xmid = cheby_ball(poly1) A = poly1.A.copy() b = poly1.b.copy() sh = np.shape(A) Ai = np.zeros(sh) for ii in range(sh[0]): Ai[ii, :] = A[ii, :] / (b[ii] - np.dot(A[ii, :], xmid)) Q = reduce(qhull(Ai)) if not is_fulldim(Q): return None # `Q` isn't flat H = Q.A K = Q.b sh = np.shape(H) nx = sh[1] V = np.zeros(sh) for iv in range(sh[0]): for ix in range(nx): V[iv, ix] = H[iv, ix] / K[iv] + xmid[ix] a = V.size / nx if not a.is_integer(): raise AssertionError(a) a = int(a) poly1.vertices = V.reshape((a, nx)) return poly1.vertices def qhull(vertices, abs_tol=ABS_TOL): """Use quickhull to compute a convex hull. @param vertices: A N x d array containing N vertices in dimension d @return: L{Polytope} describing the convex hull """ A, b, vert = quickhull(vertices, abs_tol=abs_tol) if A.size == 0: return Polytope() return Polytope(A, b, minrep=True, vertices=vert) def projection(poly1, dim, solver=None, abs_tol=ABS_TOL, verbose=0): """Projects a polytope onto lower dimensions. 
Available solvers are: - "esp": Equality Set Projection; - "exthull": vertex projection; - "fm": Fourier-Motzkin projection; - "iterhull": iterative hull method. Example: To project the polytope `P` onto the first three dimensions, use >>> P_proj = projection(P, [1,2,3]) @param poly1: Polytope to project @param dim: Dimensions on which to project @param solver: A solver can be specified, if left blank an attempt is made to choose the most suitable solver. @param verbose: if positive, print solver used in case of guessing; default is 0 (be silent). @rtype: L{Polytope} @return: Projected polytope in lower dimension """ if isinstance(poly1, Region): ret = Polytope() for i in range(len(poly1.list_poly)): p = projection( poly1.list_poly[i], dim, solver=solver, abs_tol=abs_tol) ret = ret + p return ret # flat ? if (poly1.dim < len(dim)) or is_empty(poly1): return poly1 # `poly1` isn't flat poly_dim = poly1.dim dim = np.array(dim) org_dim = range(poly_dim) new_dim = dim.flatten() - 1 del_dim = np.setdiff1d(org_dim, new_dim) # Index of dimensions to remove # logging logger.debug('polytope dim = ' + str(poly_dim)) logger.debug('project on dims = ' + str(new_dim)) logger.debug('original dims = ' + str(org_dim)) logger.debug('dims to delete = ' + str(del_dim)) mA, nA = poly1.A.shape # fewer rows than dimensions ? if mA < poly_dim: msg = 'fewer rows in A: ' + str(mA) msg += ', than polytope dimension: ' + str(poly_dim) logger.warning(msg) # enlarge A, b with zeros A = poly1.A.copy() poly1.A = np.zeros((poly_dim, poly_dim)) poly1.A[0:mA, 0:nA] = A # stack poly1.b = np.hstack([poly1.b, np.zeros(poly_dim - mA)]) logger.debug('m, n = ' + str((mA, nA))) # Compute cheby ball in lower dim to see if projection exists norm = np.sum(poly1.A * poly1.A, axis=1).flatten() norm[del_dim] = 0 c = np.zeros(len(org_dim) + 1, dtype=float) c[len(org_dim)] = -1 G = np.hstack([poly1.A, norm.reshape(norm.size, 1)]) h = poly1.b sol = lpsolve(c, G, h) if sol['status'] != 0: # Projection not fulldim return Polytope() if sol['x'][-1] < abs_tol: return Polytope() # select projection solver if solver == "esp": return projection_esp(poly1, new_dim, del_dim) elif solver == "exthull": return projection_exthull(poly1, new_dim) elif solver == "fm": return projection_fm(poly1, new_dim, del_dim) elif solver == "iterhull": return projection_iterhull(poly1, new_dim) elif solver is not None: logger.warning('unrecognized projection solver "' + str(solver) + '".') # `solver` undefined or unknown # select method based on dimension criteria if len(del_dim) <= 2: logger.debug("projection: using Fourier-Motzkin.") return projection_fm(poly1, new_dim, del_dim) elif len(org_dim) <= 4: logger.debug("projection: using exthull.") return projection_exthull(poly1, new_dim) else: logger.debug("projection: using iterative hull.") return projection_iterhull(poly1, new_dim) def separate(reg1, abs_tol=ABS_TOL): """Divide a region into several regions such that they are all connected. 
@type reg1: L{Region} @param abs_tol: Absolute tolerance @return: List [] of connected Regions """ final = [] ind_left = range(len(reg1)) props = reg1.props while len(ind_left) > 0: ind_del = [] connected_reg = Region( [reg1.list_poly[ind_left[0]]], []) ind_del.append(ind_left[0]) for i in range(1, len(ind_left)): j = ind_left[i] if is_adjacent(connected_reg, reg1.list_poly[j]): connected_reg = union( connected_reg, reg1.list_poly[j], check_convex=False) ind_del.append(j) connected_reg.props = props.copy() final.append(connected_reg) ind_left = np.setdiff1d(ind_left, ind_del) return final def is_adjacent(poly1, poly2, overlap=True, abs_tol=ABS_TOL): """Return True if two polytopes or regions are adjacent. Check by enlarging both slightly and checking for intersection. @type poly1, poly2: L{Polytope}s or L{Region}s @param overlap: return True if polytopes are neighbors OR overlap @param abs_tol: absolute tolerance @return: True if polytopes are adjacent """ if poly1.dim != poly2.dim: raise Exception("is_adjacent: " "polytopes do not have the same dimension") if isinstance(poly1, Region): for p in poly1: adj = is_adjacent(p, poly2, overlap=overlap, abs_tol=abs_tol) if adj: return True return False if isinstance(poly2, Region): for p in poly2: adj = is_adjacent(poly1, p, overlap=overlap, abs_tol=abs_tol) if adj: return True return False # copy A1_arr = poly1.A.copy() A2_arr = poly2.A.copy() b1_arr = poly1.b.copy() b2_arr = poly2.b.copy() if overlap: b1_arr += abs_tol b2_arr += abs_tol dummy = Polytope( np.concatenate((A1_arr, A2_arr)), np.concatenate((b1_arr, b2_arr))) return is_fulldim(dummy, abs_tol=abs_tol / 10) else: M1 = np.concatenate((poly1.A, np.array([poly1.b]).T), 1).T M1row = 1 / np.sqrt(np.sum(M1**2, 0)) M1n = np.dot(M1, np.diag(M1row)) M2 = np.concatenate((poly2.A, np.array([poly2.b]).T), 1).T M2row = 1 / np.sqrt(np.sum(M2**2, 0)) M2n = np.dot(M2, np.diag(M2row)) if not np.any(np.dot(M1n.T, M2n) < -0.99): return False dummy = np.dot(M1n.T, M2n) row, col = np.nonzero(np.isclose(dummy, dummy.min())) for i, j in zip(row, col): b1_arr[i] += abs_tol b2_arr[j] += abs_tol dummy = Polytope( np.concatenate((A1_arr, A2_arr)), np.concatenate((b1_arr, b2_arr))) return is_fulldim(dummy, abs_tol=abs_tol / 10) def is_interior(r0, r1, abs_tol=ABS_TOL): """Return True if r1 is strictly in the interior of r0. Checks if r1 enlarged by abs_tol is a subset of r0. @type r0: L{Polytope} or L{Region} @type r1: L{Polytope} or L{Region} @rtype: bool """ if isinstance(r0, Polytope): r0 = Region([r0]) if isinstance(r1, Polytope): r1 = Region([r1]) for p in r1: A = p.A.copy() b = p.b.copy() + abs_tol dummy = Polytope(A, b) if not dummy <= r0: return True return False #### Helper functions #### def projection_fm(poly1, new_dim, del_dim, abs_tol=ABS_TOL): """Help function implementing Fourier Motzkin projection. Should work well for eliminating few dimensions. 
""" # Remove last dim first to handle indices del_dim = -np.sort(-del_dim) if not poly1.minrep: poly1 = reduce(poly1) poly = poly1.copy() for i in del_dim: positive = np.nonzero(poly.A[:, i] > abs_tol)[0] negative = np.nonzero(poly.A[:, i] < -abs_tol)[0] null = np.nonzero(np.abs(poly.A[:, i]) < abs_tol)[0] nr = len(null) + len(positive) * len(negative) nc = np.shape(poly.A)[0] C = np.zeros([nr, nc]) A = poly.A[:, i].copy() row = 0 for j in positive: for k in negative: C[row, j] = -A[k] C[row, k] = A[j] row += 1 for j in null: C[row, j] = 1 row += 1 keep_dim = np.setdiff1d( range(poly.A.shape[1]), np.array([i])) poly = Polytope( np.dot(C, poly.A)[:, keep_dim], np.dot(C, poly.b)) if not is_fulldim(poly): return Polytope() poly = reduce(poly) return poly def projection_exthull(poly1, new_dim): """Help function implementing vertex projection. Efficient in low dimensions. """ vert = extreme(poly1) if vert is None: # qhull failed return Polytope(fulldim=False, minrep=True) return reduce(qhull(vert[:, new_dim])) def projection_iterhull(poly1, new_dim, max_iter=1000, verbose=0, abs_tol=ABS_TOL): """Helper function implementing the "iterative hull" method. Works best when projecting _to_ lower dimensions. """ r, xc = cheby_ball(poly1) org_dim = poly1.A.shape[1] logger.debug("Starting iterhull projection from dim " + str(org_dim) + " to dim " + str(len(new_dim))) if len(new_dim) == 1: f1 = np.zeros(poly1.A.shape[1]) f1[new_dim] = 1 sol = lpsolve(f1, poly1.A, poly1.b) if sol['status'] == 0: vert1 = sol['x'] else: raise RuntimeError(( '`polytope.solvers.lpsolve` returned: {v}\n' 'its docstring describes return values' ).format( v=sol)) sol = lpsolve(np.negative(f1), poly1.A, poly1.b) if sol['status'] == 0: vert2 = sol['x'] else: raise RuntimeError(( '`polytope.solvers.lpsolve` returned: {v}\n' 'its docstring describes return values' ).format( v=sol)) vert = np.vstack([vert1, vert2]) return qhull(vert, abs_tol=abs_tol) else: OK = False cnt = 0 Vert = None while not OK: # Maximizing in random directions # to find a starting simplex cnt += 1 if cnt > max_iter: raise Exception("iterative_hull: " "could not find starting simplex") f1 = np.random.rand(len(new_dim)).flatten() - 0.5 f = np.zeros(org_dim) f[new_dim] = f1 sol = lpsolve(np.negative(f), poly1.A, poly1.b) xopt = np.array(sol['x']).flatten() if Vert is None: Vert = xopt.reshape(1, xopt.size) else: k = np.nonzero(Vert[:, new_dim[0]] == xopt[new_dim[0]])[0] for j in new_dim[range(1, len(new_dim))]: ii = np.nonzero(Vert[k, j] == xopt[j])[0] k = k[ii] if k.size == 0: break if k.size == 0: Vert = np.vstack([Vert, xopt]) if Vert.shape[0] > len(new_dim): u, s, v = np.linalg.svd( np.transpose(Vert[:, new_dim] - Vert[0, new_dim])) rank = np.sum(s > abs_tol * 10) if rank == len(new_dim): # If rank full we have found a starting simplex OK = True logger.debug("Found starting simplex after " + str(cnt) + " iterations") cnt = 0 P1 = qhull(Vert[:, new_dim], abs_tol=abs_tol) HP = None while True: # Iteration: # Maximaze in direction of each facet # Take convex hull of all vertices cnt += 1 if cnt > max_iter: raise Exception("iterative_hull: " "maximum number of iterations reached") logger.debug("Iteration number " + str(cnt)) for ind in range(P1.A.shape[0]): f1 = np.round(P1.A[ind, :] / abs_tol) * abs_tol f2 = np.hstack([np.round(P1.A[ind, :] / abs_tol) * abs_tol, np.round(P1.b[ind] / abs_tol) * abs_tol]) # See if already stored k = np.array([]) if HP is not None: k = np.nonzero(HP[:, 0] == f2[0])[0] for j in range(1, np.shape(P1.A)[1] + 1): ii = np.nonzero(HP[k, 
j] == f2[j])[0] k = k[ii] if k.size == 0: break if k.size == 1: # Already stored xopt = HP[ k, range( np.shape(P1.A)[1] + 1, np.shape(P1.A)[1] + np.shape(Vert)[1] + 1) ] else: # Solving optimization to find new vertex f = np.zeros(poly1.A.shape[1]) f[new_dim] = f1 sol = lpsolve(np.negative(f), poly1.A, poly1.b) if sol['status'] != 0: logger.error("iterhull: LP failure") continue xopt = np.array(sol['x']).flatten() add = np.hstack([f2, np.round(xopt / abs_tol) * abs_tol]) # Add new half plane information # HP format: [ P1.Ai P1.bi xopt] if HP is None: HP = add.reshape(1, add.size) else: HP = np.vstack([HP, add]) Vert = np.vstack([Vert, xopt]) logger.debug("Taking convex hull of new points") P2 = qhull(Vert[:, new_dim], abs_tol=abs_tol) logger.debug("Checking if new points are inside convex hull") OK = 1 for i in range(np.shape(Vert)[0]): if not P1.contains(np.transpose([Vert[i, new_dim]]), abs_tol=abs_tol): # If all new points are inside # old polytope -> Finished OK = 0 break if OK == 1: logger.debug("Returning projection after " + str(cnt) + " iterations\n") return P2 else: # Iterate P1 = P2 def projection_esp(poly1, keep_dim, del_dim): """Helper function implementing "Equality set projection". CAUTION: Very buggy. """ C = poly1.A[:, keep_dim] D = poly1.A[:, del_dim] if not is_fulldim(poly1): return Polytope() G, g, E = esp(C, D, poly1.b) return Polytope(G, g) def region_diff(poly, reg, abs_tol=ABS_TOL, intersect_tol=ABS_TOL, save=False): """Subtract a region from a polytope @param poly: polytope from which to subtract a region @param reg: region which should be subtracted @param abs_tol: absolute tolerance @return: polytope or region containing non-overlapping polytopes """ if not isinstance(poly, Polytope): raise Exception('poly not a Polytope, but: ' + str(type(poly))) poly = poly.copy() if isinstance(reg, Polytope): reg = Region([reg]) if not isinstance(reg, Region): raise Exception('reg not a Region, but: ' + str(type(reg))) N = len(reg) if N == 0: # Hack if reg happens to be a polytope reg = Region([reg]) N = 1 if is_empty(reg): return poly if is_empty(poly): return Polytope() # Checking intersections to find Polytopes in Region # that intersect the Polytope Rc = np.zeros(N) for i, poly1 in enumerate(reg): A_dummy = np.vstack([poly.A, poly1.A]) b_dummy = np.hstack([poly.b, poly1.b]) dummy = Polytope(A_dummy, b_dummy) Rc[i], xc = cheby_ball(dummy) N = np.sum(Rc >= intersect_tol) if N == 0: logger.debug('no Polytope in the Region intersects the given Polytope') return poly # Sort radii Rc = -Rc ind = np.argsort(Rc) #val = Rc[ind] A = poly.A.copy() B = poly.b.copy() H = A.copy() K = B.copy() m = np.shape(A)[0] mi = np.zeros(N, dtype=int) # Finding constraints that are not in original polytope HK = np.hstack([H, np.array([K]).T]) for ii in range(N): i = ind[ii] if not is_fulldim(reg.list_poly[i]): continue Hni = reg.list_poly[i].A.copy() Kni = reg.list_poly[i].b.copy() for j in range(np.shape(Hni)[0]): HKnij = np.hstack([Hni[j, :], Kni[j]]) HK2 = np.tile(HKnij, [m, 1]) abs = np.abs(HK - HK2) # is the constraint `HKnij` not in the original polytope ? 
if np.all(np.sum(abs, axis=1) >= abs_tol): mi[ii] = mi[ii] + 1 A = np.vstack([A, Hni[j, :]]) B = np.hstack([B, Kni[j]]) # If some Ri has no active constraints, Ri covers R if np.any(mi == 0): return Polytope() # some constraints are active M = np.sum(mi) if len(mi[0:len(mi) - 1]) > 0: csum = np.cumsum(np.hstack([0, mi[0:len(mi) - 1]])) beg_mi = csum + m * np.ones(len(csum), dtype=int) else: beg_mi = np.array([m]) A = np.vstack([A, -A[range(m, m + M), :]]) B = np.hstack([B, -B[range(m, m + M)]]) counter = np.zeros([N, 1], dtype=int) INDICES = np.arange(m, dtype=int) level = 0 res_count = 0 res = Polytope() # Initialize output while level != -1: if save: if res: ax = res.plot() ax.axis([0.0, 1.0, 0.0, 2.0]) ax.figure.savefig('./img/res' + str(res_count) + '.pdf') res_count += 1 if counter[level] == 0: if save: logger.debug('counter[level] is 0') for j in range(level, N): auxINDICES = np.hstack([ INDICES, range(beg_mi[j], beg_mi[j] + mi[j]) ]) Adummy = A[auxINDICES, :] bdummy = B[auxINDICES] R, xopt = cheby_ball(Polytope(Adummy, bdummy)) if R > abs_tol: level = j counter[level] = 1 INDICES = np.hstack([INDICES, beg_mi[level] + M]) break if R < abs_tol: level = level - 1 res = union(res, Polytope(A[INDICES, :], B[INDICES]), False) nzcount = np.nonzero(counter)[0] for jj in range(len(nzcount) - 1, -1, -1): if counter[level] <= mi[level]: INDICES[len(INDICES) - 1] = INDICES[len(INDICES) - 1] - M INDICES = np.hstack([ INDICES, beg_mi[level] + counter[level] + M ]) break else: counter[level] = 0 INDICES = INDICES[0:m + sum(counter)] if level == -1: logger.debug('returning res from 1st point') return res else: if save: logger.debug('counter[level] > 0') # counter(level) > 0 nzcount = np.nonzero(counter)[0] for jj in range(len(nzcount) - 1, -1, -1): level = nzcount[jj] counter[level] = counter[level] + 1 if counter[level] <= mi[level]: INDICES[len(INDICES) - 1] = INDICES[len(INDICES) - 1] - M INDICES = np.hstack([ INDICES, beg_mi[level] + counter[level] + M - 1 ]) break else: counter[level] = 0 INDICES = INDICES[0:m + np.sum(counter)] level = level - 1 if level == -1: if save: if save: if res: ax = res.plot() ax.axis([0.0, 1.0, 0.0, 2.0]) ax.figure.savefig('./img/res_returned' + str(res_count) + '.pdf') logger.debug('returning res from 2nd point') return res test_poly = Polytope(A[INDICES, :], B[INDICES]) rc, xc = cheby_ball(test_poly) if rc > abs_tol: if level == N - 1: res = union(res, reduce(test_poly), False) else: level = level + 1 logger.debug('returning res from end') return res def num_bin(N, places=8): """Return N as list of bits, zero-filled to places. E.g., given N=7, num_bin returns [1, 1, 1, 0, 0, 0, 0, 0]. """ return [(N >> k) & 0x1 for k in range(places)] def box2poly(box): """Return new Polytope from box. @param box: defining the Polytope @type box: [[x1min, x1max], [x2min, x2max],...] """ return Polytope.from_box(box) def _get_patch(poly1, **kwargs): """Return matplotlib patch for given Polytope. 
Example:: > # Plot Polytope objects poly1 and poly2 in the same plot > import matplotlib.pyplot as plt > fig = plt.figure() > ax = fig.add_subplot(111) > p1 = _get_patch(poly1, color="blue") > p2 = _get_patch(poly2, color="yellow") > ax.add_patch(p1) > ax.add_patch(p2) > ax.set_xlim(xl, xu) # Optional: set axis max/min > ax.set_ylim(yl, yu) > plt.show() @type poly1: L{Polytope} @param kwargs: any keyword arguments valid for matplotlib.patches.Polygon """ import matplotlib as mpl V = extreme(poly1) rc, xc = cheby_ball(poly1) x = V[:, 1] - xc[1] y = V[:, 0] - xc[0] mult = np.sqrt(x**2 + y**2) x = x / mult angle = np.arccos(x) corr = np.ones(y.size) - 2 * (y < 0) angle = angle * corr ind = np.argsort(angle) # create patch patch = mpl.patches.Polygon(V[ind, :], True, **kwargs) patch.set_zorder(0) return patch def enumerate_integral_points(poly): """Return all points in `poly` that have integer coordinates. @param poly: polytope @type poly: `polytope.polytope.Region` or `polytope.polytope.Polytope` @return: coordinates of `m` points as `d X m` array, where `d` the dimension @rtype: `numpy.ndarray` """ a, b = poly.bounding_box a_int = np.floor(a) b_int = np.ceil(b) intervals = list(zip(a_int.flatten(), b_int.flatten())) box = box2poly(intervals) res = [int(b - a + 1) for a, b in intervals] grid, _ = grid_region(box, res=res) inside = poly.contains(grid) return grid[:, inside] def grid_region(polyreg, res=None): """Return bounding box grid points within `polyreg`. @type polyreg: L{Polytope} or L{Region} @param res: resolution of grid, i.e., how many points the grid has per dimension, before filtering based on which points are contained in the polytope @type res: `list` of `int` >= 1, with `len` equal to `polyreg.dim` """ # grid corners bbox = polyreg.bounding_box # grid resolution if res is None: density = 8 res = [ math.ceil(density * (b - a)) for a, b in zip(*bbox)] if len(res) != polyreg.dim: raise ValueError(( "`len(res)` must equal the polytope's dimension " "(which is {dim}), but instead `res` is: {res}" ).format(dim=polyreg.dim, res=res)) if any(n < 1 for n in res): raise ValueError(( '`res` must contain `int` values >= 1, ' 'instead `res` equals: {res}' ).format(res=res)) linspaces = list() for a, b, n in zip(*bbox, res): r = np.linspace(a, b, num=n) linspaces.append(r) points = np.meshgrid(*linspaces) x = np.vstack(list(map(np.ravel, points))) x = x[:, polyreg.contains(x)] return (x, res) def _plot_text(polyreg, txt, ax, color): """Annotate center of Chebyshev ball with `txt`.""" ax = _newax(ax) rc, xc = cheby_ball(polyreg) ax.text(xc[0], xc[1], txt, color=color) def _newax(ax=None): """Add subplot to current figure and return axes.""" from matplotlib import pyplot as plt if ax is not None: return ax fig = plt.figure() ax = fig.add_subplot(1, 1, 1) return ax def simplices2polytopes(points, triangles): """Convert a simplicial mesh to polytope H-representation. @type points: N x d @type triangles: NT x 3 References ========== https://en.wikipedia.org/wiki/Simplex https://en.wikipedia.org/wiki/Triangle_mesh """ polytopes = [] for triangle in triangles: logger.debug('Triangle: ' + str(triangle)) triangle_vertices = points[triangle, :] logger.debug('\t triangle points: ' + str(triangle_vertices)) poly = qhull(triangle_vertices) logger.debug('\n Polytope:\n:' + str(poly)) polytopes += [poly] return polytopes
79,593
31.876497
111
py
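A minimal usage sketch for the geometric queries defined in polytope.py above (box2poly, cheby_ball, bounding_box, volume, projection). The box corners, sample count, and projected dimension are illustrative assumptions, not values taken from the repository.

import polytope as pc

# An axis-aligned box [0, 1] x [0, 2] in H-representation (A x <= b); illustrative values.
p = pc.box2poly([[0.0, 1.0], [0.0, 2.0]])

rc, xc = pc.cheby_ball(p)          # radius and center of the largest inscribed ball
l, u = pc.bounding_box(p)          # tight axis-aligned bounds, returned as column arrays
vol = pc.volume(p, nsamples=10000, seed=0)   # randomized estimate, close to 2.0 here

# Project onto the first coordinate; dimensions are 1-indexed, as in the projection docstring.
p1 = pc.projection(p, [1])
print(rc, xc, vol, p1.dim)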
polytope
polytope-main/polytope/plot.py
# Copyright (c) 2011-2014 by California Institute of Technology # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the California Institute of Technology nor # the names of its contributors may be used to endorse or promote # products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH # OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF # USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT # OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. # """Functions for plotting Partitions.""" import logging import numpy as np import networkx as nx from . import polytope as pc # inline imports: # # import matplotlib as mpl logger = logging.getLogger(__name__) def plot_partition( ppp, trans=None, ppp2trans=None, only_adjacent=False, ax=None, plot_numbers=True, color_seed=None): """Plot partition with arrows from digraph. For filtering edges based on label use L{plot_ts_on_partition}. See Also ======== L{abstract.prop2partition.PropPreservingPartition}, L{plot_trajectory} @type ppp: L{PropPreservingPartition} @param trans: Transition matrix. If used, then transitions in C{ppp} are shown with arrows. Otherwise C{ppp.adj} is plotted. To plot C{ppp.adj}, pass: trans = True @param plot_numbers: If True, then annotate each Region center with its number. @param ax: axes where to plot @param color_seed: seed for reproducible random coloring @param ppp2trans: order mapping ppp indices to trans states @type ppp2trans: list of trans states """ import matplotlib as mpl # needs to be converted to adjacency matrix ? if isinstance(trans, nx.MultiDiGraph): if trans is not None and ppp2trans is None: msg = ( 'trans is a networkx MultiDiGraph, ' 'so ppp2trans required to define state order,\n' 'used when converting the graph to ' 'an adjacency matrix.') raise Exception(msg) trans = nx.to_numpy_array(trans, nodelist=ppp2trans) l, u = ppp.domain.bounding_box arr_size = (u[0, 0] - l[0, 0]) / 50.0 ax = pc._newax(ax) # no trans given: use partition's if trans is True and ppp.adj is not None: ax.set_title('Adjacency from Partition') trans = ppp.adj elif trans is None: trans = 'none' else: ax.set_title('Adjacency from given Transitions') ax.set_xlim(l[0, 0], u[0, 0]) ax.set_ylim(l[1, 0], u[1, 0]) # repeatable coloring ? 
if color_seed is not None: prng = np.random.RandomState(color_seed) else: prng = np.random.RandomState() # plot polytope patches for i, reg in enumerate(ppp.regions): # select random color, # same color for all polytopes in each region col = prng.rand(3) # single polytope or region ? reg.plot(color=col, ax=ax) if plot_numbers: reg.text(str(i), ax, color='black') # not show trans ? if trans is 'none': return ax # plot transition arrows between patches rows, cols = np.nonzero(trans) for i, j in zip(rows, cols): # mask non-adjacent cell transitions ? if only_adjacent: if ppp.adj[i, j] == 0: continue plot_transition_arrow( ppp.regions[i], ppp.regions[j], ax, arr_size) return ax def plot_transition_arrow(polyreg0, polyreg1, ax, arr_size=None): """Plot arrow starting from polyreg0 and ending at polyreg1. @type polyreg0: L{Polytope} or L{Region} @type polyreg1: L{Polytope} or L{Region} @param ax: axes where to plot @return: arrow object """ from matplotlib import patches # brevity p0 = polyreg0 p1 = polyreg1 rc0, xc0 = pc.cheby_ball(p0) rc1, xc1 = pc.cheby_ball(p1) if np.sum(np.abs(xc1 - xc0)) < 1e-7: return None if arr_size is None: l, u = polyreg1.bounding_box arr_size = (u[0, 0] - l[0, 0]) / 25.0 # TODO: 3d x = xc0[0] y = xc0[1] dx = xc1[0] - xc0[0] dy = xc1[1] - xc0[1] arrow = patches.Arrow( float(x), float(y), float(dx), float(dy), width=arr_size, color='black') ax.add_patch(arrow) return arrow
5,420
32.257669
73
py
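A small sketch of plot_transition_arrow from plot.py above, drawing an arrow between the Chebyshev centers of two adjacent boxes. The box coordinates and output filename are illustrative assumptions.

import matplotlib.pyplot as plt
from polytope import box2poly
from polytope.plot import plot_transition_arrow

# Two adjacent unit boxes (illustrative values).
p0 = box2poly([[0.0, 1.0], [0.0, 1.0]])
p1 = box2poly([[1.0, 2.0], [0.0, 1.0]])

fig, ax = plt.subplots()
ax.set_xlim(0.0, 2.0)
ax.set_ylim(0.0, 1.0)
plot_transition_arrow(p0, p1, ax)     # arrow from the center of p0 to the center of p1
fig.savefig('transition_arrow.png')   # placeholder filename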
polytope
polytope-main/polytope/__init__.py
# Copyright (c) 2011-2014 by California Institute of Technology # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the California Institute of Technology nor # the names of its contributors may be used to endorse or promote # products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH # OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF # USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT # OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. """Polytope package.""" from .version import version as __version__ from .polytope import ( Polytope, Region, is_empty, is_fulldim, is_convex, is_adjacent, is_subset, reduce, separate, box2poly, grid_region, cheby_ball, bounding_box, envelope, extreme, qhull, is_inside, union, mldivide, intersect, volume, projection ) # from .plot import plot_partition, plot_transition_arrow from .prop2partition import ( Partition, MetricPartition, find_adjacent_regions)
2,081
45.266667
73
py
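The names re-exported by __init__.py above can be combined for basic set operations; a brief sketch with illustrative box coordinates (the expected results are noted in comments, not asserted).

import polytope as pc

a = pc.box2poly([[0.0, 2.0], [0.0, 2.0]])
b = pc.box2poly([[1.0, 3.0], [0.0, 2.0]])

u = pc.union(a, b, check_convex=True)   # covers both boxes
i = pc.intersect(a, b)                  # the overlap, roughly [1, 2] x [0, 2]
d = pc.mldivide(a, b)                   # set difference a \ b, roughly [0, 1] x [0, 2]
print(pc.is_fulldim(i), pc.volume(d))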
polytope
polytope-main/polytope/solvers.py
# Copyright (c) 2011-2017 by California Institute of Technology # All rights reserved. Licensed under 3-clause BSD. """Interface to linear programming solvers. The `polytope` package selects the default solver as follows: 1. use GLPK if installed 2. otherwise use SciPy You can change this default at runtime by setting the variable `default_solver` in the module `solvers`. For example: ```python from polytope import solvers solvers.default_solver = 'scipy' # to inspect which solvers were successfully imported: print(solvers.installed_solvers) ``` Choose an installed solver to avoid errors. """ import logging import numpy as np from scipy import optimize logger = logging.getLogger(__name__) installed_solvers = {'scipy'} try: import cvxopt as cvx import cvxopt.glpk from cvxopt import matrix installed_solvers.add('glpk') # Hide optimizer output cvx.solvers.options['show_progress'] = False cvx.glpk.options['msg_lev'] = 'GLP_MSG_OFF' except ImportError: logger.warning( '`polytope` failed to import `cvxopt.glpk`.') try: import mosek installed_solvers.add('mosek') except ImportError: logger.info('MOSEK solver not found.') # choose default from installed choices if 'glpk' in installed_solvers: default_solver = 'glpk' elif 'scipy' in installed_solvers: default_solver = 'scipy' logger.warning('will use `scipy.optimize.linprog`') else: raise ValueError( "`installed_solvers` wasn't empty above?") def lpsolve(c, G, h, solver=None): """Try to solve linear program with given or default solver. Solvers: - `cvxopt.glpk`: identified by `'glpk'` - `scipy.optimize.linprog`: identified by `'scipy'` - MOSEK: identified by `'mosek'` @param solver: - `in {'glpk', 'mosek', 'scipy'}` - `None`: use the module's `default_solver` You can change the default choice of solver by setting the module variable `default_solver`. See the module's docstring for an example. @return: solution with status as in `scipy.optimize.linprog` @rtype: `dict(status=int, x=argmin, fun=min_value)` """ if solver is None: solver = default_solver if solver == 'glpk' or solver == 'mosek': result = _solve_lp_using_cvxopt(c, G, h, solver=solver) elif solver == 'scipy': result = _solve_lp_using_scipy(c, G, h) else: raise Exception( 'unknown LP solver "{s}".'.format(s=solver)) return result def _solve_lp_using_cvxopt(c, G, h, A=None, b=None, solver='glpk'): """Attempt linear optimization using `cvxopt.glpk` or MOSEK. @param solver: `in {'glpk', 'mosek'}` """ _assert_have_solver(solver) if A is not None: A = matrix(A) if b is not None: b = matrix(b) sol = cvx.solvers.lp( c=matrix(c), G=matrix(G), h=matrix(h), A=A, b=b, solver=solver) result = dict() if sol['status'] == 'optimal': result['status'] = 0 elif sol['status'] == 'primal infeasible': result['status'] = 2 elif sol['status'] == 'dual infeasible': result['status'] = 3 elif sol['status'] == 'unknown': result['status'] = 4 else: raise ValueError(( '`cvxopt.solvers.lp` returned unexpected ' 'status value: {v}').format(v=sol['status'])) # `cvxopt.solvers.lp` returns an array of shape `(2, 1)` # squeeze only the second dimension, to obtain a 1-D array # thus match what `scipy.optimize.linprog` returns. 
x = sol['x'] if x is not None: if x.typecode != 'd': raise AssertionError(x.typecode) result['x'] = np.fromiter(x, dtype=np.double) else: result['x'] = None result['fun'] = sol['primal objective'] return result def _solve_lp_using_scipy(c, G, h): """Attempt linear optimization using `scipy.optimize.linprog`.""" _assert_have_solver('scipy') sol = optimize.linprog( c, G, np.transpose(h), None, None, bounds=(None, None)) return dict( status=sol.status, x=sol.x, fun=sol.fun) def _assert_have_solver(solver): """Raise `RuntimeError` if `solver` is absent.""" if solver in installed_solvers: return raise RuntimeError(( 'solver {solver} not in ' 'installed solvers: {have}').format( solver=solver, have=installed_solvers))
4,447
27.512821
69
py
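A short sketch of the lpsolve interface documented above: minimize c'x subject to G x <= h, with the result returned as a dict containing status, x, and fun. The numeric data are illustrative.

import numpy as np
from polytope.solvers import lpsolve

# minimize x + y subject to 0 <= x <= 1 and 0 <= y <= 1
c = np.array([1.0, 1.0])
G = np.array([[ 1.0,  0.0],
              [-1.0,  0.0],
              [ 0.0,  1.0],
              [ 0.0, -1.0]])
h = np.array([1.0, 0.0, 1.0, 0.0])

sol = lpsolve(c, G, h)             # uses the module-level default_solver
if sol['status'] == 0:             # 0 means optimal, as in scipy.optimize.linprog
    print(sol['x'], sol['fun'])    # optimum near (0, 0) with objective value 0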
polytope
polytope-main/polytope/quickhull.py
# Copyright (c) 2011-2014 by California Institute of Technology # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # 3. Neither the name of the California Institute of Technology nor # the names of its contributors may be used to endorse or promote # products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH # OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF # USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT # OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. r"""Implementation of the Quickhull algorithm for computing convex hulls. Reference ========= \cite{Barber96toms} """ # Created by P. Nilsson, 8/2/11 import numpy as np class Facet(object): """Face of dimension n-1 of n-dimensional polyhedron. A class describing a facet (n-1 dimensional face) of an n dimensional polyhedron with the following fields: N.B. Polyhedron is assumed to contain the origin (inside and outside are defined accordingly) - `outside`: a list of points outside the facet - `vertices`: the vertices of the facet in a n*n matrix where each row denotes a vertex - `neighbors`: a list of other facets with which the facet shares n-1 vertices - `normal`: a normalized vector perpendicular to the facet, pointing "out" - `distance`: the normal distance of the facet from origo """ def __init__(self, points): self.outside = [] self.vertices = points self.neighbors = [] self.normal = None self.distance = None sh = np.shape(points) A0 = np.hstack([points, np.ones([sh[0], 1])]) b0 = np.zeros([sh[0], 1]) b = np.vstack([np.zeros([sh[0], 1]), 1]) c = np.zeros(sh[1] + 1) c[-1] = -1. A = np.vstack([A0, c]) sol = np.linalg.solve(A, b) xx = sol[0:sh[1]] mult = np.sqrt(np.sum(xx**2)) n = xx / mult d = sol[sh[1]] / mult # Test to check that n is >outer< normal if np.sum(n.flatten() * points[0]) < 0: n = -n self.normal = n self.distance = -d def get_furthest(self): """Return point outside the furthest away from the facet.""" N = len(self.outside) if N == 1: ret = self.outside[0] del self.outside[0] return ret else: p0 = self.outside[0] inddel = 0 for i in range(1, N): if p0.distance < self.outside[i].distance: p0 = self.outside[i] inddel = i del self.outside[inddel] return p0 class Outside_point(object): """Point coordinates and distance to facet. The distance is between the point and the facet that the point is assigned to. 
""" def __init__(self, coordinates, distance): self.distance = distance self.coordinates = coordinates def distance(p, fac1): """Calculate the distance from a facet to a point.""" n = fac1.normal d = fac1.distance return np.sum(n.flatten() * p.flatten()) - d def is_neighbor(fac1, fac2, abs_tol=1e-7): """Determine if two facets share d-1 vertices.""" vert1 = fac1.vertices vert2 = fac2.vertices sh = np.shape(vert1) dim = sh[1] same = 0 for ii in range(dim): v1 = vert1[ii, :] for jj in range(dim): v2 = vert2[jj, :] if np.all(np.abs(v1 - v2) < abs_tol): same += 1 break return (same == dim - 1) def quickhull(POINTS, abs_tol=1e-7): """Compute the convex hull of a set of points. @param POINTS: a n*d np array where each row denotes a point @return: A,b,vertices: `A` and `b describing the convex hull polytope as A x <= b (H-representation). `vertices is a list of all the points in the convex hull (V-representation). """ POINTS = POINTS.astype('float') sh = np.shape(POINTS) dim = sh[1] npt = sh[0] if npt <= dim: # Convex hull is empty return np.array([]), np.array([]), None # Check if convex hull is fully dimensional u, s, v = np.linalg.svd(np.transpose(POINTS - POINTS[0, :])) rank = np.sum(s > 1e-15) if rank < dim: print( "Warning: convex hull is not fully dimensional, " "returning empty polytope") return np.array([]), np.array([]), None # Choose starting simplex by choosing maximum # points in random directions rank = 0 while rank < dim: ind = [] d = 0 while d < dim + 1: rand = np.random.rand(dim) - 0.5 test = np.dot(POINTS, rand) index = np.argsort(test) i = 0 b = index[i] in ind while b: i += 1 b = index[i] in ind ind.append(index[i]) d += 1 startsimplex = POINTS[ind, :] u, s, v = np.linalg.svd( np.transpose(startsimplex - startsimplex[0, :])) rank = np.sum(s > 1e-10) unassigned_points = POINTS[np.setdiff1d(range(npt), ind), :] # Center starting simplex around origin by translation xc = np.zeros(dim) for ii in range(dim + 1): xc += startsimplex[ii, :] / (dim + 1) startsimplex = startsimplex - xc unassigned_points = unassigned_points - xc Forg = [] F = [] R = [] for i in range(dim + 1): ind = np.setdiff1d(np.arange(dim + 1), [i]) fac = Facet(startsimplex[ind, :]) Forg.append(fac) if npt == dim + 1: # If only d+1 facets, we already have convex hull num = len(Forg) A = np.zeros([num, dim]) b = np.zeros([num, 1]) vert = np.zeros([num * dim, dim]) for ii in range(num): idx = np.ix_(range(ii * dim, (ii + 1) * dim)) vert[idx, :] = Forg[ii].vertices + xc A[ii, :] = Forg[ii].normal.flatten() b[ii] = Forg[ii].distance vert = np.unique( vert.view([('', vert.dtype)] * vert.shape[1])).view( vert.dtype).reshape(-1, vert.shape[1]) b = b.flatten() + np.dot(A, xc.flatten()) return A, b.flatten(), vert for ii in range(len(Forg)): # In the starting simplex, all facets are neighbors for jj in range(ii + 1, len(Forg)): fac1 = Forg[ii] fac2 = Forg[jj] ind = np.setdiff1d(np.arange(dim + 1), [ii, jj]) fac1.neighbors.append(fac2) fac2.neighbors.append(fac1) for fac1 in Forg: # Assign outside points to facets npt = np.shape(unassigned_points)[0] keep_list = np.ones(npt, dtype=int) for ii in range(npt): if npt == 1: pu = unassigned_points else: pu = unassigned_points[ii, :] d = distance(pu, fac1) if d > abs_tol: op = Outside_point(pu.flatten(), d) fac1.outside.append(op) keep_list[ii] = 0 if len(fac1.outside) > 0: F.append(fac1) ind = np.nonzero(keep_list)[0] if len(ind) > 0: unassigned_points = unassigned_points[ind, :] else: unassigned_points = None break # We now have a collection F of facets with outer points! 
# Selecting the point furthest away from a facet while len(F) > 0: facet = F[0] p = facet.get_furthest().coordinates V = [] # Initialize visible set # Want to add all facets that are visible from p Ncoll = [] # Set of unvisited neighbors visited = [] V.append(facet) # facet itself is visible by definition visited.append(facet) # facet is visited for N in facet.neighbors: # add all neighbors for visit Ncoll.append(N) while len(Ncoll) > 0: # Visit all neighbours N = Ncoll[0] visited.append(N) if distance(p, N) > abs_tol: V.append(N) for neighbor in N.neighbors: if (neighbor not in visited) & (neighbor not in Ncoll): Ncoll.append(neighbor) del Ncoll[0] # Should now have all visible facets in V NV = [] unassigned_points = None for fac1 in V: # Move points from facets in V to the set unassigned_points N = len(fac1.outside) for ii in range(N): if unassigned_points is None: unassigned_points = np.array( [fac1.outside[ii].coordinates]) else: unassigned_points = np.vstack( [unassigned_points, fac1.outside[ii].coordinates]) for fac1 in V: # Figure out the boundary of V, and create new facets for fac2 in fac1.neighbors: if not (fac2 in V): # fac1 is on the boundary! # Create new facet from intersection between fac1 and fac2 # and p vert1 = fac1.vertices vert2 = fac2.vertices for ii in range(dim): p1 = vert1[ii, :] test = np.sum(vert2 == p1, 1) if not np.any(test == dim): ind = np.setdiff1d(np.arange(dim), np.array([ii])) points = vert1[ind] break points = np.vstack([p, points]) # Vertex points are in points R = Facet(points) R.neighbors.append(fac2) fac2.neighbors.append(R) NV.append(R) # Establish other neighbor relations in NV for ii in range(len(NV)): for jj in range(ii + 1, len(NV)): if is_neighbor(NV[ii], NV[jj], abs_tol=abs_tol): NV[ii].neighbors.append(NV[jj]) NV[jj].neighbors.append(NV[ii]) # Assign unassigned points to facets in NV, # and add facets to F or Forg for fac1 in NV: if unassigned_points is None: Forg.append(fac1) continue npt = np.shape(unassigned_points)[0] keep_list = np.ones(npt, dtype=int) for ii in range(npt): if npt == 1: pu = unassigned_points else: pu = unassigned_points[ii, :] d = distance(pu, fac1) if d > abs_tol: op = Outside_point(pu.flatten(), d) fac1.outside.append(op) keep_list[ii] = 0 if len(fac1.outside) > 0: F.append(fac1) Forg.append(fac1) else: Forg.append(fac1) ind = np.nonzero(keep_list) if len(ind[0]) > 0: unassigned_points = unassigned_points[ind[0], :] else: unassigned_points = None # Delete facets in V, and neighbor references for fac1 in V: for fac2 in fac1.neighbors: fac2.neighbors.remove(fac1) if fac1 in F: F.remove(fac1) Forg.remove(fac1) fac1.neighbors = [] V = [] num = len(Forg) A = np.zeros([num, dim]) b = np.zeros([num, 1]) vert = np.zeros([num * dim, dim]) for ii in range(num): vert[np.ix_(range(ii * dim, (ii + 1) * dim)), :] = Forg[ii].vertices + xc A[ii, :] = Forg[ii].normal.flatten() b[ii] = Forg[ii].distance vert = np.unique( vert.view([('', vert.dtype)] * vert.shape[1])).view( vert.dtype).reshape(-1, vert.shape[1]) b = b.flatten() + np.dot(A, xc.flatten()) return A, b.flatten(), vert
13,137
35.494444
78
py
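A quick sketch of the quickhull routine above on a handful of 2-D points; the coordinates are illustrative. It returns the hull in H-representation (A, b) together with its vertices.

import numpy as np
from polytope.quickhull import quickhull

# Four corners of the unit square plus one interior point (illustrative data).
points = np.array([
    [0.0, 0.0],
    [1.0, 0.0],
    [0.0, 1.0],
    [1.0, 1.0],
    [0.5, 0.5],
])
A, b, vertices = quickhull(points)
print(A.shape, b.shape)   # one inequality A x <= b per hull facet
print(vertices)           # hull vertices; the interior point should not appear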
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/setup.py
from setuptools import setup setup( name="ds_gamma_mltool", version="0.0.1", description="ML processes for W->DS+Gamma", author="Evelin Bakos", packages=['ml_tool'], package_data={'ml_tool': ["default_model.json"]}, entry_points={ 'console_scripts': [ 'ml_tool=ml_tool:main' ] }, install_requires=[ "numpy", "matplotlib", "tensorflow", "uproot", "scikit-learn", "pandas", "seaborn" ], zip_safe=False )
531
19.461538
53
py
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/__main__.py
import sys import traceback import argparse from datetime import datetime from pathlib import Path from .dataset import DataSet, BackgroundMode def parse_arguments(args) -> argparse.Namespace: parser = argparse.ArgumentParser(prog='ml_tool') subparsers = parser.add_subparsers(help='This tool has several modes.', dest="subtool") train_parser = subparsers.add_parser("train", help="Train ML models") train_parser.add_argument("-d", "--data-directory", type=str, default="data", help="Where to load data files from") train_parser.add_argument("-m", "--model-directory", type=str, default="models", help="Where to store model files") train_parser.add_argument("-j", "--config-file", type=str, default="default_model.json", help="json file with config options") ##plot... plot_parser = subparsers.add_parser("plot", help="Plot ML models") plot_parser.add_argument("-d", "--data-directory", type=str, default="data", help="Where to load data files from") plot_parser.add_argument("-m", "--model-directory", type=str, default="models", help="Where to load model files") plot_parser.add_argument("-p", "--plot-directory", type=str, default="plots", help="Where to store plot images") plot_parser.add_argument("-i", "--images", action="store_true", default=False, help="Run with convolutional images.") group2 = plot_parser.add_mutually_exclusive_group() group2.add_argument("--test-qq", action='store_true', help="Test on qq only") group2.add_argument("--test-gg", action='store_true', help="Test on gg only") ##compariplot... compariplot = subparsers.add_parser("compariplot", help="Make an overview of models as a seaborn swarmplot") compariplot.add_argument("-m", "--model-directory", type=str, default="models", help="Where to load model files") compariplot.add_argument("-p", "--plot-directory", type=str, default="plots", help="Where to store plot images") compariplot.add_argument("-r", "--range", type=float, nargs=2, default=None, help="Y-axis range") compariplot.add_argument("-c", "--constraint", action='append', type=str, nargs=2, help="constraints on variables") compariplot.add_argument("category", type=str, help="Category for the X axis") compariplot.add_argument("variable", type=str, help="Variable out of metadata which to put on the Y axis") compariplot.add_argument("-o", "--color-category", type=str, help="colour of points category") compariplot.add_argument("-f", "--filename", type=str, default="", help="output plot filename") compariplot.add_argument("-s", "--markersize", type=float, default=3, help="markersize") ##tabulate tabulate_parser = subparsers.add_parser("tabulate", help="Tabulate ML models") tabulate_parser.add_argument("-m", "--model-directory", type=str, default="models", help="Where to load model files") tabulate_parser.add_argument("variable", type=str, help="Variable name") ##correlate correlate_parser = subparsers.add_parser("correlate", help="Correlate 2 ML models") correlate_parser.add_argument("-d", "--data-directory", type=str, default="data", help="Where to load data files from") correlate_parser.add_argument("-m1", "--model1", type=str, help="Model 1") correlate_parser.add_argument("-m2", "--model2", type=str, help="Model 2") ##reevaluate reevaluate_parser = subparsers.add_parser("reevaluate", help="Re-evaluate ML models") reevaluate_parser.add_argument("-d", "--data-directory", type=str, default="data", help="Where to load data files from") reevaluate_parser.add_argument("-m", "--model", type=str, help="Model") group3 = reevaluate_parser.add_mutually_exclusive_group() 
group3.add_argument("--train-qq", action='store_true', help="Train on qq only") group3.add_argument("--train-gg", action='store_true', help="Train on gg only") group4 = reevaluate_parser.add_mutually_exclusive_group() group4.add_argument("--test-qq", action='store_true', help="Test on qq only") group4.add_argument("--test-gg", action='store_true', help="Test on gg only") return parser, parser.parse_args(args) def command(args): parser, arguments = parse_arguments(args) if not arguments: parser.print_help() return 1 ##train models if arguments.subtool == 'train': from .trainer import train from .config import get_configs import tensorflow as tf gpus = tf.config.experimental.list_physical_devices('GPU') tf.config.experimental.set_memory_growth(gpus[0], True) dataset = DataSet(arguments.data_directory, BackgroundMode.Mixed, BackgroundMode.Mixed) start = datetime.now() num_runs = sum(1 for x in get_configs(arguments.config_file)) ##get config file: i = 0 iterable = iter(get_configs(arguments.config_file)) config = next(iterable) while True: try: while True: train_mode = BackgroundMode.Mixed if config['train_qq']: train_mode = BackgroundMode.QQOnly elif config['train_gg']: train_mode = BackgroundMode.GGOnly test_mode = BackgroundMode.Mixed if config['test_qq']: test_mode = BackgroundMode.QQOnly elif config['test_gg']: test_mode = BackgroundMode.GGOnly keys = DataSet.nominal_keys.copy() if config['run_options'] == 'conv_only' or config['run_options'] == 'combi': keys.append('jet_image') dataset.train_mode = train_mode dataset.test_mode = test_mode dataset.reset_keys(keys) try: train(dataset, arguments.model_directory, config) except KeyboardInterrupt as e: raise e except: with open(f"{arguments.model_directory}/{i}.log", "w") as f: f.write(traceback.format_exc()) print(f"Model {i} failed to train, exception logged to file {arguments.model_directory}/{i}.log") # Time projection now = datetime.now() duration = now - start total_duration = duration / (i + 1) * num_runs left_duration = total_duration - duration finished = now + left_duration print(f"{i+1}/{num_runs} done, time elapsed: {duration}, estimated time left: {left_duration}, projected finish by {finished}. ") i += 1 config = next(iterable) except KeyboardInterrupt: del dataset del train import gc gc.collect() tf.keras.backend.clear_session() print("Pausing, do you wish to continue? 
[y/n].") pauset = datetime.now() while True: a = input(':') if a == 'n': sys.exit(0) if a == 'y': break from .trainer import train dataset = DataSet(arguments.data_directory, BackgroundMode.Mixed, BackgroundMode.Mixed) start -= datetime.now() - pauset ## make table if arguments.subtool == 'tabulate': from .tabulate import tabulate tabulate(arguments.model_directory, arguments.variable) ## correlate if arguments.subtool == 'correlate': from .correlate import correlate dataset = DataSet(arguments.data_directory, BackgroundMode.Mixed, BackgroundMode.Mixed) correlate(Path(arguments.model1), Path(arguments.model2), dataset) ##plot models if arguments.subtool == 'plot': from .plotter import plot train_mode = BackgroundMode.Mixed test_mode = BackgroundMode.Mixed if arguments.test_qq: test_mode = BackgroundMode.QQOnly elif arguments.test_gg: test_mode = BackgroundMode.GGOnly dataset = DataSet(arguments.data_directory, train_mode, test_mode) modeldir = Path(arguments.model_directory).resolve() plotdir = Path(arguments.plot_directory).resolve() plot(modeldir, plotdir, dataset) ##compariplot models if arguments.subtool == "compariplot": from .compariplot import compariplot compariplot( arguments.model_directory, arguments.plot_directory, arguments.range, arguments.constraint, arguments.category, arguments.variable, arguments.color_category, arguments.filename, arguments.markersize ) ##reevaluate models if arguments.subtool == 'reevaluate': from .reevaluate import reevaluate import tensorflow as tf gpus = tf.config.experimental.list_physical_devices('GPU') tf.config.experimental.set_memory_growth(gpus[0], True) train_mode = BackgroundMode.Mixed if arguments.train_qq: train_mode = BackgroundMode.QQOnly elif arguments.train_gg: train_mode = BackgroundMode.GGOnly test_mode = BackgroundMode.Mixed if arguments.test_qq: test_mode = BackgroundMode.QQOnly elif arguments.test_gg: test_mode = BackgroundMode.GGOnly dataset = DataSet(arguments.data_directory, train_mode, test_mode) reevaluate(Path(arguments.model), dataset) def main(): return command(sys.argv[1:]) if __name__ == "__main__": main()
9,864
43.638009
149
py
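The argument parser above defines the ml_tool sub-commands; a hypothetical invocation through the same command() entry point is sketched below (the data, models, and plots directories are placeholders).

from ml_tool.__main__ import command

# Equivalent to running `ml_tool plot -d data -m models -p plots` from a shell
# via the console script declared in setup.py; directory names are placeholders.
command(['plot', '-d', 'data', '-m', 'models', '-p', 'plots'])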
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/correlate.py
from .model import Model from .dataset import DataSet import numpy as np def correlate(model_1, model_2, dataset: DataSet): model1 = Model.load(model_1) model2 = Model.load(model_2) # Model1 prediction dataset.reset_keys(model1.metadata["keys"]) test_x1, test_y1 = dataset.test_data() pred_y1 = model1.model.predict(test_x1).flatten() # Model2 prediction dataset.reset_keys(model2.metadata["keys"]) test_x2, test_y2 = dataset.test_data() pred_y2 = model2.model.predict(test_x2).flatten() assert np.all(test_y1 == test_y2) # true-pos1 true-pos2 num_tp_tp = np.logical_and(test_y1 == 1, np.logical_and(pred_y1 >= 0.5, pred_y2 >= 0.5)).sum() # false-neg1 true-pos2 num_fn_tp = np.logical_and(test_y1 == 1, np.logical_and(pred_y1 < 0.5, pred_y2 >= 0.5)).sum() # true-pos1 false-neg2 num_tp_fn = np.logical_and(test_y1 == 1, np.logical_and(pred_y1 >= 0.5, pred_y2 < 0.5)).sum() # false-neg1 false-neg2 num_fn_fn = np.logical_and(test_y1 == 1, np.logical_and(pred_y1 < 0.5, pred_y2 < 0.5)).sum() # true-neg1 true-neg2 num_tn_tn = np.logical_and(test_y1 == 0, np.logical_and(pred_y1 < 0.5, pred_y2 < 0.5)).sum() # false-pos1 true-neg2 num_fp_tn = np.logical_and(test_y1 == 0, np.logical_and(pred_y1 >= 0.5, pred_y2 < 0.5)).sum() # true-neg1 false-pos2 num_tn_fp = np.logical_and(test_y1 == 0, np.logical_and(pred_y1 < 0.5, pred_y2 >= 0.5)).sum() # false-pos1 false-pos2 num_fp_fp = np.logical_and(test_y1 == 0, np.logical_and(pred_y1 >= 0.5, pred_y2 >= 0.5)).sum() print("true-pos-1 true-pos-2", num_tp_tp) print("false-neg-1 true-pos-2", num_fn_tp) print("true-pos-1 false-neg-2", num_tp_fn) print("false-neg-1 false-neg-2", num_fn_fn) print("true-neg-1 true-neg-2", num_tn_tn) print("false-pos-1 true-neg-2", num_fp_tn) print("true-neg-1 false-pos-2", num_tn_fp) print("false-pos-1 false-pos-2", num_fp_fp) print("total ev", test_y1.shape[0]) print("percentages") print("true-pos-1 true-pos-2", num_tp_tp / float(test_y1.shape[0]) * 100) print("false-neg-1 true-pos-2", num_fn_tp / float(test_y1.shape[0]) * 100) print("true-pos-1 false-neg-2", num_tp_fn / float(test_y1.shape[0]) * 100) print("false-neg-1 false-neg-2", num_fn_fn / float(test_y1.shape[0]) * 100) print("true-neg-1 true-neg-2", num_tn_tn / float(test_y1.shape[0]) * 100) print("false-pos-1 true-neg-2", num_fp_tn / float(test_y1.shape[0]) * 100) print("true-neg-1 false-pos-2", num_tn_fp / float(test_y1.shape[0]) * 100) print("false-pos-1 false-pos-2", num_fp_fp / float(test_y1.shape[0]) * 100) print("Models agree correctly:", (num_tp_tp + num_tn_tn) / float(test_y1.shape[0]) * 100) print("Models agree incorrectly:", (num_fp_fp + num_fn_fn) / float(test_y1.shape[0]) * 100) print("Model 1 is right, 2 is wrong:", (num_tp_fn + num_tn_fp) / float(test_y1.shape[0]) * 100) print("Model 2 is right, 1 is wrong:", (num_fn_tp + num_fp_tn) / float(test_y1.shape[0]) * 100)
3,050
39.68
99
py
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/tabulate.py
from .model import Model def tabulate(model_directory, variable): models = Model.load_multi_meta_only(model_directory) with open("table.csv", "w") as f: for model in models: print(model.name + ', ' + str(model.metadata[variable]), file=f)
269
29
76
py
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/designer.py
from tensorflow.keras import layers
from tensorflow.keras import models
import tensorflow as tf
from tensorflow import keras

from .model import Model
from .config import *


## Here define models:
# dense models
def create_dense_layers(config):
    return_layers = []
    return_layers.append(layers.Dense(config['layer1_nodes'], activation=config['layer1_activation']))
    if config['layer1_dropout']:
        return_layers.append(layers.Dropout(config['layer1_dropout_nodes']))
    if config['layer2']:
        return_layers.append(layers.Dense(config['layer2_nodes'], activation=config['layer2_activation']))
        if config['layer2_dropout']:
            return_layers.append(layers.Dropout(config['layer2_dropout_nodes']))
    if config['layer3']:
        return_layers.append(layers.Dense(config['layer3_nodes'], activation=config['layer3_activation']))
        if config['layer3_dropout']:
            return_layers.append(layers.Dropout(config['layer3_dropout_nodes']))
    if config['run_options'] == "dense_only":
        return_layers.append(layers.Dense(1, activation=config['layer_output_activation']))
    return return_layers


#This is needed to create model from the layers above
def create_model(name, prepped_layers, input_size):
    all_layers = [layers.InputLayer(input_shape=(input_size,))] + prepped_layers
    model = keras.Sequential(all_layers)
    model.compile(loss="binary_crossentropy", optimizer="adam", metrics=["accuracy"])
    return Model(model, name=name, metadata={})


# Convolutional only
def create_conv_layers(config):
    return_layers = []
    param1 = config['conv_layer1_nodes']
    return_layers.append(layers.Conv2D(param1[0], (param1[1], param1[2]), activation=config['conv_layer1_activation'], padding="same"))
    if config['conv_layer1_maxpooling']:
        return_layers.append(layers.MaxPooling2D())
    if config['conv_layer2']:
        param2 = config['conv_layer2_nodes']
        return_layers.append(layers.Conv2D(param2[0], (param2[1], param2[2]), activation=config['conv_layer2_activation'], padding="same"))
        if config['conv_layer2_maxpooling']:
            return_layers.append(layers.MaxPooling2D())
    if config['conv_layer3']:
        param3 = config['conv_layer3_nodes']
        return_layers.append(layers.Conv2D(param3[0], (param3[1], param3[2]), activation=config['conv_layer3_activation'], padding="same"))
        if config['conv_layer3_maxpooling']:
            return_layers.append(layers.MaxPooling2D())
    return_layers.append(layers.Flatten())
    # Dense layers to finish the convolutional model:
    if config['conv_dense']:
        return_layers.append(layers.Dense(config['conv_denselayer_nodes'], activation=config['conv_denselayer_activation']))
    if config['run_options'] == 'conv_only':
        return_layers.append(layers.Dense(1, config['conv_output_activation']))
    return return_layers


#This is needed to create model from the layers above
def create_conv_model(name, prepped_layers, conv_input_shape):
    all_layers = [layers.InputLayer(input_shape=conv_input_shape)] + prepped_layers
    model = keras.Sequential(all_layers)
    model.compile(loss="binary_crossentropy", optimizer="adam", metrics=["accuracy"])
    return Model(model, name=name, metadata={})


# convolutional + dense
def create_conv_plus_dense_model(config, dense_input_shape, conv_input_shape, dense_layers, conv_layers):
    #dense layers
    final_dense_layers = [layers.InputLayer(input_shape=dense_input_shape)] + dense_layers
    dense = keras.Sequential(final_dense_layers)
    #convolutional layers
    final_conv_layers = [layers.InputLayer(input_shape=conv_input_shape)] + conv_layers
    conv = keras.Sequential(final_conv_layers)
    combined = layers.concatenate((dense.output, conv.output))
    x = layers.Dense(config['comb_denselayer_nodes'],
                     activation=config['comb_denselayer_activation'])(combined)
    x = layers.Dense(1, activation=config['comb_output_activation'])(x)
    model = models.Model(inputs=[dense.input, conv.input], outputs=x)
    model.compile(loss="binary_crossentropy", optimizer="adam", metrics=["accuracy"])
    return Model(model, name=config['model_name'], metadata={})
4,161
46.83908
141
py
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/model.py
from dataclasses import dataclass
from typing import Dict, List, Union
from pathlib import Path
import json

from tensorflow import keras


@dataclass
class Model:
    model: keras.Model
    name: str
    metadata: Dict[str, Union[str, int, bool, list]]

    def save(self, directory) -> None:
        self.model.save(str(Path(directory).resolve() / self.name))
        (Path(directory).resolve() / self.name / ".custom.metadata.json").write_text(json.dumps({
            "name": self.name,
            "metadata": self.metadata
        }))

    @classmethod
    def load(cls, file) -> 'Model':
        return cls(
            model=keras.models.load_model(str(file)),
            **json.loads((file / ".custom.metadata.json").read_text())
        )

    @classmethod
    def load_multi(cls, directory) -> List['Model']:
        return [cls.load(file) for file in Path(directory).resolve().glob("*") if file.is_dir() and (file / ".custom.metadata.json").exists()]

    @classmethod
    def load_multi_meta_only(cls, directory) -> List['Model']:
        return [Model(
            model=None,
            **json.loads((file / ".custom.metadata.json").read_text())
        ) for file in Path(directory).resolve().glob("*") if file.is_dir() and (file / ".custom.metadata.json").exists()]
1,289
30.463415
143
py
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/dataset.py
from enum import Enum, auto
from pathlib import Path

from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle
import numpy as np
import uproot


class BackgroundMode(Enum):
    Mixed = auto()
    QQOnly = auto()
    GGOnly = auto()


class DataSet:
    nominal_keys = [
        'delta_eta', 'delta_phi', 'n_neutral', 'n_charged', 'charge', 'invariant_mass', 'btag',
        'e_had_over_e_em', 'tau_0', 'tau_1', 'tau_2', 'abs_qj', 'r_em', 'r_track', 'f_em',
        'p_core_1', 'p_core_2', 'f_core_1', 'f_core_2', 'f_core_3', 'pt_d_square',
        'les_houches_angularity', 'width', 'mass', 'track_magnitude'
    ]

    @staticmethod
    def transform(keys, data):
        keys = keys.copy()
        if 'jet_image' in keys:
            jet_img_data = data['jet_image'].array(library='np')[:80000]
            keys.remove('jet_image')
            if not keys:
                return jet_img_data
            otherdata = np.array(data.arrays(keys, library='np', how=tuple)).T[:80000]
            return otherdata, jet_img_data
        return np.array(data.arrays(keys, library='np', how=tuple)).T[:80000]

    def __init__(self, directory, train_mode=BackgroundMode.Mixed, test_mode=BackgroundMode.Mixed, keys=nominal_keys):
        path = Path(directory).resolve()
        self.gg_up = uproot.open(str(path / 'gg_ntuples.root') + ":DS")
        self.qq_up = uproot.open(str(path / 'qq_ntuples.root') + ":DS")
        self.wp_up = uproot.open(str(path / 'wp_ntuples.root') + ":DS")
        self.wm_up = uproot.open(str(path / 'wm_ntuples.root') + ":DS")
        self.train_mode = train_mode
        self.test_mode = test_mode
        self.reset_keys(keys)

    def _preload_data(self):
        self.gg_data = self.transform(self._keys, self.gg_up)
        self.qq_data = self.transform(self._keys, self.qq_up)
        self.wp_data = self.transform(self._keys, self.wp_up)
        self.wm_data = self.transform(self._keys, self.wm_up)
        (self.gg_data_train, self.gg_data_test), (self.qq_data_train, self.qq_data_test), \
        (self.wp_data_train, self.wp_data_test), (self.wm_data_train, self.wm_data_test) = (
            train_test_split(
                ds,
                test_size=0.3,
                shuffle=True,
                random_state=0
            ) for ds in (self.gg_data, self.qq_data, self.wp_data, self.wm_data)
        )

    def _preload_data_with_image(self):
        self.gg_data, self.gg_images = self.transform(self._keys, self.gg_up)
        self.qq_data, self.qq_images = self.transform(self._keys, self.qq_up)
        self.wp_data, self.wp_images = self.transform(self._keys, self.wp_up)
        self.wm_data, self.wm_images = self.transform(self._keys, self.wm_up)
        (self.gg_data_train, self.gg_data_test, self.gg_image_train, self.gg_image_test), \
        (self.qq_data_train, self.qq_data_test, self.qq_image_train, self.qq_image_test), \
        (self.wp_data_train, self.wp_data_test, self.wp_image_train, self.wp_image_test), \
        (self.wm_data_train, self.wm_data_test, self.wm_image_train, self.wm_image_test) = (
            train_test_split(
                ds, img,
                test_size=0.3,
                shuffle=True,
                random_state=0
            ) for ds, img in (
                (self.gg_data, self.gg_images),
                (self.qq_data, self.qq_images),
                (self.wp_data, self.wp_images),
                (self.wm_data, self.wm_images)
            )
        )

    def reset_keys(self, keys):
        self._keys = keys.copy()
        if 'jet_image' in self._keys and len(self._keys) > 1:
            self._preload_data_with_image()
        else:
            self._preload_data()

    def keys(self):
        return self._keys

    def image_dimensions(self):
        return self.qq_images[0].shape

    def _dataset(self, bkgs, sigs):
        data = np.concatenate(list(bkgs) + list(sigs), axis=0)
        labels = np.concatenate((
            np.zeros((sum((len(bkg) for bkg in bkgs))),),
            np.ones((sum((len(sig) for sig in sigs))),),
        ), axis=0)
        return shuffle(data, labels, random_state=0)

    def _dataset_withimage(self, bkgs, image_bkgs, sigs, image_sigs):
        data = np.concatenate(list(bkgs) + list(sigs), axis=0)
        images = np.concatenate(list(image_bkgs) + list(image_sigs), axis=0)
        labels = np.concatenate((
            np.zeros((sum((len(bkg) for bkg in bkgs))),),
            np.ones((sum((len(sig) for sig in sigs))),),
        ), axis=0)
        ds_d, ds_i, ds_l = shuffle(data, images, labels, random_state=0)
        return [ds_d, ds_i], ds_l

    def train_data(self):
        if 'jet_image' in self._keys and len(self._keys) > 1:
            if self.train_mode == BackgroundMode.Mixed:
                return self._dataset_withimage(
                    bkgs=[self.gg_data_train, self.qq_data_train],
                    image_bkgs=[self.gg_image_train, self.qq_image_train],
                    sigs=[self.wp_data_train, self.wm_data_train],
                    image_sigs=[self.wp_image_train, self.wm_image_train]
                )
            elif self.train_mode == BackgroundMode.GGOnly:
                return self._dataset_withimage(
                    bkgs=[self.gg_data_train],
                    image_bkgs=[self.gg_image_train],
                    sigs=[self.wp_data_train, self.wm_data_train],
                    image_sigs=[self.wp_image_train, self.wm_image_train]
                )
            elif self.train_mode == BackgroundMode.QQOnly:
                return self._dataset_withimage(
                    bkgs=[self.qq_data_train],
                    image_bkgs=[self.qq_image_train],
                    sigs=[self.wp_data_train, self.wm_data_train],
                    image_sigs=[self.wp_image_train, self.wm_image_train]
                )
        if self.train_mode == BackgroundMode.Mixed:
            return self._dataset(
                bkgs=[self.gg_data_train, self.qq_data_train],
                sigs=[self.wp_data_train, self.wm_data_train]
            )
        elif self.train_mode == BackgroundMode.GGOnly:
            return self._dataset(
                bkgs=[self.gg_data_train],
                sigs=[self.wp_data_train, self.wm_data_train]
            )
        elif self.train_mode == BackgroundMode.QQOnly:
            return self._dataset(
                bkgs=[self.qq_data_train],
                sigs=[self.wp_data_train, self.wm_data_train]
            )
        else:
            raise Exception("Invalid train mode")

    def test_data(self):
        if 'jet_image' in self._keys and len(self._keys) > 1:
            if self.test_mode == BackgroundMode.Mixed:
                return self._dataset_withimage(
                    bkgs=[self.gg_data_test, self.qq_data_test],
                    image_bkgs=[self.gg_image_test, self.qq_image_test],
                    sigs=[self.wp_data_test, self.wm_data_test],
                    image_sigs=[self.wp_image_test, self.wm_image_test]
                )
            elif self.test_mode == BackgroundMode.GGOnly:
                return self._dataset_withimage(
                    bkgs=[self.gg_data_test],
                    image_bkgs=[self.gg_image_test],
                    sigs=[self.wp_data_test, self.wm_data_test],
                    image_sigs=[self.wp_image_test, self.wm_image_test]
                )
            elif self.test_mode == BackgroundMode.QQOnly:
                return self._dataset_withimage(
                    bkgs=[self.qq_data_test],
                    image_bkgs=[self.qq_image_test],
                    sigs=[self.wp_data_test, self.wm_data_test],
                    image_sigs=[self.wp_image_test, self.wm_image_test]
                )
        if self.test_mode == BackgroundMode.Mixed:
            return self._dataset(
                bkgs=[self.gg_data_test, self.qq_data_test],
                sigs=[self.wp_data_test, self.wm_data_test]
            )
        elif self.test_mode == BackgroundMode.GGOnly:
            return self._dataset(
                bkgs=[self.gg_data_test],
                sigs=[self.wp_data_test, self.wm_data_test]
            )
        elif self.test_mode == BackgroundMode.QQOnly:
            return self._dataset(
                bkgs=[self.qq_data_test],
                sigs=[self.wp_data_test, self.wm_data_test]
            )
        else:
            raise Exception("Invalid test mode")
8,531
41.447761
118
py
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/compariplot.py
from .model import Model

import pandas as pd
import seaborn as sns
import json
import numpy as np
from matplotlib.ticker import FuncFormatter

sns.set_theme(style="whitegrid", palette="colorblind")


def compariplot(model_directory, plot_directory, yrange, constraints, category, variable, col_category, filename, markersize):
    models = Model.load_multi_meta_only(model_directory)

    if type(models[0].metadata["config"][category]) in [list, dict]:
        conv = lambda x: str(x)
    else:
        conv = lambda x: x

    vari_data = np.array([model.metadata[variable] for model in models])
    cati_data = np.array([conv(model.metadata["config"][category]) for model in models])
    if col_category is not None:
        ccati_data = np.array([str(model.metadata["config"][col_category]) for model in models])

    sel = None
    if constraints:
        for constraint in constraints:
            subcati_data = np.array([str(model.metadata["config"][constraint[0]]) for model in models])
            sel = (subcati_data == constraint[1]) if sel is None else np.logical_and(sel, subcati_data == constraint[1])
    if sel is not None:
        sel = np.where(sel)
        vari_data = vari_data[sel]
        cati_data = cati_data[sel]
        if col_category is not None:
            ccati_data = ccati_data[sel]

    if yrange is not None:
        sel = np.where(np.logical_and(vari_data > yrange[0], vari_data < yrange[1]))
        vari_data = vari_data[sel]
        cati_data = cati_data[sel]
        if col_category is not None:
            ccati_data = ccati_data[sel]

    if variable == "accuracy":
        variable = "accuracy (%)"

    data = {
        variable: vari_data,
        category: cati_data
    }

    try:
        vals = list(set(cati_data))
        try:
            sorter = lambda x: x[0]
            dd = [(json.loads(v), v) for v in vals]
            if dd[0][0] == list:
                sorter = lambda x: x[0][0]
            order = list(k[1] for k in sorted(dd, key=sorter))
        except:
            order = list(sorted(vals))
        if "softmax" in order:
            order.remove("softmax")
            order.append("softmax")
    except:
        order = None

    if col_category is not None:
        data[col_category] = ccati_data
        hue_order = list(sorted(set(ccati_data)))
        if "softmax" in hue_order:
            hue_order.remove("softmax")
            hue_order.append("softmax")

    ds = pd.DataFrame(data)
    if col_category is not None:
        ax = sns.swarmplot(data=ds, x=category, y=variable, hue=col_category, size=markersize, order=order, hue_order=hue_order, dodge=True)
    else:
        ax = sns.swarmplot(data=ds, x=category, y=variable, size=markersize, order=order)
    fig = ax.get_figure()
    if yrange:
        ax.set_ylim(*yrange)
    ax.yaxis.set_major_formatter(FuncFormatter(lambda y, _: '{:.1f}'.format(y*100)))
    sns.despine(top=True, right=True, left=False, bottom=True)
    fig.tight_layout()
    filename = filename or f"{variable}_{category}.png"
    fig.savefig(f"{plot_directory}/{filename}", dpi=600)
3,097
33.043956
140
py
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/reevaluate.py
from .model import Model
from .dataset import DataSet


def reevaluate(model, dataset: DataSet):
    model = Model.load(model)
    batch_size = model.metadata['batch_size']
    epochs = model.metadata['epochs']
    dataset.reset_keys(model.metadata['keys'])
    train_x, train_Y = dataset.train_data()
    test_x, test_Y = dataset.test_data()

    loss, accuracy = model.model.evaluate(x=test_x, y=test_Y, batch_size=batch_size, verbose=0)
    print(f"Pre re-training {model.name}.")
    print(f"  Validation loss: {loss}")
    print(f"  Validation accuracy: {accuracy}")
    print()

    for layer in model.model.layers:
        if hasattr(layer, 'kernel_initializer') and \
                hasattr(layer, 'bias_initializer'):
            print(f"Resetting layer {layer}")
            weight_initializer = layer.kernel_initializer
            bias_initializer = layer.bias_initializer
            old_weights, old_biases = layer.get_weights()
            layer.set_weights([
                weight_initializer(shape=old_weights.shape),
                bias_initializer(shape=old_biases.shape)])
        else:
            print(f"Not resetting layer {layer}")

    model.model.fit(x=train_x, y=train_Y, batch_size=batch_size, epochs=epochs, verbose=1)
    loss, accuracy = model.model.evaluate(x=test_x, y=test_Y, batch_size=batch_size, verbose=0)
    print(f"Post re-training {model.name}.")
    print(f"  Validation loss: {loss}")
    print(f"  Validation accuracy: {accuracy}")
    print()
1,505
34.857143
95
py
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/config.py
import itertools
from pathlib import Path
import itertools
import json
from typing import Type


def get_configs(json_file_name):
    #this function returns the temporary configurations
    with open(Path(__file__).resolve().parent / 'default_model.json') as f_def:
        temp_config = json.load(f_def)
    if json_file_name != 'default_model.json':
        with open(json_file_name) as f:
            for k,v in json.load(f).items():
                if k not in temp_config:
                    raise ValueError(f"'{k}' as specified in your config {json_file_name} is not a valid config parameter.")
                temp_config[k] = v

    list_fields = ["excluded_keys", "conv_layer1_nodes", "conv_layer2_nodes", "conv_layer3_nodes"]
    noncombinators = {}
    combinators = {}
    for k, v in temp_config.items():
        if k in list_fields:
            if type(v[0]) == list:  # 2D
                combinators[k] = v
            else:
                noncombinators[k] = v
        else:
            if type(v) == list:  # 1D
                combinators[k] = v
            else:
                noncombinators[k] = v

    for i, combination in enumerate(itertools.product(*list(combinators.values()))):
        config = noncombinators.copy()
        config["model_name"] = f"{config['model_name']}_{i}"
        for key, value in zip(combinators.keys(), combination):
            config[key] = value
        yield config
1,460
31.466667
124
py
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/__init__.py
import os

# Need to do this so windows doesn't freak out when tensorflow loads the libs
for dir in os.environ.get("PATH").split(os.pathsep):
    try:
        os.add_dll_directory(dir)
    except:
        pass

from .__main__ import main
237
22.8
77
py
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/plotter.py
from .model import Model
from .dataset import DataSet

import matplotlib.pyplot as plt
from matplotlib.patches import Rectangle
from sklearn.metrics import roc_curve
import numpy as np


def plot(model_directory, plot_directory, dataset: DataSet):
    fig, ax = plt.subplots()
    ax.plot([0, 1], [0, 1], 'k--')

    models = Model.load_multi(model_directory)
    for model in models:
        dataset.reset_keys(model.metadata["keys"])
        test_x, test_y = dataset.test_data()
        bsel = np.where(test_y == 0)
        ssel = np.where(test_y == 1)
        if type(test_x) == list:
            bkgdata = [test_x[0][bsel], test_x[1][bsel]]
            sigdata = [test_x[0][ssel], test_x[1][ssel]]
        else:
            bkgdata = test_x[bsel]
            sigdata = test_x[ssel]

        score = model.model.evaluate(x=test_x, y=test_y, batch_size=model.metadata['batch_size'], verbose=0)

        # ROC draw
        y_pred = model.model.predict(test_x)
        fpr, tpr, thr = roc_curve(test_y, y_pred)
        ax.plot(fpr, tpr, label=f"{model.name} {score[1]*100:.2f}% {score[0]:.3f}")

        # NNout
        fig1, ax1 = plt.subplots()
        bkgpred = model.model.predict(bkgdata)
        sigpred = model.model.predict(sigdata)
        ax1.set_xlabel('NNout')
        ax1.set_ylabel('#entries')
        r1 = Rectangle((0,0), 1, 1, fill=False, edgecolor='none', visible=False)
        ax1.hist(bkgpred, bins=20, range=(0,1), color='blue', histtype='bar', label='Background')
        ax1.hist(sigpred, bins=20, range=(0,1), color='red', histtype='bar', label='Signal')
        ax1.legend([r1], [model.name], loc=1)
        fig1.tight_layout()
        fig1.savefig(str(plot_directory / f"{model.name}_nnout.png"))
        fig1.clf()

    ax.set_xlabel('False positive rate')
    ax.set_ylabel('True positive rate')
    ax.set_title(f'ROC curves')
    ax.set_ylim(0.8, 1)
    ax.set_xlim(0, 0.2)
    ax.legend(loc='best')
    fig.savefig(str(plot_directory / "roc_curves_zoomed.png"))
    ax.set_ylim(0, 1)
    ax.set_xlim(0, 1)
    ax.legend(loc='best')
    fig.savefig(str(plot_directory / "roc_curves.png"))
    fig.clf()
2,138
32.421875
108
py
DsGammaAnalysis
DsGammaAnalysis-main/ml_tool/ml_tool/trainer.py
from .model import Model
from .dataset import DataSet
from .designer import create_dense_layers, create_model, create_conv_plus_dense_model, create_conv_layers, create_conv_model


def trainer(model: Model, dataset: DataSet, config):
    print(f"Training model {model.name}")
    train_x, train_Y = dataset.train_data()
    test_x, test_Y = dataset.test_data()
    model.model.fit(x=train_x, y=train_Y, batch_size=config['batch_size'], epochs=config['training_epochs'], verbose=0)
    loss, accuracy = model.model.evaluate(x=test_x, y=test_Y, batch_size=config['batch_size'], verbose=0)
    print(f"Finished training model {model.name}.")
    print(f"  Validation loss: {loss}")
    print(f"  Validation accuracy: {accuracy}")
    print()
    model.metadata['loss'] = loss
    model.metadata['accuracy'] = accuracy
    model.metadata['batch_size'] = config['batch_size']
    model.metadata['epochs'] = config['training_epochs']
    model.metadata['keys'] = dataset.keys()
    model.metadata['config'] = config


def train(dataset: DataSet, model_directory, config):
    ## Dense only
    if config['run_options'] == 'dense_only':
        layers = create_dense_layers(config)
        model = create_model(config['model_name'], layers, len(dataset.keys()))
        trainer(model, dataset, config)
        model.save(model_directory)

    ## Leave one or group out
    elif config['run_options'] == 'leave_one_out':
        keys = dataset.keys().copy()
        #create layers
        layers = create_dense_layers(config)
        new_keys = keys.copy()
        for key in config['excluded_keys']:
            new_keys.remove(key)
        dataset.reset_keys(new_keys)
        model = create_model(f"{config['model_name']}_no_{'_'.join(config['excluded_keys'])}", layers, len(new_keys))
        trainer(model, dataset, config)
        model.save(model_directory)

    ## run with convolutional only:
    elif config['run_options'] == 'conv_only':
        dataset.reset_keys(["jet_image"])
        layers = create_conv_layers(config)
        model = create_conv_model(config['model_name'], layers, dataset.image_dimensions())
        trainer(model, dataset, config)
        model.save(model_directory)

    ## run with combined model: Dense + Convolutional
    elif config['run_options'] == 'combi':
        dense_layers = create_dense_layers(config)
        conv_layers = create_conv_layers(config)
        model = create_conv_plus_dense_model(config, len(dataset.keys()) - 1, dataset.image_dimensions(), dense_layers, conv_layers)
        trainer(model, dataset, config)
        model.save(model_directory)

    else:
        print('Wrong run options argument!')
2,670
36.619718
132
py
airflow
airflow-main/setup.py
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """Setup.py for the Airflow project.""" # To make sure the CI build is using "upgrade to newer dependencies", which is useful when you want to check # if the dependencies are still compatible with the latest versions as they seem to break some unrelated # tests in main, you can modify this file. The modification can be simply modifying this particular comment. # e.g. you can modify the following number "00001" to something else to trigger it. from __future__ import annotations import glob import json import logging import os import subprocess import sys import unittest from copy import deepcopy from os.path import relpath from pathlib import Path from textwrap import wrap from typing import Iterable from setuptools import Command, Distribution, find_namespace_packages, setup from setuptools.command.develop import develop as develop_orig from setuptools.command.install import install as install_orig # Setuptools patches this import to point to a vendored copy instead of the # stdlib, which is deprecated in Python 3.10 and will be removed in 3.12. from distutils import log # isort: skip # Controls whether providers are installed from packages or directly from sources # It is turned on by default in case of development environments such as Breeze # And it is particularly useful when you add a new provider and there is no # PyPI version to install the provider package from INSTALL_PROVIDERS_FROM_SOURCES = "INSTALL_PROVIDERS_FROM_SOURCES" PY39 = sys.version_info >= (3, 9) logger = logging.getLogger(__name__) AIRFLOW_SOURCES_ROOT = Path(__file__).parent.resolve() PROVIDERS_ROOT = AIRFLOW_SOURCES_ROOT / "airflow" / "providers" CROSS_PROVIDERS_DEPS = "cross-providers-deps" DEPS = "deps" CURRENT_PYTHON_VERSION = f"{sys.version_info.major}.{sys.version_info.minor}" # # NOTE! IN Airflow 2.4.+ dependencies for providers are maintained in `provider.yaml` files for each # provider separately. They are loaded here and if you want to modify them, you need to modify # corresponding provider.yaml file. 
# def fill_provider_dependencies() -> dict[str, dict[str, list[str]]]: # in case we are loading setup from pre-commits, we want to skip the check for python version # because if someone uses a version of Python where providers are excluded, the setup will fail # to see the extras for those providers skip_python_version_check = os.environ.get("_SKIP_PYTHON_VERSION_CHECK") try: with AIRFLOW_SOURCES_ROOT.joinpath("generated", "provider_dependencies.json").open() as f: dependencies = json.load(f) return { key: value for key, value in dependencies.items() if CURRENT_PYTHON_VERSION not in value["excluded-python-versions"] or skip_python_version_check } except Exception as e: print(f"Exception while loading provider dependencies {e}") # we can ignore loading dependencies when they are missing - they are only used to generate # correct extras when packages are build and when we install airflow from sources # (in both cases the provider_dependencies should be present). return {} PROVIDER_DEPENDENCIES = fill_provider_dependencies() def airflow_test_suite() -> unittest.TestSuite: """Test suite for Airflow tests.""" test_loader = unittest.TestLoader() test_suite = test_loader.discover(str(AIRFLOW_SOURCES_ROOT / "tests"), pattern="test_*.py") return test_suite class CleanCommand(Command): """ Command to tidy up the project root. Registered as cmdclass in setup() so it can be called with ``python setup.py extra_clean``. """ description = "Tidy up the project root" user_options: list[str] = [] def initialize_options(self) -> None: """Set default values for options.""" def finalize_options(self) -> None: """Set final values for options.""" @staticmethod def rm_all_files(files: list[str]) -> None: """Remove all files from the list.""" for file in files: try: os.remove(file) except Exception as e: logger.warning("Error when removing %s: %s", file, e) def run(self) -> None: """Remove temporary files and directories.""" os.chdir(str(AIRFLOW_SOURCES_ROOT)) self.rm_all_files(glob.glob("./build/*")) self.rm_all_files(glob.glob("./**/__pycache__/*", recursive=True)) self.rm_all_files(glob.glob("./**/*.pyc", recursive=True)) self.rm_all_files(glob.glob("./dist/*")) self.rm_all_files(glob.glob("./*.egg-info")) self.rm_all_files(glob.glob("./docker-context-files/*.whl")) self.rm_all_files(glob.glob("./docker-context-files/*.tgz")) class CompileAssets(Command): """ Compile and build the frontend assets using yarn and webpack. Registered as cmdclass in setup() so it can be called with ``python setup.py compile_assets``. """ description = "Compile and build the frontend assets" user_options: list[str] = [] def initialize_options(self) -> None: """Set default values for options.""" def finalize_options(self) -> None: """Set final values for options.""" def run(self) -> None: """Run a command to compile and build assets.""" www_dir = AIRFLOW_SOURCES_ROOT / "airflow" / "www" subprocess.check_call(["yarn", "install", "--frozen-lockfile"], cwd=str(www_dir)) subprocess.check_call(["yarn", "run", "build"], cwd=str(www_dir)) class ListExtras(Command): """ List all available extras Registered as cmdclass in setup() so it can be called with ``python setup.py list_extras``. 
""" description = "List available extras" user_options: list[str] = [] def initialize_options(self) -> None: """Set default values for options.""" def finalize_options(self) -> None: """Set final values for options.""" def run(self) -> None: """List extras.""" print("\n".join(wrap(", ".join(EXTRAS_DEPENDENCIES.keys()), 100))) def git_version() -> str: """ Return a version to identify the state of the underlying git repo. The version will indicate whether the head of the current git-backed working directory is tied to a release tag or not : it will indicate the former with a 'release:{version}' prefix and the latter with a '.dev0' suffix. Following the prefix will be a sha of the current branch head. Finally, a "dirty" suffix is appended to indicate that uncommitted changes are present. :return: Found Airflow version in Git repo """ try: import git try: repo = git.Repo(str(AIRFLOW_SOURCES_ROOT / ".git")) except (git.NoSuchPathError): logger.warning(".git directory not found: Cannot compute the git version") return "" except git.InvalidGitRepositoryError: logger.warning("Invalid .git directory not found: Cannot compute the git version") return "" except ImportError: logger.warning("gitpython not found: Cannot compute the git version.") return "" if repo: sha = repo.head.commit.hexsha if repo.is_dirty(): return f".dev0+{sha}.dirty" # commit is clean return f".release:{sha}" return "no_git_version" def write_version(filename: str = str(AIRFLOW_SOURCES_ROOT / "airflow" / "git_version")) -> None: """ Write the Semver version + git hash to file, e.g. ".dev0+2f635dc265e78db6708f59f68e8009abb92c1e65". :param str filename: Destination file to write. """ text = git_version() with open(filename, "w") as file: file.write(text) # # NOTE! IN Airflow 2.4.+ dependencies for providers are maintained in `provider.yaml` files for each # provider separately. Before, the provider dependencies were kept here. THEY ARE NOT HERE ANYMORE. # # 'Start dependencies group' and 'End dependencies group' are marks for ./scripts/ci/check_order_setup.py # If you change these marks you should also change ./scripts/ci/check_order_setup.py # Start dependencies group async_packages = [ "eventlet>=0.33.3", "gevent>=0.13", "greenlet>=0.4.9", ] atlas = [ "atlasclient>=0.1.2", ] celery = [ # The Celery is known to introduce problems when upgraded to a MAJOR version. Airflow Core # Uses Celery for CeleryExecutor, and we also know that Kubernetes Python client follows SemVer # (https://docs.celeryq.dev/en/stable/contributing.html?highlight=semver#versions). # This is a crucial component of Airflow, so we should limit it to the next MAJOR version and only # deliberately bump the version when we tested it, and we know it can be bumped. # Bumping this version should also be connected with # limiting minimum airflow version supported in celery provider due to the # potential breaking changes in Airflow Core as well (celery is added as extra, so Airflow # core is not hard-limited via install-requires, only by extra). 
"celery>=5.2.3,<6" ] cgroups = [ # Cgroupspy 0.2.2 added Python 3.10 compatibility "cgroupspy>=0.2.2", ] dask = [ # Dask support is limited, we need Dask team to upgrade support for dask if we were to continue # Supporting it in the future "cloudpickle>=1.4.1", # Dask and distributed in version 2023.5.0 break our tests for Python > 3.7 # See https://github.com/dask/dask/issues/10279 "dask>=2.9.0,!=2022.10.1,!=2023.5.0", "distributed>=2.11.1,!=2023.5.0", ] deprecated_api = [ "requests>=2.26.0", ] doc = [ "astroid>=2.12.3", "checksumdir", # Click 8.1.4 breaks our mypy checks. The upper limit can be lifted when the # https://github.com/apache/airflow/issues/32412 issue is resolved "click>=8.0,<8.1.4", # Docutils 0.17.0 converts generated <div class="section"> into <section> and breaks our doc formatting # By adding a lot of whitespace separation. This limit can be lifted when we update our doc to handle # <section> tags for sections "docutils<0.17.0", "eralchemy2", "sphinx-airflow-theme", "sphinx-argparse>=0.1.13", "sphinx-autoapi>=2.0.0", "sphinx-copybutton", "sphinx-jinja>=2.0", "sphinx-rtd-theme>=0.1.6", "sphinx>=5.2.0", "sphinxcontrib-httpdomain>=1.7.0", "sphinxcontrib-redoc>=1.6.0", "sphinxcontrib-spelling>=7.3", ] doc_gen = [ "eralchemy2", ] flask_appbuilder_oauth = [ "authlib>=1.0.0", # The version here should be upgraded at the same time as flask-appbuilder in setup.cfg "flask-appbuilder[oauth]==4.3.3", ] kerberos = [ "pykerberos>=1.1.13", "requests_kerberos>=0.10.0", "thrift_sasl>=0.2.0", ] kubernetes = [ # The Kubernetes API is known to introduce problems when upgraded to a MAJOR version. Airflow Core # Uses Kubernetes for Kubernetes executor, and we also know that Kubernetes Python client follows SemVer # (https://github.com/kubernetes-client/python#compatibility). This is a crucial component of Airflow # So we should limit it to the next MAJOR version and only deliberately bump the version when we # tested it, and we know it can be bumped. Bumping this version should also be connected with # limiting minimum airflow version supported in cncf.kubernetes provider, due to the # potential breaking changes in Airflow Core as well (kubernetes is added as extra, so Airflow # core is not hard-limited via install-requires, only by extra). "cryptography>=2.0.0", "kubernetes>=21.7.0,<24", ] ldap = [ "ldap3>=2.5.1", "python-ldap", ] leveldb = ["plyvel"] otel = ["opentelemetry-exporter-prometheus"] pandas = ["pandas>=0.17.1", "pyarrow>=9.0.0"] password = [ "bcrypt>=2.0.0", "flask-bcrypt>=0.7.1", ] rabbitmq = [ "amqp", ] sentry = [ "blinker>=1.1", "sentry-sdk>=0.8.0", ] statsd = [ "statsd>=3.3.0", ] virtualenv = [ "virtualenv", ] webhdfs = [ "hdfs[avro,dataframe,kerberos]>=2.0.4", ] # End dependencies group # Mypy 0.900 and above ships only with stubs from stdlib so if we need other stubs, we need to install them # manually as `types-*`. See https://mypy.readthedocs.io/en/stable/running_mypy.html#missing-imports # for details. We want to install them explicitly because we want to eventually move to # mypyd which does not support installing the types dynamically with --install-types mypy_dependencies = [ # TODO: upgrade to newer versions of MyPy continuously as they are released # Make sure to upgrade the mypy version in update-common-sql-api-stubs in .pre-commit-config.yaml # when you upgrade it here !!!! 
"mypy==1.2.0", "types-boto", "types-certifi", "types-croniter", "types-Deprecated", "types-docutils", "types-paramiko", "types-protobuf", "types-python-dateutil", "types-python-slugify", "types-pytz", "types-redis", "types-requests", "types-setuptools", "types-termcolor", "types-tabulate", "types-toml", "types-Markdown", "types-PyMySQL", "types-PyYAML", ] # Dependencies needed for development only devel_only = [ "aws_xray_sdk", "beautifulsoup4>=4.7.1", "black", "blinker", "bowler", "click>=8.0", "coverage", "filelock", "gitpython", "ipdb", "jira", "jsondiff", "jsonpath_ng>=1.5.3", "mongomock", "moto[cloudformation, glue]>=4.0", "paramiko", "pipdeptree", "pre-commit", "pypsrp", "pygithub", "pytest", "pytest-asyncio", "pytest-capture-warnings", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-rerunfailures", "pytest-timeouts", "pytest-xdist", "python-jose", "pywinrm", "qds-sdk>=1.9.6", "pytest-httpx", "requests_mock", "rich-click>=1.5", "ruff>=0.0.219", "semver", "time-machine", "towncrier", "twine", "wheel", "yamllint", "aioresponses", ] aiobotocore = [ # This required for AWS deferrable operators. # There is conflict between boto3 and aiobotocore dependency botocore. # TODO: We can remove it once boto3 and aiobotocore both have compatible botocore version or # boto3 have native aync support and we move away from aio aiobotocore "aiobotocore>=2.1.1", ] def get_provider_dependencies(provider_name: str) -> list[str]: if provider_name not in PROVIDER_DEPENDENCIES: return [] return PROVIDER_DEPENDENCIES[provider_name][DEPS] def get_unique_dependency_list(req_list_iterable: Iterable[list[str]]): _all_reqs: set[str] = set() for req_list in req_list_iterable: for req in req_list: _all_reqs.add(req) return list(_all_reqs) devel = get_unique_dependency_list( [ aiobotocore, cgroups, devel_only, doc, kubernetes, mypy_dependencies, get_provider_dependencies("mysql"), pandas, password, ] ) devel_hadoop = get_unique_dependency_list( [ devel, get_provider_dependencies("apache.hdfs"), get_provider_dependencies("apache.hive"), get_provider_dependencies("apache.hdfs"), get_provider_dependencies("apache.hive"), get_provider_dependencies("apache.impala"), kerberos, get_provider_dependencies("presto"), webhdfs, ] ) # Those are all additional extras which do not have their own 'providers' # The 'apache.atlas' and 'apache.webhdfs' are extras that provide additional libraries # but they do not have separate providers (yet?), they are merely there to add extra libraries # That can be used in custom python/bash operators. ADDITIONAL_EXTRAS_DEPENDENCIES: dict[str, list[str]] = { "apache.atlas": atlas, "apache.webhdfs": webhdfs, } # Those are extras that are extensions of the 'core' Airflow. They provide additional features # To airflow core. They do not have separate providers because they do not have any operators/hooks etc. 
CORE_EXTRAS_DEPENDENCIES: dict[str, list[str]] = { "aiobotocore": aiobotocore, "async": async_packages, "celery": celery, "cgroups": cgroups, "cncf.kubernetes": kubernetes, "dask": dask, "deprecated_api": deprecated_api, "github_enterprise": flask_appbuilder_oauth, "google_auth": flask_appbuilder_oauth, "kerberos": kerberos, "ldap": ldap, "leveldb": leveldb, "otel": otel, "pandas": pandas, "password": password, "rabbitmq": rabbitmq, "sentry": sentry, "statsd": statsd, "virtualenv": virtualenv, } def filter_out_excluded_extras() -> Iterable[tuple[str, list[str]]]: for key, value in CORE_EXTRAS_DEPENDENCIES.items(): if value: yield key, value else: print(f"Removing extra {key} as it has been excluded") CORE_EXTRAS_DEPENDENCIES = dict(filter_out_excluded_extras()) EXTRAS_DEPENDENCIES: dict[str, list[str]] = deepcopy(CORE_EXTRAS_DEPENDENCIES) def add_extras_for_all_providers() -> None: for (provider_name, provider_dict) in PROVIDER_DEPENDENCIES.items(): EXTRAS_DEPENDENCIES[provider_name] = provider_dict[DEPS] def add_additional_extras() -> None: for (extra_name, extra_dependencies) in ADDITIONAL_EXTRAS_DEPENDENCIES.items(): EXTRAS_DEPENDENCIES[extra_name] = extra_dependencies add_extras_for_all_providers() add_additional_extras() ############################################################################################################# # The whole section can be removed in Airflow 3.0 as those old aliases are deprecated in 2.* series ############################################################################################################# # Dictionary of aliases from 1.10 - deprecated in Airflow 2.* EXTRAS_DEPRECATED_ALIASES: dict[str, str] = { "atlas": "apache.atlas", "aws": "amazon", "azure": "microsoft.azure", "cassandra": "apache.cassandra", "crypto": "", # this is legacy extra - all dependencies are already "install-requires" "druid": "apache.druid", "gcp": "google", "gcp_api": "google", "hdfs": "apache.hdfs", "hive": "apache.hive", "kubernetes": "cncf.kubernetes", "mssql": "microsoft.mssql", "pinot": "apache.pinot", "qds": "qubole", "s3": "amazon", "spark": "apache.spark", "webhdfs": "apache.webhdfs", "winrm": "microsoft.winrm", } EXTRAS_DEPRECATED_ALIASES_NOT_PROVIDERS: list[str] = [ "crypto", "webhdfs", ] EXTRAS_DEPRECATED_ALIASES_IGNORED_FROM_REF_DOCS: list[str] = [ "jira", ] def add_extras_for_all_deprecated_aliases() -> None: """ Add extras for all deprecated aliases. Requirements for those deprecated aliases are the same as the extras they are replaced with. The dependencies are not copies - those are the same lists as for the new extras. This is intended. Thanks to that if the original extras are later extended with providers, aliases are extended as well. """ for alias, extra in EXTRAS_DEPRECATED_ALIASES.items(): dependencies = EXTRAS_DEPENDENCIES.get(extra) if extra != "" else [] if dependencies is None: continue EXTRAS_DEPENDENCIES[alias] = dependencies def add_all_deprecated_provider_packages() -> None: """ For deprecated aliases that are providers, we will swap the providers dependencies to instead be the provider itself. e.g. 
{"kubernetes": ["kubernetes>=3.0.0, <12.0.0", ...]} becomes {"kubernetes": ["apache-airflow-provider-cncf-kubernetes"]} """ for alias, provider in EXTRAS_DEPRECATED_ALIASES.items(): if alias in EXTRAS_DEPRECATED_ALIASES_NOT_PROVIDERS: continue replace_extra_dependencies_with_provider_packages(alias, [provider]) add_extras_for_all_deprecated_aliases() ############################################################################################################# # End of deprecated section ############################################################################################################# # This is list of all providers. It's a shortcut for anyone who would like to easily get list of # All providers. It is used by pre-commits. ALL_PROVIDERS = list(PROVIDER_DEPENDENCIES.keys()) ALL_DB_PROVIDERS = [ "apache.cassandra", "apache.drill", "apache.druid", "apache.hdfs", "apache.hive", "apache.impala", "apache.pinot", "arangodb", "cloudant", "databricks", "exasol", "influxdb", "microsoft.mssql", "mongo", "mysql", "neo4j", "postgres", "presto", "trino", "vertica", ] def get_all_db_dependencies() -> list[str]: _all_db_reqs: set[str] = set() for provider in ALL_DB_PROVIDERS: if provider not in PROVIDER_DEPENDENCIES: continue for req in PROVIDER_DEPENDENCIES[provider][DEPS]: _all_db_reqs.add(req) return list(_all_db_reqs) # Special dependencies for all database-related providers. They are de-duplicated. all_dbs = get_all_db_dependencies() # All db user extras here EXTRAS_DEPENDENCIES["all_dbs"] = all_dbs # Requirements for all "user" extras (no devel). They are de-duplicated. Note that we do not need # to separately add providers dependencies - they have been already added as 'providers' extras above _all_dependencies = get_unique_dependency_list(EXTRAS_DEPENDENCIES.values()) _all_dependencies_without_airflow_providers = list( filter(lambda k: "apache-airflow-" not in k, _all_dependencies) ) # All user extras here # all is purely development extra and it should contain only direct dependencies of Airflow # It should contain all dependencies of airflow and dependencies of all community providers, # but not the providers themselves EXTRAS_DEPENDENCIES["all"] = _all_dependencies_without_airflow_providers # This can be simplified to devel_hadoop + _all_dependencies due to inclusions # but we keep it for explicit sake. We are de-duplicating it anyway. devel_all = get_unique_dependency_list( [_all_dependencies_without_airflow_providers, doc, doc_gen, devel, devel_hadoop] ) # Those are packages excluded for "all" dependencies PACKAGES_EXCLUDED_FOR_ALL: list[str] = [] def is_package_excluded(package: str, exclusion_list: list[str]) -> bool: """ Checks if package should be excluded. :param package: package name (beginning of it) :param exclusion_list: list of excluded packages :return: true if package should be excluded """ return any(package.startswith(excluded_package) for excluded_package in exclusion_list) def remove_provider_limits(package: str) -> str: """ Removes the limit for providers in devel_all to account for pre-release and development packages. 
:param package: package name (beginning of it) :return: true if package should be excluded """ return ( package.split(">=")[0] if package.startswith("apache-airflow-providers") and ">=" in package else package ) devel = [remove_provider_limits(package) for package in devel] devel_all = [ remove_provider_limits(package) for package in devel_all if not is_package_excluded(package=package, exclusion_list=PACKAGES_EXCLUDED_FOR_ALL) ] devel_hadoop = [remove_provider_limits(package) for package in devel_hadoop] devel_ci = devel_all # Those are extras that we have to add for development purposes # They can be use to install some predefined set of dependencies. EXTRAS_DEPENDENCIES["doc"] = doc EXTRAS_DEPENDENCIES["doc_gen"] = doc_gen EXTRAS_DEPENDENCIES["devel"] = devel # devel already includes doc EXTRAS_DEPENDENCIES["devel_hadoop"] = devel_hadoop # devel_hadoop already includes devel EXTRAS_DEPENDENCIES["devel_all"] = devel_all EXTRAS_DEPENDENCIES["devel_ci"] = devel_ci def sort_extras_dependencies() -> dict[str, list[str]]: """ The dictionary order remains when keys() are retrieved. Sort both: extras and list of dependencies to make it easier to analyse problems external packages will be first, then if providers are added they are added at the end of the lists. """ sorted_dependencies: dict[str, list[str]] = {} sorted_extra_ids = sorted(EXTRAS_DEPENDENCIES.keys()) for extra_id in sorted_extra_ids: sorted_dependencies[extra_id] = sorted(EXTRAS_DEPENDENCIES[extra_id]) return sorted_dependencies EXTRAS_DEPENDENCIES = sort_extras_dependencies() # Those providers are pre-installed always when airflow is installed. # Those providers do not have dependency on airflow2.0 because that would lead to circular dependencies. # This is not a problem for PIP but some tools (pipdeptree) show those as a warning. PREINSTALLED_PROVIDERS = [ # TODO: When we release 3.3.0 version of celery provider we should change it to "celery>=3.3.0" here # In order to make sure executors are available in the celery provider "celery", "common.sql", "ftp", "http", "imap", "sqlite", ] def get_provider_package_name_from_package_id(package_id: str) -> str: """ Builds the name of provider package out of the package id provided/. :param package_id: id of the package (like amazon or microsoft.azure) :return: full name of package in PyPI """ version_spec = "" if ">=" in package_id: package, version = package_id.split(">=") version_spec = f">={version}" version_suffix = os.environ.get("VERSION_SUFFIX_FOR_PYPI") if version_suffix: version_spec += version_suffix else: package = package_id package_suffix = package.replace(".", "-") return f"apache-airflow-providers-{package_suffix}{version_spec}" def get_excluded_providers() -> list[str]: """Returns packages excluded for the current python version.""" return [] def get_all_provider_packages() -> str: """Returns all provider packages configured in setup.py.""" excluded_providers = get_excluded_providers() return " ".join( get_provider_package_name_from_package_id(package) for package in ALL_PROVIDERS if package not in excluded_providers ) class AirflowDistribution(Distribution): """The setuptools.Distribution subclass with Airflow specific behaviour.""" def __init__(self, attrs=None): super().__init__(attrs) self.install_requires = None def parse_config_files(self, *args, **kwargs) -> None: """ Ensure that when we have been asked to install providers from sources that we don't *also* try to install those providers from PyPI. 
Also we should make sure that in this case we copy provider.yaml files so that Providers manager can find package information. """ super().parse_config_files(*args, **kwargs) if os.getenv(INSTALL_PROVIDERS_FROM_SOURCES) == "true": self.install_requires = [ req for req in self.install_requires if not req.startswith("apache-airflow-providers-") ] provider_yaml_files = glob.glob("airflow/providers/**/provider.yaml", recursive=True) for provider_yaml_file in provider_yaml_files: provider_relative_path = relpath(provider_yaml_file, str(AIRFLOW_SOURCES_ROOT / "airflow")) self.package_data["airflow"].append(provider_relative_path) else: self.install_requires.extend( [ get_provider_package_name_from_package_id(package_id) for package_id in PREINSTALLED_PROVIDERS ] ) def replace_extra_dependencies_with_provider_packages(extra: str, providers: list[str]) -> None: """ Replaces extra dependencies with provider package. The intention here is that when the provider is added as dependency of extra, there is no need to add the dependencies separately. This is not needed and even harmful, because in case of future versions of the provider, the dependencies might change, so hard-coding dependencies from the version that was available at the release time might cause dependency conflicts in the future. Say for example that you have salesforce provider with those deps: { 'salesforce': ['simple-salesforce>=1.0.0', 'tableauserverclient'] } Initially ['salesforce'] extra has those dependencies, and it works like that when you install it when INSTALL_PROVIDERS_FROM_SOURCES is set to `true` (during the development). However, when the production installation is used, The dependencies are changed: { 'salesforce': ['apache-airflow-providers-salesforce'] } And then, 'apache-airflow-providers-salesforce' package has those 'install_requires' dependencies: ['simple-salesforce>=1.0.0', 'tableauserverclient'] So transitively 'salesforce' extra has all the dependencies it needs and in case the provider changes its dependencies, they will transitively change as well. In the constraint mechanism we save both - provider versions and it's dependencies version, which means that installation using constraints is repeatable. For K8s and Celery which are both "Core executors" and "Providers" we have to add the base dependencies to core as well, in order to mitigate problems where newer version of provider will have less strict limits. This should be done for both extras and their deprecated aliases. This is not a full protection however, the way extras work, this will not add "hard" limits for Airflow and the user who does not use constraints. :param extra: Name of the extra to add providers to :param providers: list of provider ids """ if extra in ["cncf.kubernetes", "kubernetes", "celery"]: EXTRAS_DEPENDENCIES[extra].extend( [get_provider_package_name_from_package_id(package_name) for package_name in providers] ) elif extra == "apache.hive": # We moved the hive macros to the hive provider, and they are available in hive provider only as of # 5.1.0 version only, so we have to make sure minimum version is used EXTRAS_DEPENDENCIES[extra] = ["apache-airflow-providers-apache-hive>=5.1.0"] else: EXTRAS_DEPENDENCIES[extra] = [ get_provider_package_name_from_package_id(package_name) for package_name in providers ] def add_provider_packages_to_extra_dependencies(extra: str, providers: list[str]) -> None: """ Adds provider packages as dependencies to extra. This is used to add provider packages as dependencies to the "bulk" kind of extras. 
Those bulk extras do not have the detailed 'extra' dependencies as initial values, so instead of replacing them (see previous function) we can extend them. :param extra: Name of the extra to add providers to :param providers: list of provider ids """ EXTRAS_DEPENDENCIES[extra].extend( [get_provider_package_name_from_package_id(package_name) for package_name in providers] ) def add_all_provider_packages() -> None: """ In case of regular installation (providers installed from packages), we should add extra dependencies to Airflow - to get the providers automatically installed when those extras are installed. For providers installed from sources we skip that step. That helps to test and install airflow with all packages in CI - for example when new providers are added, otherwise the installation would fail as the new provider is not yet in PyPI. """ for provider_id in ALL_PROVIDERS: replace_extra_dependencies_with_provider_packages(provider_id, [provider_id]) add_provider_packages_to_extra_dependencies("all", ALL_PROVIDERS) add_provider_packages_to_extra_dependencies("devel_ci", ALL_PROVIDERS) add_provider_packages_to_extra_dependencies("devel_all", ALL_PROVIDERS) add_provider_packages_to_extra_dependencies("all_dbs", ALL_DB_PROVIDERS) add_provider_packages_to_extra_dependencies( "devel_hadoop", ["apache.hdfs", "apache.hive", "presto", "trino"] ) add_all_deprecated_provider_packages() class Develop(develop_orig): """Forces removal of providers in editable mode.""" def run(self) -> None: # type: ignore self.announce("Installing in editable mode. Uninstalling provider packages!", level=log.INFO) # We need to run "python3 -m pip" because it might be that older PIP binary is in the path # And it results with an error when running pip directly (cannot import pip module) # also PIP does not have a stable API so we have to run subprocesses ¯\_(ツ)_/¯ try: installed_packages = ( subprocess.check_output(["python3", "-m", "pip", "freeze"]).decode().splitlines() ) airflow_provider_packages = [ package_line.split("=")[0] for package_line in installed_packages if package_line.startswith("apache-airflow-providers") ] self.announce(f"Uninstalling ${airflow_provider_packages}!", level=log.INFO) subprocess.check_call(["python3", "-m", "pip", "uninstall", "--yes", *airflow_provider_packages]) except subprocess.CalledProcessError as e: self.announce(f"Error when uninstalling airflow provider packages: {e}!", level=log.WARN) super().run() class Install(install_orig): """Forces installation of providers from sources in editable mode.""" def run(self) -> None: self.announce("Standard installation. Providers are installed from packages", level=log.INFO) super().run() def do_setup() -> None: """ Perform the Airflow package setup. Most values come from setup.cfg, only the dynamically calculated ones are passed to setup function call. See https://setuptools.readthedocs.io/en/latest/userguide/declarative_config.html """ setup_kwargs = {} def include_provider_namespace_packages_when_installing_from_sources() -> None: """ When installing providers from sources we install all namespace packages found below airflow, including airflow and provider packages, otherwise defaults from setup.cfg control this. The kwargs in setup() call override those that are specified in setup.cfg. 
""" if os.getenv(INSTALL_PROVIDERS_FROM_SOURCES) == "true": setup_kwargs["packages"] = find_namespace_packages(include=["airflow*"]) include_provider_namespace_packages_when_installing_from_sources() if os.getenv(INSTALL_PROVIDERS_FROM_SOURCES) == "true": print("Installing providers from sources. Skip adding providers as dependencies") else: add_all_provider_packages() write_version() setup( distclass=AirflowDistribution, extras_require=EXTRAS_DEPENDENCIES, cmdclass={ "extra_clean": CleanCommand, "compile_assets": CompileAssets, "list_extras": ListExtras, "install": Install, # type: ignore "develop": Develop, }, test_suite="setup.airflow_test_suite", **setup_kwargs, # type: ignore ) if __name__ == "__main__": do_setup() # comment
36,110
36.189495
109
py
airflow
airflow-main/helm_tests/__init__.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
785
45.235294
62
py
airflow
airflow-main/helm_tests/security/test_scc_rolebinding.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart class TestSCCActivation: """Tests SCCs.""" @pytest.mark.parametrize( "rbac_enabled,scc_enabled,created", [ (False, False, False), (False, True, False), (True, True, True), (True, False, False), ], ) def test_create_scc(self, rbac_enabled, scc_enabled, created): docs = render_chart( values={ "multiNamespaceMode": False, "webserver": {"defaultUser": {"enabled": True}}, "cleanup": {"enabled": True}, "flower": {"enabled": True}, "rbac": {"create": rbac_enabled, "createSCCRoleBinding": scc_enabled}, }, show_only=["templates/rbac/security-context-constraint-rolebinding.yaml"], ) assert bool(docs) is created if created: assert "RoleBinding" == jmespath.search("kind", docs[0]) assert "ClusterRole" == jmespath.search("roleRef.kind", docs[0]) assert "release-name-scc-rolebinding" == jmespath.search("metadata.name", docs[0]) assert "system:openshift:scc:anyuid" == jmespath.search("roleRef.name", docs[0]) assert "release-name-airflow-webserver" == jmespath.search("subjects[0].name", docs[0]) assert "release-name-airflow-worker" == jmespath.search("subjects[1].name", docs[0]) assert "release-name-airflow-scheduler" == jmespath.search("subjects[2].name", docs[0]) assert "release-name-airflow-statsd" == jmespath.search("subjects[3].name", docs[0]) assert "release-name-airflow-flower" == jmespath.search("subjects[4].name", docs[0]) assert "release-name-airflow-triggerer" == jmespath.search("subjects[5].name", docs[0]) assert "release-name-airflow-migrate-database-job" == jmespath.search("subjects[6].name", docs[0]) assert "release-name-airflow-create-user-job" == jmespath.search("subjects[7].name", docs[0]) assert "release-name-airflow-cleanup" == jmespath.search("subjects[8].name", docs[0]) @pytest.mark.parametrize( "rbac_enabled,scc_enabled,created", [ (True, True, True), ], ) def test_create_scc_multinamespace(self, rbac_enabled, scc_enabled, created): docs = render_chart( values={ "multiNamespaceMode": True, "webserver": {"defaultUser": {"enabled": False}}, "cleanup": {"enabled": False}, "flower": {"enabled": False}, "rbac": {"create": rbac_enabled, "createSCCRoleBinding": scc_enabled}, }, show_only=["templates/rbac/security-context-constraint-rolebinding.yaml"], ) assert bool(docs) is created if created: assert "ClusterRoleBinding" == jmespath.search("kind", docs[0]) assert "ClusterRole" == jmespath.search("roleRef.kind", docs[0]) assert "release-name-scc-rolebinding" == jmespath.search("metadata.name", docs[0]) assert "system:openshift:scc:anyuid" == jmespath.search("roleRef.name", docs[0]) @pytest.mark.parametrize( "rbac_enabled,scc_enabled,created", [ (True, True, True), ], ) def 
test_create_scc_worker_only(self, rbac_enabled, scc_enabled, created): docs = render_chart( values={ "multiNamespaceMode": False, "webserver": {"defaultUser": {"enabled": False}}, "cleanup": {"enabled": False}, "flower": {"enabled": False}, "statsd": {"enabled": False}, "rbac": {"create": rbac_enabled, "createSCCRoleBinding": scc_enabled}, }, show_only=["templates/rbac/security-context-constraint-rolebinding.yaml"], ) assert bool(docs) is created if created: assert "RoleBinding" == jmespath.search("kind", docs[0]) assert "ClusterRole" == jmespath.search("roleRef.kind", docs[0]) assert "release-name-scc-rolebinding" == jmespath.search("metadata.name", docs[0]) assert "system:openshift:scc:anyuid" == jmespath.search("roleRef.name", docs[0]) assert "release-name-airflow-webserver" == jmespath.search("subjects[0].name", docs[0]) assert "release-name-airflow-worker" == jmespath.search("subjects[1].name", docs[0]) assert "release-name-airflow-scheduler" == jmespath.search("subjects[2].name", docs[0]) assert "release-name-airflow-triggerer" == jmespath.search("subjects[3].name", docs[0]) assert "release-name-airflow-migrate-database-job" == jmespath.search("subjects[4].name", docs[0])
5,694
46.458333
110
py
airflow
airflow-main/helm_tests/security/test_kerberos.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import json

import jmespath

from tests.charts.helm_template_generator import render_chart


class TestKerberos:
    """Tests kerberos."""

    def test_kerberos_not_mentioned_in_render_if_disabled(self):
        # the name is deliberately shorter as we look for "kerberos" in the rendered chart
        k8s_objects = render_chart(name="no-krbros", values={"kerberos": {"enabled": False}})
        # ignore airflow config map
        k8s_objects_to_consider = [
            obj for obj in k8s_objects if obj["metadata"]["name"] != "no-krbros-airflow-config"
        ]
        k8s_objects_to_consider_str = json.dumps(k8s_objects_to_consider)
        assert k8s_objects_to_consider_str.count("kerberos") == 1

    def test_kerberos_envs_available_in_worker_with_persistence(self):
        docs = render_chart(
            values={
                "executor": "CeleryExecutor",
                "workers": {
                    "kerberosSidecar": {"enabled": True},
                    "persistence": {
                        "enabled": True,
                    },
                },
                "kerberos": {
                    "enabled": True,
                    "configPath": "/etc/krb5.conf",
                    "ccacheMountPath": "/var/kerberos-ccache",
                    "ccacheFileName": "ccache",
                },
            },
            show_only=["templates/workers/worker-deployment.yaml"],
        )
        assert {"name": "KRB5_CONFIG", "value": "/etc/krb5.conf"} in jmespath.search(
            "spec.template.spec.containers[0].env", docs[0]
        )
        assert {"name": "KRB5CCNAME", "value": "/var/kerberos-ccache/ccache"} in jmespath.search(
            "spec.template.spec.containers[0].env", docs[0]
        )

    def test_kerberos_sidecar_resources(self):
        docs = render_chart(
            values={
                "executor": "CeleryExecutor",
                "workers": {
                    "kerberosSidecar": {
                        "enabled": True,
                        "resources": {
                            "requests": {
                                "cpu": "200m",
                                "memory": "200Mi",
                            },
                            "limits": {
                                "cpu": "201m",
                                "memory": "201Mi",
                            },
                        },
                    },
                },
            },
            show_only=["templates/workers/worker-deployment.yaml"],
        )
        assert jmespath.search("spec.template.spec.containers[2].resources.requests.cpu", docs[0]) == "200m"
        assert (
            jmespath.search("spec.template.spec.containers[2].resources.requests.memory", docs[0]) == "200Mi"
        )
        assert jmespath.search("spec.template.spec.containers[2].resources.limits.cpu", docs[0]) == "201m"
        assert jmespath.search("spec.template.spec.containers[2].resources.limits.memory", docs[0]) == "201Mi"

    def test_kerberos_sidecar_resources_are_not_added_by_default(self):
        docs = render_chart(
            show_only=["templates/workers/worker-deployment.yaml"],
        )
        assert jmespath.search("spec.template.spec.containers[0].resources", docs[0]) == {}

    def test_kerberos_keytab_exists_in_worker_when_enabled(self):
        docs = render_chart(
            values={
                "executor": "CeleryExecutor",
                "kerberos": {
                    "enabled": True,
                    "keytabBase64Content": "dGVzdGtleXRhYg==",
                    "configPath": "/etc/krb5.conf",
                    "ccacheMountPath": "/var/kerberos-ccache",
                    "ccacheFileName": "ccache",
                },
            },
            show_only=["templates/workers/worker-deployment.yaml"],
        )
        assert {
            "name": "kerberos-keytab",
            "subPath": "kerberos.keytab",
            "mountPath": "/etc/airflow.keytab",
            "readOnly": True,
        } in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0])

    def test_kerberos_keytab_secret_available(self):
        docs = render_chart(
            values={
                "executor": "CeleryExecutor",
                "kerberos": {
                    "enabled": True,
                    "keytabBase64Content": "dGVzdGtleXRhYg==",
                    "configPath": "/etc/krb5.conf",
                    "ccacheMountPath": "/var/kerberos-ccache",
                    "ccacheFileName": "ccache",
                },
            },
            show_only=["templates/secrets/kerberos-keytab-secret.yaml"],
        )
        assert jmespath.search('data."kerberos.keytab"', docs[0]) == "dGVzdGtleXRhYg=="

    def test_kerberos_keytab_secret_unavailable_when_not_specified(self):
        docs = render_chart(
            values={
                "executor": "CeleryExecutor",
                "kerberos": {
                    "enabled": True,
                    "configPath": "/etc/krb5.conf",
                    "ccacheMountPath": "/var/kerberos-ccache",
                    "ccacheFileName": "ccache",
                },
            },
            show_only=["templates/secrets/kerberos-keytab-secret.yaml"],
        )
        assert 0 == len(docs)
6,133
38.320513
110
py
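The kerberos tests above all follow the same render-and-assert pattern: render a single template with `render_chart`, then query the resulting manifest with `jmespath`. The snippet below is an illustrative sketch only, not part of the corpus file above; the values and template path are borrowed from `test_kerberos_envs_available_in_worker_with_persistence`.

import jmespath

from tests.charts.helm_template_generator import render_chart

# Render only the worker Deployment with kerberos enabled, then check the
# container environment, mirroring the assertions in the file above.
docs = render_chart(
    values={
        "executor": "CeleryExecutor",
        "workers": {"kerberosSidecar": {"enabled": True}},
        "kerberos": {"enabled": True, "configPath": "/etc/krb5.conf"},
    },
    show_only=["templates/workers/worker-deployment.yaml"],
)
env = jmespath.search("spec.template.spec.containers[0].env", docs[0])
assert {"name": "KRB5_CONFIG", "value": "/etc/krb5.conf"} in env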
airflow
airflow-main/helm_tests/security/test_extra_configmaps_secrets.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import textwrap from base64 import b64encode from unittest import mock import pytest import yaml from tests.charts.helm_template_generator import prepare_k8s_lookup_dict, render_chart RELEASE_NAME = "test-extra-configmaps-secrets" class TestExtraConfigMapsSecrets: """Tests extra configmaps and secrets.""" def test_extra_configmaps(self): values_str = textwrap.dedent( """ extraConfigMaps: "{{ .Release.Name }}-airflow-variables": data: | AIRFLOW_VAR_HELLO_MESSAGE: "Hi!" AIRFLOW_VAR_KUBERNETES_NAMESPACE: "{{ .Release.Namespace }}" "{{ .Release.Name }}-other-variables": data: | HELLO_WORLD: "Hi again!" """ ) values = yaml.safe_load(values_str) k8s_objects = render_chart( RELEASE_NAME, values=values, show_only=["templates/configmaps/extra-configmaps.yaml"] ) k8s_objects_by_key = prepare_k8s_lookup_dict(k8s_objects) all_expected_keys = [ ("ConfigMap", f"{RELEASE_NAME}-airflow-variables"), ("ConfigMap", f"{RELEASE_NAME}-other-variables"), ] assert set(k8s_objects_by_key.keys()) == set(all_expected_keys) all_expected_data = [ {"AIRFLOW_VAR_HELLO_MESSAGE": "Hi!", "AIRFLOW_VAR_KUBERNETES_NAMESPACE": "default"}, {"HELLO_WORLD": "Hi again!"}, ] for expected_key, expected_data in zip(all_expected_keys, all_expected_data): configmap_obj = k8s_objects_by_key[expected_key] assert configmap_obj["data"] == expected_data def test_extra_secrets(self): values_str = textwrap.dedent( """ extraSecrets: "{{ .Release.Name }}-airflow-connections": data: | AIRFLOW_CON_AWS: {{ printf "aws_connection_string" | b64enc }} stringData: | AIRFLOW_CON_GCP: "gcp_connection_string" "{{ .Release.Name }}-other-secrets": data: | MY_SECRET_1: {{ printf "MY_SECRET_1" | b64enc }} MY_SECRET_2: {{ printf "MY_SECRET_2" | b64enc }} stringData: | MY_SECRET_3: "MY_SECRET_3" MY_SECRET_4: "MY_SECRET_4" "{{ .Release.Name }}-other-secrets-with-type": type: kubernetes.io/dockerconfigjson data: | MY_SECRET_5: {{ printf "MY_SECRET_5" | b64enc }} MY_SECRET_6: {{ printf "MY_SECRET_6" | b64enc }} stringData: | MY_SECRET_7: "MY_SECRET_7" MY_SECRET_8: "MY_SECRET_8" """ ) values = yaml.safe_load(values_str) k8s_objects = render_chart( RELEASE_NAME, values=values, show_only=["templates/secrets/extra-secrets.yaml"] ) k8s_objects_by_key = prepare_k8s_lookup_dict(k8s_objects) all_expected_keys = [ ("Secret", f"{RELEASE_NAME}-airflow-connections"), ("Secret", f"{RELEASE_NAME}-other-secrets"), ("Secret", f"{RELEASE_NAME}-other-secrets-with-type"), ] assert set(k8s_objects_by_key.keys()) == set(all_expected_keys) all_expected_data = [ {"AIRFLOW_CON_AWS": b64encode(b"aws_connection_string").decode("utf-8")}, { "MY_SECRET_1": b64encode(b"MY_SECRET_1").decode("utf-8"), "MY_SECRET_2": b64encode(b"MY_SECRET_2").decode("utf-8"), }, { "MY_SECRET_5": 
b64encode(b"MY_SECRET_5").decode("utf-8"), "MY_SECRET_6": b64encode(b"MY_SECRET_6").decode("utf-8"), }, ] all_expected_string_data = [ {"AIRFLOW_CON_GCP": "gcp_connection_string"}, {"MY_SECRET_3": "MY_SECRET_3", "MY_SECRET_4": "MY_SECRET_4"}, {"MY_SECRET_7": "MY_SECRET_7", "MY_SECRET_8": "MY_SECRET_8"}, ] all_expected_types = [None, None, "kubernetes.io/dockerconfigjson"] for expected_key, expected_data, expected_string_data, expected_type in zip( all_expected_keys, all_expected_data, all_expected_string_data, all_expected_types ): configmap_obj = k8s_objects_by_key[expected_key] if expected_type: assert configmap_obj["type"] == expected_type else: assert "type" not in configmap_obj assert configmap_obj["data"] == expected_data assert configmap_obj["stringData"] == expected_string_data def test_extra_configmaps_secrets_labels(self): k8s_objects = render_chart( name=RELEASE_NAME, values={ "labels": {"label1": "value1", "label2": "value2"}, "extraSecrets": {"{{ .Release.Name }}-extra-secret-1": {"stringData": "data: secretData"}}, "extraConfigMaps": {"{{ .Release.Name }}-extra-configmap-1": {"data": "data: configData"}}, }, show_only=["templates/configmaps/extra-configmaps.yaml", "templates/secrets/extra-secrets.yaml"], ) expected_labels = { "label1": "value1", "label2": "value2", "release": RELEASE_NAME, "heritage": "Helm", "chart": mock.ANY, } for k8s_object in k8s_objects: assert k8s_object["metadata"]["labels"] == expected_labels @pytest.mark.parametrize( "chart_labels, local_labels", [ ({}, {"label3": "value3", "label4": "value4"}), ({"label1": "value1", "label2": "value2"}, {}), ({"label1": "value1", "label2": "value2"}, {"label3": "value3", "label4": "value4"}), ], ) def test_extra_configmaps_secrets_additional_labels(self, chart_labels, local_labels): k8s_objects = render_chart( name=RELEASE_NAME, values={ "labels": chart_labels, "extraSecrets": { "{{ .Release.Name }}-extra-secret-1": { "labels": local_labels, "stringData": "data: secretData", } }, "extraConfigMaps": { "{{ .Release.Name }}-extra-configmap-1": { "labels": local_labels, "data": "data: configData", } }, }, show_only=["templates/configmaps/extra-configmaps.yaml", "templates/secrets/extra-secrets.yaml"], ) common_labels = { "release": RELEASE_NAME, "heritage": "Helm", "chart": mock.ANY, } for k8s_object in k8s_objects: assert k8s_object["metadata"]["labels"] == {**common_labels, **chart_labels, **local_labels} def test_extra_configmaps_secrets_additional_annotations(self): k8s_objects = render_chart( name=RELEASE_NAME, values={ "extraSecrets": { "{{ .Release.Name }}-extra-secret-1": { "annotations": {"test_annotation": "test_annotation_value"}, "stringData": "data: secretData", } }, "extraConfigMaps": { "{{ .Release.Name }}-extra-configmap-1": { "annotations": {"test_annotation": "test_annotation_value"}, "data": "data: configData", } }, }, show_only=["templates/configmaps/extra-configmaps.yaml", "templates/secrets/extra-secrets.yaml"], ) expected_annotations = { "helm.sh/hook": "pre-install,pre-upgrade", "helm.sh/hook-delete-policy": "before-hook-creation", "helm.sh/hook-weight": "0", "test_annotation": "test_annotation_value", } for k8s_object in k8s_objects: assert k8s_object["metadata"]["annotations"] == expected_annotations
9,037
40.081818
109
py
airflow
airflow-main/helm_tests/security/test_metadata_connection_secret.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import base64 import jmespath from tests.charts.helm_template_generator import render_chart class TestMetadataConnectionSecret: """Tests metadata connection secret.""" non_chart_database_values = { "user": "someuser", "pass": "somepass", "host": "somehost", "port": 7777, "db": "somedb", } def test_should_not_generate_a_document_if_using_existing_secret(self): docs = render_chart( values={"data": {"metadataSecretName": "foo"}}, show_only=["templates/secrets/metadata-connection-secret.yaml"], ) assert 0 == len(docs) def _get_connection(self, values: dict) -> str: docs = render_chart( values=values, show_only=["templates/secrets/metadata-connection-secret.yaml"], ) encoded_connection = jmespath.search("data.connection", docs[0]) return base64.b64decode(encoded_connection).decode() def test_default_connection(self): connection = self._get_connection({}) assert ( "postgresql://postgres:[email protected]:5432/postgres?sslmode=disable" == connection ) def test_should_set_pgbouncer_overrides_when_enabled(self): values = {"pgbouncer": {"enabled": True}} connection = self._get_connection(values) # host, port, dbname get overridden assert ( "postgresql://postgres:[email protected]:6543" "/release-name-metadata?sslmode=disable" == connection ) def test_should_set_pgbouncer_overrides_with_non_chart_database_when_enabled(self): values = { "pgbouncer": {"enabled": True}, "data": {"metadataConnection": {**self.non_chart_database_values}}, } connection = self._get_connection(values) # host, port, dbname still get overridden even with an non-chart db assert ( "postgresql://someuser:[email protected]:6543" "/release-name-metadata?sslmode=disable" == connection ) def test_should_correctly_use_non_chart_database(self): values = { "data": { "metadataConnection": { **self.non_chart_database_values, "sslmode": "require", } } } connection = self._get_connection(values) assert "postgresql://someuser:somepass@somehost:7777/somedb?sslmode=require" == connection def test_should_support_non_postgres_db(self): values = { "data": { "metadataConnection": { **self.non_chart_database_values, "protocol": "mysql", } } } connection = self._get_connection(values) # sslmode is only added for postgresql assert "mysql://someuser:somepass@somehost:7777/somedb" == connection def test_should_correctly_handle_password_with_special_characters(self): values = { "data": { "metadataConnection": { **self.non_chart_database_values, "user": "username@123123", "pass": "password@!@#$^&*()", } } } connection = self._get_connection(values) # sslmode is only added for postgresql assert ( "postgresql://username%40123123:password%40%21%40%23$%5E&%2A%28%29@somehost:7777/" "somedb?sslmode=disable" == connection )
4,457
33.828125
106
py
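The `_get_connection` helper in the file above captures the decoding step that all of these secret tests rely on: the rendered Secret stores the connection URI base64-encoded under `data.connection`. A minimal, illustrative sketch of that step in isolation (not part of the file above) could look like this.

import base64

import jmespath

from tests.charts.helm_template_generator import render_chart

# Render the metadata connection Secret with default values and decode the
# base64-encoded "connection" key, as _get_connection does in the file above.
docs = render_chart(
    values={},
    show_only=["templates/secrets/metadata-connection-secret.yaml"],
)
encoded_connection = jmespath.search("data.connection", docs[0])
connection = base64.b64decode(encoded_connection).decode()
assert connection.startswith("postgresql://")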
airflow
airflow-main/helm_tests/security/test_rbac_pod_log_reader.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath
import pytest

from tests.charts.helm_template_generator import render_chart


class TestPodReader:
    """Tests RBAC Pod Reader."""

    @pytest.mark.parametrize(
        "triggerer, webserver, expected",
        [
            (True, True, ["release-name-airflow-webserver", "release-name-airflow-triggerer"]),
            (True, False, ["release-name-airflow-triggerer"]),
            (False, True, ["release-name-airflow-webserver"]),
            (False, False, []),
        ],
    )
    def test_pod_log_reader_rolebinding(self, triggerer, webserver, expected):
        docs = render_chart(
            values={
                "triggerer": {"enabled": triggerer},
                "webserver": {"allowPodLogReading": webserver},
            },
            show_only=["templates/rbac/pod-log-reader-rolebinding.yaml"],
        )
        actual = jmespath.search("subjects[*].name", docs[0]) if docs else []
        assert actual == expected

    @pytest.mark.parametrize(
        "triggerer, webserver, expected",
        [
            (True, True, "release-name-pod-log-reader-role"),
            (True, False, "release-name-pod-log-reader-role"),
            (False, True, "release-name-pod-log-reader-role"),
            (False, False, None),
        ],
    )
    def test_pod_log_reader_role(self, triggerer, webserver, expected):
        docs = render_chart(
            values={
                "triggerer": {"enabled": triggerer},
                "webserver": {"allowPodLogReading": webserver},
            },
            show_only=["templates/rbac/pod-log-reader-role.yaml"],
        )
        actual = jmespath.search("metadata.name", docs[0]) if docs else None
        assert actual == expected
2,545
37
95
py
airflow
airflow-main/helm_tests/security/test_security_context.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath from tests.charts.helm_template_generator import render_chart class TestSCBackwardsCompatibility: """Tests SC Backward Compatibility.""" def test_check_deployments_and_jobs(self): docs = render_chart( values={ "uid": 3000, "gid": 30, "webserver": {"defaultUser": {"enabled": True}}, "flower": {"enabled": True}, "airflowVersion": "2.2.0", "executor": "CeleryKubernetesExecutor", }, show_only=[ "templates/flower/flower-deployment.yaml", "templates/scheduler/scheduler-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/jobs/create-user-job.yaml", "templates/jobs/migrate-database-job.yaml", ], ) for index in range(len(docs)): assert 3000 == jmespath.search("spec.template.spec.securityContext.runAsUser", docs[index]) assert 30 == jmespath.search("spec.template.spec.securityContext.fsGroup", docs[index]) def test_check_statsd_uid(self): docs = render_chart( values={"statsd": {"enabled": True, "uid": 3000}}, show_only=["templates/statsd/statsd-deployment.yaml"], ) assert 3000 == jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0]) def test_check_cleanup_job(self): docs = render_chart( values={"uid": 3000, "gid": 30, "cleanup": {"enabled": True}}, show_only=["templates/cleanup/cleanup-cronjob.yaml"], ) assert 3000 == jmespath.search( "spec.jobTemplate.spec.template.spec.securityContext.runAsUser", docs[0] ) assert 30 == jmespath.search("spec.jobTemplate.spec.template.spec.securityContext.fsGroup", docs[0]) def test_gitsync_sidecar_and_init_container(self): docs = render_chart( values={ "dags": {"gitSync": {"enabled": True, "uid": 3000}}, "airflowVersion": "1.10.15", }, show_only=[ "templates/workers/worker-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/scheduler/scheduler-deployment.yaml", ], ) for index in range(len(docs)): assert "git-sync" in [ c["name"] for c in jmespath.search("spec.template.spec.containers", docs[index]) ] assert "git-sync-init" in [ c["name"] for c in jmespath.search("spec.template.spec.initContainers", docs[index]) ] assert 3000 == jmespath.search( "spec.template.spec.initContainers[?name=='git-sync-init'].securityContext.runAsUser | [0]", docs[index], ) assert 3000 == jmespath.search( "spec.template.spec.containers[?name=='git-sync'].securityContext.runAsUser | [0]", docs[index], ) class TestSecurityContext: """Tests security context.""" # Test securityContext setting for Pods and Containers def test_check_default_setting(self): docs = render_chart( values={ "securityContext": {"runAsUser": 6000, "fsGroup": 60}, "webserver": {"defaultUser": {"enabled": True}}, "flower": {"enabled": True}, 
"statsd": {"enabled": False}, "airflowVersion": "2.2.0", "executor": "CeleryKubernetesExecutor", }, show_only=[ "templates/flower/flower-deployment.yaml", "templates/scheduler/scheduler-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/jobs/create-user-job.yaml", "templates/jobs/migrate-database-job.yaml", ], ) for index in range(len(docs)): print(docs[index]) assert 6000 == jmespath.search("spec.template.spec.securityContext.runAsUser", docs[index]) assert 60 == jmespath.search("spec.template.spec.securityContext.fsGroup", docs[index]) # Test priority: # <local>.securityContext > securityContext > uid + gid def test_check_local_setting(self): component_contexts = {"securityContext": {"runAsUser": 9000, "fsGroup": 90}} docs = render_chart( values={ "uid": 3000, "gid": 30, "securityContext": {"runAsUser": 6000, "fsGroup": 60}, "webserver": {"defaultUser": {"enabled": True}, **component_contexts}, "workers": {**component_contexts}, "flower": {"enabled": True, **component_contexts}, "scheduler": {**component_contexts}, "createUserJob": {**component_contexts}, "migrateDatabaseJob": {**component_contexts}, "triggerer": {**component_contexts}, "redis": {**component_contexts}, "statsd": {"enabled": True, **component_contexts}, "airflowVersion": "2.2.0", "executor": "CeleryKubernetesExecutor", }, show_only=[ "templates/flower/flower-deployment.yaml", "templates/scheduler/scheduler-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/jobs/create-user-job.yaml", "templates/jobs/migrate-database-job.yaml", "templates/statsd/statsd-deployment.yaml", "templates/redis/redis-statefulset.yaml", ], ) for index in range(len(docs)): print(docs[index]) assert 9000 == jmespath.search("spec.template.spec.securityContext.runAsUser", docs[index]) assert 90 == jmespath.search("spec.template.spec.securityContext.fsGroup", docs[index]) # Test containerSecurity priority over uid under components using localSecurityContext def test_check_local_uid(self): component_contexts = {"uid": 3000, "securityContext": {"runAsUser": 7000}} docs = render_chart( values={ "redis": {**component_contexts}, "statsd": {"enabled": True, **component_contexts}, }, show_only=[ "templates/statsd/statsd-deployment.yaml", "templates/redis/redis-statefulset.yaml", ], ) for doc in docs: assert 7000 == jmespath.search("spec.template.spec.securityContext.runAsUser", doc) # Test containerSecurity priority over uid under dags.gitSync def test_gitsync_sidecar_and_init_container(self): docs = render_chart( values={ "dags": {"gitSync": {"enabled": True, "uid": 9000, "securityContext": {"runAsUser": 8000}}}, "airflowVersion": "1.10.15", }, show_only=[ "templates/workers/worker-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/scheduler/scheduler-deployment.yaml", ], ) for index in range(len(docs)): assert "git-sync" in [ c["name"] for c in jmespath.search("spec.template.spec.containers", docs[index]) ] assert "git-sync-init" in [ c["name"] for c in jmespath.search("spec.template.spec.initContainers", docs[index]) ] assert 8000 == jmespath.search( "spec.template.spec.initContainers[?name=='git-sync-init'].securityContext.runAsUser | [0]", docs[index], ) assert 8000 == jmespath.search( "spec.template.spec.containers[?name=='git-sync'].securityContext.runAsUser | [0]", docs[index], ) # Test 
securityContexts for main containers def test_global_security_context(self): ctx_value_pod = {"runAsUser": 7000} ctx_value_container = {"allowPrivilegeEscalation": False} docs = render_chart( values={"securityContexts": {"containers": ctx_value_container, "pod": ctx_value_pod}}, show_only=[ "templates/flower/flower-deployment.yaml", "templates/scheduler/scheduler-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/jobs/create-user-job.yaml", "templates/jobs/migrate-database-job.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/statsd/statsd-deployment.yaml", "templates/redis/redis-statefulset.yaml", ], ) for index in range(len(docs) - 2): assert ctx_value_container == jmespath.search( "spec.template.spec.containers[0].securityContext", docs[index] ) assert ctx_value_pod == jmespath.search("spec.template.spec.securityContext", docs[index]) # Global security context is not propagated to redis and statsd, so we test default value default_ctx_value_container = {"allowPrivilegeEscalation": False, "capabilities": {"drop": ["ALL"]}} default_ctx_value_pod_statsd = {"runAsUser": 65534} default_ctx_value_pod_redis = {"runAsUser": 0} for index in range(len(docs) - 2, len(docs)): assert default_ctx_value_container == jmespath.search( "spec.template.spec.containers[0].securityContext", docs[index] ) assert default_ctx_value_pod_statsd == jmespath.search( "spec.template.spec.securityContext", docs[len(docs) - 2] ) assert default_ctx_value_pod_redis == jmespath.search( "spec.template.spec.securityContext", docs[len(docs) - 1] ) # Test securityContexts for main containers def test_main_container_setting(self): ctx_value = {"allowPrivilegeEscalation": False} security_context = {"securityContexts": {"container": ctx_value}} docs = render_chart( values={ "scheduler": {**security_context}, "webserver": {**security_context}, "workers": {**security_context}, "flower": {**security_context}, "statsd": {**security_context}, "createUserJob": {**security_context}, "migrateDatabaseJob": {**security_context}, "triggerer": {**security_context}, "pgbouncer": {**security_context}, "redis": {**security_context}, }, show_only=[ "templates/flower/flower-deployment.yaml", "templates/scheduler/scheduler-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/statsd/statsd-deployment.yaml", "templates/jobs/create-user-job.yaml", "templates/jobs/migrate-database-job.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/pgbouncer/pgbouncer-deployment.yaml", "templates/redis/redis-statefulset.yaml", ], ) for index in range(len(docs)): assert ctx_value == jmespath.search( "spec.template.spec.containers[0].securityContext", docs[index] ) # Test securityContexts for log-groomer-sidecar main container def test_log_groomer_sidecar_container_setting(self): ctx_value = {"allowPrivilegeEscalation": False} spec = {"logGroomerSidecar": {"securityContexts": {"container": ctx_value}}} docs = render_chart( values={ "scheduler": {**spec}, "workers": {**spec}, }, show_only=[ "templates/scheduler/scheduler-deployment.yaml", "templates/workers/worker-deployment.yaml", ], ) for index in range(len(docs)): assert ctx_value == jmespath.search( "spec.template.spec.containers[1].securityContext", docs[index] ) # Test securityContexts for metrics-explorer main container def test_metrics_explorer_container_setting(self): ctx_value = {"allowPrivilegeEscalation": False} docs = render_chart( 
values={ "pgbouncer": { "enabled": True, "metricsExporterSidecar": {"securityContexts": {"container": ctx_value}}, }, }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert ctx_value == jmespath.search("spec.template.spec.containers[1].securityContext", docs[0]) # Test securityContexts for worker-kerberos main container def test_worker_kerberos_container_setting(self): ctx_value = {"allowPrivilegeEscalation": False} docs = render_chart( values={ "workers": { "kerberosSidecar": {"enabled": True, "securityContexts": {"container": ctx_value}} }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert ctx_value == jmespath.search("spec.template.spec.containers[2].securityContext", docs[0]) # Test securityContexts for the wait-for-migrations init containers def test_wait_for_migrations_init_container_setting(self): ctx_value = {"allowPrivilegeEscalation": False} spec = { "waitForMigrations": { "enabled": True, "securityContexts": {"container": ctx_value}, } } docs = render_chart( values={ "scheduler": {**spec}, "webserver": {**spec}, "triggerer": {**spec}, "workers": {"waitForMigrations": {"securityContexts": {"container": ctx_value}}}, }, show_only=[ "templates/scheduler/scheduler-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/workers/worker-deployment.yaml", ], ) for index in range(len(docs)): assert ctx_value == jmespath.search( "spec.template.spec.initContainers[0].securityContext", docs[index] ) # Test securityContexts for volume-permissions init container def test_volume_permissions_init_container_setting(self): docs = render_chart( values={ "workers": { "persistence": { "enabled": True, "fixPermissions": True, "securityContexts": {"container": {"allowPrivilegeEscalation": False}}, } } }, show_only=["templates/workers/worker-deployment.yaml"], ) expected_ctx = { "allowPrivilegeEscalation": False, } assert expected_ctx == jmespath.search( "spec.template.spec.initContainers[0].securityContext", docs[0] ) # Test securityContexts for main pods def test_main_pod_setting(self): ctx_value = {"runAsUser": 7000} security_context = {"securityContexts": {"pod": ctx_value}} docs = render_chart( values={ "scheduler": {**security_context}, "webserver": {**security_context}, "workers": {**security_context}, "flower": {**security_context}, "statsd": {**security_context}, "createUserJob": {**security_context}, "migrateDatabaseJob": {**security_context}, "triggerer": {**security_context}, "pgbouncer": {**security_context}, "redis": {**security_context}, }, show_only=[ "templates/flower/flower-deployment.yaml", "templates/scheduler/scheduler-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/statsd/statsd-deployment.yaml", "templates/jobs/create-user-job.yaml", "templates/jobs/migrate-database-job.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/pgbouncer/pgbouncer-deployment.yaml", "templates/redis/redis-statefulset.yaml", ], ) for index in range(len(docs)): assert ctx_value == jmespath.search("spec.template.spec.securityContext", docs[index]) # Test securityContexts for main pods def test_main_pod_setting_legacy_security(self): ctx_value = {"runAsUser": 7000} security_context = {"securityContext": ctx_value} docs = render_chart( values={ "scheduler": {**security_context}, "webserver": {**security_context}, "workers": {**security_context}, "flower": {**security_context}, "statsd": {**security_context}, 
"createUserJob": {**security_context}, "migrateDatabaseJob": {**security_context}, "triggerer": {**security_context}, "redis": {**security_context}, }, show_only=[ "templates/flower/flower-deployment.yaml", "templates/scheduler/scheduler-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/statsd/statsd-deployment.yaml", "templates/jobs/create-user-job.yaml", "templates/jobs/migrate-database-job.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/redis/redis-statefulset.yaml", ], ) for index in range(len(docs)): assert ctx_value == jmespath.search("spec.template.spec.securityContext", docs[index])
19,827
42.292576
108
py
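The precedence these security-context tests exercise is `<component>.securityContext` over the chart-level `securityContext`, which in turn overrides the legacy `uid`/`gid` values. As an illustrative sketch (not part of the file above), a single-component check of that precedence reduces to the following; the values and assertion mirror `test_check_local_setting`.

import jmespath

from tests.charts.helm_template_generator import render_chart

# A chart-level securityContext and a scheduler-level one are both set;
# the scheduler-level runAsUser should win in the rendered pod spec.
docs = render_chart(
    values={
        "uid": 3000,
        "securityContext": {"runAsUser": 6000},
        "scheduler": {"securityContext": {"runAsUser": 9000}},
    },
    show_only=["templates/scheduler/scheduler-deployment.yaml"],
)
assert jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0]) == 9000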
airflow
airflow-main/helm_tests/security/test_rbac.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations from copy import copy import jmespath import pytest from tests.charts.helm_template_generator import render_chart DEPLOYMENT_NO_RBAC_NO_SA_KIND_NAME_TUPLES = [ ("Secret", "test-rbac-postgresql"), ("Secret", "test-rbac-airflow-metadata"), ("Secret", "test-rbac-pgbouncer-config"), ("Secret", "test-rbac-pgbouncer-stats"), ("ConfigMap", "test-rbac-airflow-config"), ("ConfigMap", "test-rbac-statsd"), ("Service", "test-rbac-postgresql-hl"), ("Service", "test-rbac-postgresql"), ("Service", "test-rbac-statsd"), ("Service", "test-rbac-webserver"), ("Service", "test-rbac-flower"), ("Service", "test-rbac-pgbouncer"), ("Service", "test-rbac-redis"), ("Service", "test-rbac-worker"), ("Deployment", "test-rbac-scheduler"), ("Deployment", "test-rbac-statsd"), ("Deployment", "test-rbac-webserver"), ("Deployment", "test-rbac-flower"), ("Deployment", "test-rbac-pgbouncer"), ("StatefulSet", "test-rbac-postgresql"), ("StatefulSet", "test-rbac-redis"), ("StatefulSet", "test-rbac-worker"), ("Secret", "test-rbac-broker-url"), ("Secret", "test-rbac-fernet-key"), ("Secret", "test-rbac-redis-password"), ("Secret", "test-rbac-webserver-secret-key"), ("Job", "test-rbac-create-user"), ("Job", "test-rbac-run-airflow-migrations"), ("CronJob", "test-rbac-cleanup"), ] RBAC_ENABLED_KIND_NAME_TUPLES = [ ("Role", "test-rbac-pod-launcher-role"), ("Role", "test-rbac-cleanup-role"), ("Role", "test-rbac-pod-log-reader-role"), ("RoleBinding", "test-rbac-pod-launcher-rolebinding"), ("RoleBinding", "test-rbac-pod-log-reader-rolebinding"), ("RoleBinding", "test-rbac-cleanup-rolebinding"), ] SERVICE_ACCOUNT_NAME_TUPLES = [ ("ServiceAccount", "test-rbac-cleanup"), ("ServiceAccount", "test-rbac-scheduler"), ("ServiceAccount", "test-rbac-webserver"), ("ServiceAccount", "test-rbac-worker"), ("ServiceAccount", "test-rbac-triggerer"), ("ServiceAccount", "test-rbac-pgbouncer"), ("ServiceAccount", "test-rbac-flower"), ("ServiceAccount", "test-rbac-statsd"), ("ServiceAccount", "test-rbac-create-user-job"), ("ServiceAccount", "test-rbac-migrate-database-job"), ("ServiceAccount", "test-rbac-redis"), ] CUSTOM_SERVICE_ACCOUNT_NAMES = ( CUSTOM_SCHEDULER_NAME, CUSTOM_WEBSERVER_NAME, CUSTOM_WORKER_NAME, CUSTOM_TRIGGERER_NAME, CUSTOM_CLEANUP_NAME, CUSTOM_FLOWER_NAME, CUSTOM_PGBOUNCER_NAME, CUSTOM_STATSD_NAME, CUSTOM_CREATE_USER_JOBS_NAME, CUSTOM_MIGRATE_DATABASE_JOBS_NAME, CUSTOM_REDIS_NAME, CUSTOM_POSTGRESQL_NAME, ) = ( "TestScheduler", "TestWebserver", "TestWorker", "TestTriggerer", "TestCleanup", "TestFlower", "TestPGBouncer", "TestStatsd", "TestCreateUserJob", "TestMigrateDatabaseJob", "TestRedis", "TestPostgresql", ) class TestRBAC: """Tests RBAC.""" def _get_values_with_version(self, values, version): if version != "default": 
values["airflowVersion"] = version return values @staticmethod def _get_object_tuples(version): tuples = copy(DEPLOYMENT_NO_RBAC_NO_SA_KIND_NAME_TUPLES) if version == "default": tuples.append(("Service", "test-rbac-triggerer")) tuples.append(("StatefulSet", "test-rbac-triggerer")) else: tuples.append(("Deployment", "test-rbac-triggerer")) if version == "2.3.2": tuples.append(("Secret", "test-rbac-airflow-result-backend")) return tuples @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_deployments_no_rbac_no_sa(self, version): k8s_objects = render_chart( "test-rbac", values=self._get_values_with_version( values={ "fullnameOverride": "test-rbac", "rbac": {"create": False}, "cleanup": { "enabled": True, "serviceAccount": { "create": False, }, }, "pgbouncer": { "enabled": True, "serviceAccount": { "create": False, }, }, "redis": {"serviceAccount": {"create": False}}, "scheduler": {"serviceAccount": {"create": False}}, "webserver": {"serviceAccount": {"create": False}}, "workers": {"serviceAccount": {"create": False}}, "triggerer": {"serviceAccount": {"create": False}}, "statsd": {"serviceAccount": {"create": False}}, "createUserJob": {"serviceAccount": {"create": False}}, "migrateDatabaseJob": {"serviceAccount": {"create": False}}, "flower": {"enabled": True, "serviceAccount": {"create": False}}, }, version=version, ), ) list_of_kind_names_tuples = [ (k8s_object["kind"], k8s_object["metadata"]["name"]) for k8s_object in k8s_objects ] assert sorted(list_of_kind_names_tuples) == sorted(self._get_object_tuples(version)) @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_deployments_no_rbac_with_sa(self, version): k8s_objects = render_chart( "test-rbac", values=self._get_values_with_version( values={ "fullnameOverride": "test-rbac", "rbac": {"create": False}, "cleanup": {"enabled": True}, "flower": {"enabled": True}, "pgbouncer": {"enabled": True}, }, version=version, ), ) list_of_kind_names_tuples = [ (k8s_object["kind"], k8s_object["metadata"]["name"]) for k8s_object in k8s_objects ] real_list_of_kind_names = self._get_object_tuples(version) + SERVICE_ACCOUNT_NAME_TUPLES assert sorted(list_of_kind_names_tuples) == sorted(real_list_of_kind_names) @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_deployments_with_rbac_no_sa(self, version): k8s_objects = render_chart( "test-rbac", values=self._get_values_with_version( values={ "fullnameOverride": "test-rbac", "cleanup": { "enabled": True, "serviceAccount": { "create": False, }, }, "scheduler": {"serviceAccount": {"create": False}}, "webserver": {"serviceAccount": {"create": False}}, "workers": {"serviceAccount": {"create": False}}, "triggerer": {"serviceAccount": {"create": False}}, "flower": {"enabled": True, "serviceAccount": {"create": False}}, "statsd": {"serviceAccount": {"create": False}}, "redis": {"serviceAccount": {"create": False}}, "pgbouncer": { "enabled": True, "serviceAccount": { "create": False, }, }, "createUserJob": {"serviceAccount": {"create": False}}, "migrateDatabaseJob": {"serviceAccount": {"create": False}}, }, version=version, ), ) list_of_kind_names_tuples = [ (k8s_object["kind"], k8s_object["metadata"]["name"]) for k8s_object in k8s_objects ] real_list_of_kind_names = self._get_object_tuples(version) + RBAC_ENABLED_KIND_NAME_TUPLES assert sorted(list_of_kind_names_tuples) == sorted(real_list_of_kind_names) @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_deployments_with_rbac_with_sa(self, version): k8s_objects = 
render_chart( "test-rbac", values=self._get_values_with_version( values={ "fullnameOverride": "test-rbac", "cleanup": {"enabled": True}, "flower": {"enabled": True}, "pgbouncer": {"enabled": True}, }, version=version, ), ) list_of_kind_names_tuples = [ (k8s_object["kind"], k8s_object["metadata"]["name"]) for k8s_object in k8s_objects ] real_list_of_kind_names = ( self._get_object_tuples(version) + SERVICE_ACCOUNT_NAME_TUPLES + RBAC_ENABLED_KIND_NAME_TUPLES ) assert sorted(list_of_kind_names_tuples) == sorted(real_list_of_kind_names) def test_service_account_custom_names(self): k8s_objects = render_chart( "test-rbac", values={ "fullnameOverride": "test-rbac", "cleanup": { "enabled": True, "serviceAccount": { "name": CUSTOM_CLEANUP_NAME, }, }, "scheduler": {"serviceAccount": {"name": CUSTOM_SCHEDULER_NAME}}, "webserver": {"serviceAccount": {"name": CUSTOM_WEBSERVER_NAME}}, "workers": {"serviceAccount": {"name": CUSTOM_WORKER_NAME}}, "triggerer": {"serviceAccount": {"name": CUSTOM_TRIGGERER_NAME}}, "flower": {"enabled": True, "serviceAccount": {"name": CUSTOM_FLOWER_NAME}}, "statsd": {"serviceAccount": {"name": CUSTOM_STATSD_NAME}}, "redis": {"serviceAccount": {"name": CUSTOM_REDIS_NAME}}, "postgresql": {"serviceAccount": {"create": True, "name": CUSTOM_POSTGRESQL_NAME}}, "pgbouncer": { "enabled": True, "serviceAccount": { "name": CUSTOM_PGBOUNCER_NAME, }, }, "createUserJob": {"serviceAccount": {"name": CUSTOM_CREATE_USER_JOBS_NAME}}, "migrateDatabaseJob": {"serviceAccount": {"name": CUSTOM_MIGRATE_DATABASE_JOBS_NAME}}, }, ) list_of_sa_names = [ k8s_object["metadata"]["name"] for k8s_object in k8s_objects if k8s_object["kind"] == "ServiceAccount" ] assert sorted(list_of_sa_names) == sorted(CUSTOM_SERVICE_ACCOUNT_NAMES) def test_service_account_custom_names_in_objects(self): k8s_objects = render_chart( "test-rbac", values={ "fullnameOverride": "test-rbac", "cleanup": { "enabled": True, "serviceAccount": { "name": CUSTOM_CLEANUP_NAME, }, }, "scheduler": {"serviceAccount": {"name": CUSTOM_SCHEDULER_NAME}}, "webserver": {"serviceAccount": {"name": CUSTOM_WEBSERVER_NAME}}, "workers": {"serviceAccount": {"name": CUSTOM_WORKER_NAME}}, "triggerer": {"serviceAccount": {"name": CUSTOM_TRIGGERER_NAME}}, "flower": {"enabled": True, "serviceAccount": {"name": CUSTOM_FLOWER_NAME}}, "statsd": {"serviceAccount": {"name": CUSTOM_STATSD_NAME}}, "redis": {"serviceAccount": {"name": CUSTOM_REDIS_NAME}}, "postgresql": {"serviceAccount": {"name": CUSTOM_POSTGRESQL_NAME}}, "pgbouncer": { "enabled": True, "serviceAccount": { "name": CUSTOM_PGBOUNCER_NAME, }, }, "createUserJob": {"serviceAccount": {"name": CUSTOM_CREATE_USER_JOBS_NAME}}, "migrateDatabaseJob": {"serviceAccount": {"name": CUSTOM_MIGRATE_DATABASE_JOBS_NAME}}, }, ) list_of_sa_names_in_objects = [] for k8s_object in k8s_objects: name = ( jmespath.search("spec.template.spec.serviceAccountName", k8s_object) or jmespath.search( "spec.jobTemplate.spec.template.spec.serviceAccountName", k8s_object, ) or None ) if name and name not in list_of_sa_names_in_objects: list_of_sa_names_in_objects.append(name) assert sorted(list_of_sa_names_in_objects) == sorted(CUSTOM_SERVICE_ACCOUNT_NAMES) def test_service_account_without_resource(self): k8s_objects = render_chart( "test-rbac", values={ "fullnameOverride": "test-rbac", "executor": "LocalExecutor", "cleanup": {"enabled": False}, "pgbouncer": {"enabled": False}, "redis": {"enabled": False}, "flower": {"enabled": False}, "statsd": {"enabled": False}, "webserver": {"defaultUser": {"enabled": False}}, }, ) 
list_of_sa_names = [ k8s_object["metadata"]["name"] for k8s_object in k8s_objects if k8s_object["kind"] == "ServiceAccount" ] service_account_names = [ "test-rbac-scheduler", "test-rbac-webserver", "test-rbac-triggerer", "test-rbac-migrate-database-job", ] assert sorted(list_of_sa_names) == sorted(service_account_names)
14,635
40.228169
106
py
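All of the RBAC tests above reduce the rendered chart to a list of (kind, name) pairs and compare it against the expected tuples defined at the top of the file. The collection step on its own, as an illustrative sketch rather than part of the file above:

from tests.charts.helm_template_generator import render_chart

# Render the full chart and collect (kind, name) pairs, as the RBAC tests do,
# then spot-check one of the expected ServiceAccounts.
k8s_objects = render_chart("test-rbac", values={"fullnameOverride": "test-rbac"})
kind_name_pairs = [(obj["kind"], obj["metadata"]["name"]) for obj in k8s_objects]
assert ("ServiceAccount", "test-rbac-scheduler") in kind_name_pairs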
airflow
airflow-main/helm_tests/security/__init__.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
785
45.235294
62
py
airflow
airflow-main/helm_tests/security/test_result_backend_connection_secret.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import base64 import jmespath import pytest from tests.charts.helm_template_generator import render_chart class TestResultBackendConnectionSecret: """Tests result backend connection secret.""" def _get_values_with_version(self, values, version): if version != "default": values["airflowVersion"] = version return values def _assert_for_old_version(self, version, value, expected_value): if version == "2.3.2": assert value == expected_value else: assert value is None non_chart_database_values = { "user": "someuser", "pass": "somepass", "host": "somehost", "protocol": "postgresql", "port": 7777, "db": "somedb", "sslmode": "allow", } def test_should_not_generate_a_document_if_using_existing_secret(self): docs = render_chart( values={"data": {"resultBackendSecretName": "foo"}}, show_only=["templates/secrets/result-backend-connection-secret.yaml"], ) assert 0 == len(docs) @pytest.mark.parametrize( "executor, expected_doc_count", [ ("CeleryExecutor", 1), ("CeleryKubernetesExecutor", 1), ("LocalExecutor", 0), ], ) def test_should_a_document_be_generated_for_executor(self, executor, expected_doc_count): docs = render_chart( values={ "executor": executor, "data": { "metadataConnection": {**self.non_chart_database_values}, "resultBackendConnection": { **self.non_chart_database_values, "user": "anotheruser", "pass": "anotherpass", }, }, }, show_only=["templates/secrets/result-backend-connection-secret.yaml"], ) assert expected_doc_count == len(docs) def _get_connection(self, values: dict) -> str | None: docs = render_chart( values=values, show_only=["templates/secrets/result-backend-connection-secret.yaml"], ) if len(docs) == 0: return None encoded_connection = jmespath.search("data.connection", docs[0]) return base64.b64decode(encoded_connection).decode() @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_default_connection_old_version(self, version): connection = self._get_connection(self._get_values_with_version(version=version, values={})) self._assert_for_old_version( version, value=connection, expected_value="db+postgresql://postgres:postgres@release-name" "-postgresql:5432/postgres?sslmode=disable", ) @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_should_default_to_custom_metadata_db_connection_with_pgbouncer_overrides(self, version): values = { "pgbouncer": {"enabled": True}, "data": {"metadataConnection": {**self.non_chart_database_values}}, } connection = self._get_connection(self._get_values_with_version(values=values, version=version)) # host, port, dbname still get overridden self._assert_for_old_version( version, value=connection, expected_value="db+postgresql://someuser:somepass@release-name-pgbouncer" 
":6543/release-name-result-backend?sslmode=allow", ) @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_should_set_pgbouncer_overrides_when_enabled(self, version): values = {"pgbouncer": {"enabled": True}} connection = self._get_connection(self._get_values_with_version(values=values, version=version)) # host, port, dbname get overridden self._assert_for_old_version( version, value=connection, expected_value="db+postgresql://postgres:postgres@release-name-pgbouncer" ":6543/release-name-result-backend?sslmode=disable", ) def test_should_set_pgbouncer_overrides_with_non_chart_database_when_enabled(self): values = { "pgbouncer": {"enabled": True}, "data": {"resultBackendConnection": {**self.non_chart_database_values}}, } connection = self._get_connection(values) # host, port, dbname still get overridden even with an non-chart db assert ( "db+postgresql://someuser:somepass@release-name-pgbouncer:6543" "/release-name-result-backend?sslmode=allow" == connection ) @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_should_default_to_custom_metadata_db_connection_in_old_version(self, version): values = { "data": {"metadataConnection": {**self.non_chart_database_values}}, } connection = self._get_connection(self._get_values_with_version(values=values, version=version)) self._assert_for_old_version( version, value=connection, expected_value="db+postgresql://someuser:somepass@somehost:7777/somedb?sslmode=allow", ) def test_should_correctly_use_non_chart_database(self): values = {"data": {"resultBackendConnection": {**self.non_chart_database_values}}} connection = self._get_connection(values) assert "db+postgresql://someuser:somepass@somehost:7777/somedb?sslmode=allow" == connection def test_should_support_non_postgres_db(self): values = { "data": { "resultBackendConnection": { **self.non_chart_database_values, "protocol": "mysql", } } } connection = self._get_connection(values) # sslmode is only added for postgresql assert "db+mysql://someuser:somepass@somehost:7777/somedb" == connection def test_should_correctly_use_non_chart_database_when_both_db_are_external(self): values = { "data": { "metadataConnection": {**self.non_chart_database_values}, "resultBackendConnection": { **self.non_chart_database_values, "user": "anotheruser", "pass": "anotherpass", }, } } connection = self._get_connection(values) assert "db+postgresql://anotheruser:anotherpass@somehost:7777/somedb?sslmode=allow" == connection def test_should_correctly_handle_password_with_special_characters(self): values = { "data": { "resultBackendConnection": { **self.non_chart_database_values, "user": "username@123123", "pass": "password@!@#$^&*()", }, } } connection = self._get_connection(values) assert ( "db+postgresql://username%40123123:password%40%21%40%23$%5E&%2A%28%29@somehost:7777/" "somedb?sslmode=allow" == connection )
8,036
37.271429
105
py
airflow
airflow-main/helm_tests/security/test_elasticsearch_secret.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import base64
from subprocess import CalledProcessError

import jmespath
import pytest

from tests.charts.helm_template_generator import render_chart


class TestElasticsearchSecret:
    """Tests elasticsearch secret."""

    def test_should_not_generate_a_document_if_elasticsearch_disabled(self):
        docs = render_chart(
            values={"elasticsearch": {"enabled": False}},
            show_only=["templates/secrets/elasticsearch-secret.yaml"],
        )
        assert 0 == len(docs)

    def test_should_raise_error_when_connection_not_provided(self):
        with pytest.raises(CalledProcessError) as ex_ctx:
            render_chart(
                values={
                    "elasticsearch": {
                        "enabled": True,
                    }
                },
                show_only=["templates/secrets/elasticsearch-secret.yaml"],
            )
        assert (
            "You must set one of the values elasticsearch.secretName or elasticsearch.connection "
            "when using a Elasticsearch" in ex_ctx.value.stderr.decode()
        )

    def test_should_raise_error_when_conflicting_options(self):
        with pytest.raises(CalledProcessError) as ex_ctx:
            render_chart(
                values={
                    "elasticsearch": {
                        "enabled": True,
                        "secretName": "my-test",
                        "connection": {
                            "user": "username!@#$%%^&*()",
                            "pass": "password!@#$%%^&*()",
                            "host": "elastichostname",
                        },
                    },
                },
                show_only=["templates/secrets/elasticsearch-secret.yaml"],
            )
        assert (
            "You must not set both values elasticsearch.secretName and elasticsearch.connection"
            in ex_ctx.value.stderr.decode()
        )

    def _get_connection(self, values: dict) -> str:
        docs = render_chart(
            values=values,
            show_only=["templates/secrets/elasticsearch-secret.yaml"],
        )
        encoded_connection = jmespath.search("data.connection", docs[0])
        return base64.b64decode(encoded_connection).decode()

    def test_should_correctly_handle_password_with_special_characters(self):
        connection = self._get_connection(
            {
                "elasticsearch": {
                    "enabled": True,
                    "connection": {
                        "user": "username!@#$%%^&*()",
                        "pass": "password!@#$%%^&*()",
                        "host": "elastichostname",
                    },
                }
            }
        )
        assert (
            "http://username%21%40%23$%25%25%5E&%2A%28%29:password%21%40%23$%25%25%5E&%2A%28%29@"
            "elastichostname:9200" == connection
        )

    def test_should_generate_secret_with_specified_port(self):
        connection = self._get_connection(
            {
                "elasticsearch": {
                    "enabled": True,
                    "connection": {
                        "user": "username",
                        "pass": "password",
                        "host": "elastichostname",
                        "port": 2222,
                    },
                }
            }
        )
        assert "http://username:password@elastichostname:2222" == connection

    @pytest.mark.parametrize("scheme", ["http", "https"])
    def test_should_generate_secret_with_specified_schemes(self, scheme):
        connection = self._get_connection(
            {
                "elasticsearch": {
                    "enabled": True,
                    "connection": {
                        "scheme": scheme,
                        "user": "username",
                        "pass": "password",
                        "host": "elastichostname",
                    },
                }
            }
        )
        assert f"{scheme}://username:password@elastichostname:9200" == connection

    @pytest.mark.parametrize(
        "extra_conn_kwargs, expected_user_info",
        [
            # When both user and password are empty.
            ({}, ""),
            # When password is empty
            ({"user": "admin"}, ""),
            # When user is empty
            ({"pass": "password"}, ""),
            # Valid username/password
            ({"user": "admin", "pass": "password"}, "admin:password"),
        ],
    )
    def test_url_generated_when_user_pass_empty_combinations(self, extra_conn_kwargs, expected_user_info):
        connection = self._get_connection(
            {
                "elasticsearch": {
                    "enabled": True,
                    "connection": {"host": "elastichostname", "port": 8080, **extra_conn_kwargs},
                }
            }
        )
        if not expected_user_info:
            assert "http://elastichostname:8080" == connection
        else:
            assert f"http://{expected_user_info}@elastichostname:8080" == connection
5,905
34.793939
106
py
airflow
airflow-main/helm_tests/other/test_pdb_pgbouncer.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

from tests.charts.helm_template_generator import render_chart


class TestPgbouncerPdb:
    """Tests PgBouncer PDB."""

    def test_should_pass_validation_with_just_pdb_enabled_v1(self):
        render_chart(
            values={"pgbouncer": {"enabled": True, "podDisruptionBudget": {"enabled": True}}},
            show_only=["templates/pgbouncer/pgbouncer-poddisruptionbudget.yaml"],
        )  # checks that no validation exception is raised

    def test_should_pass_validation_with_just_pdb_enabled_v1beta1(self):
        render_chart(
            values={"pgbouncer": {"enabled": True, "podDisruptionBudget": {"enabled": True}}},
            show_only=["templates/pgbouncer/pgbouncer-poddisruptionbudget.yaml"],
            kubernetes_version="1.16.0",
        )  # checks that no validation exception is raised

    def test_should_pass_validation_with_pdb_enabled_and_min_available_param(self):
        render_chart(
            values={
                "pgbouncer": {
                    "enabled": True,
                    "podDisruptionBudget": {
                        "enabled": True,
                        "config": {"maxUnavailable": None, "minAvailable": 1},
                    },
                }
            },
            show_only=["templates/pgbouncer/pgbouncer-poddisruptionbudget.yaml"],
        )  # checks that no validation exception is raised
2,198
42.117647
94
py
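These PDB tests lean on the fact that `render_chart` validates the rendered manifest against a Kubernetes schema and accepts a `kubernetes_version` argument, so the same template can be checked against both the current `policy/v1` API and the older `policy/v1beta1` API used by Kubernetes 1.16. An illustrative sketch (not part of the file above):

from tests.charts.helm_template_generator import render_chart

# Rendering succeeds (no validation exception) for both the current schema
# and the 1.16 schema; reaching the end of the loop is the assertion.
values = {"pgbouncer": {"enabled": True, "podDisruptionBudget": {"enabled": True}}}
show_only = ["templates/pgbouncer/pgbouncer-poddisruptionbudget.yaml"]
render_chart(values=values, show_only=show_only)
render_chart(values=values, show_only=show_only, kubernetes_version="1.16.0")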
airflow
airflow-main/helm_tests/other/test_redis.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import re from base64 import b64decode from subprocess import CalledProcessError import jmespath import pytest from tests.charts.helm_template_generator import prepare_k8s_lookup_dict, render_chart RELEASE_NAME_REDIS = "test-redis" REDIS_OBJECTS = { "NETWORK_POLICY": ("NetworkPolicy", f"{RELEASE_NAME_REDIS}-redis-policy"), "SERVICE": ("Service", f"{RELEASE_NAME_REDIS}-redis"), "STATEFUL_SET": ("StatefulSet", f"{RELEASE_NAME_REDIS}-redis"), "SECRET_PASSWORD": ("Secret", f"{RELEASE_NAME_REDIS}-redis-password"), "SECRET_BROKER_URL": ("Secret", f"{RELEASE_NAME_REDIS}-broker-url"), } SET_POSSIBLE_REDIS_OBJECT_KEYS = set(REDIS_OBJECTS.values()) CELERY_EXECUTORS_PARAMS = ["CeleryExecutor", "CeleryKubernetesExecutor"] class TestRedis: """Tests redis.""" @staticmethod def get_broker_url_in_broker_url_secret(k8s_obj_by_key): broker_url_in_obj = b64decode( k8s_obj_by_key[REDIS_OBJECTS["SECRET_BROKER_URL"]]["data"]["connection"] ).decode("utf-8") return broker_url_in_obj @staticmethod def get_redis_password_in_password_secret(k8s_obj_by_key): password_in_obj = b64decode( k8s_obj_by_key[REDIS_OBJECTS["SECRET_PASSWORD"]]["data"]["password"] ).decode("utf-8") return password_in_obj @staticmethod def get_broker_url_secret_in_deployment(k8s_obj_by_key, kind: str, name: str) -> str: deployment_obj = k8s_obj_by_key[(kind, f"{RELEASE_NAME_REDIS}-{name}")] containers = deployment_obj["spec"]["template"]["spec"]["containers"] container = next(obj for obj in containers if obj["name"] == name) envs = container["env"] env = next(obj for obj in envs if obj["name"] == "AIRFLOW__CELERY__BROKER_URL") return env["valueFrom"]["secretKeyRef"]["name"] def assert_password_and_broker_url_secrets( self, k8s_obj_by_key, expected_password_match: str | None, expected_broker_url_match: str | None ): if expected_password_match is not None: redis_password_in_password_secret = self.get_redis_password_in_password_secret(k8s_obj_by_key) assert re.search(expected_password_match, redis_password_in_password_secret) else: assert REDIS_OBJECTS["SECRET_PASSWORD"] not in k8s_obj_by_key.keys() if expected_broker_url_match is not None: # assert redis broker url in secret broker_url_in_broker_url_secret = self.get_broker_url_in_broker_url_secret(k8s_obj_by_key) assert re.search(expected_broker_url_match, broker_url_in_broker_url_secret) else: assert REDIS_OBJECTS["SECRET_BROKER_URL"] not in k8s_obj_by_key.keys() def assert_broker_url_env( self, k8s_obj_by_key, expected_broker_url_secret_name=REDIS_OBJECTS["SECRET_BROKER_URL"][1] ): broker_url_secret_in_scheduler = self.get_broker_url_secret_in_deployment( k8s_obj_by_key, "StatefulSet", "worker" ) assert broker_url_secret_in_scheduler == expected_broker_url_secret_name broker_url_secret_in_worker 
= self.get_broker_url_secret_in_deployment( k8s_obj_by_key, "Deployment", "scheduler" ) assert broker_url_secret_in_worker == expected_broker_url_secret_name @pytest.mark.parametrize("executor", CELERY_EXECUTORS_PARAMS) def test_redis_by_chart_default(self, executor): k8s_objects = render_chart( RELEASE_NAME_REDIS, { "executor": executor, "networkPolicies": {"enabled": True}, "redis": {"enabled": True}, }, ) k8s_obj_by_key = prepare_k8s_lookup_dict(k8s_objects) created_redis_objects = SET_POSSIBLE_REDIS_OBJECT_KEYS & set(k8s_obj_by_key.keys()) assert created_redis_objects == SET_POSSIBLE_REDIS_OBJECT_KEYS self.assert_password_and_broker_url_secrets( k8s_obj_by_key, expected_password_match=r"\w+", expected_broker_url_match=rf"redis://:.+@{RELEASE_NAME_REDIS}-redis:6379/0", ) self.assert_broker_url_env(k8s_obj_by_key) @pytest.mark.parametrize("executor", CELERY_EXECUTORS_PARAMS) def test_redis_by_chart_password(self, executor): k8s_objects = render_chart( RELEASE_NAME_REDIS, { "executor": executor, "networkPolicies": {"enabled": True}, "redis": {"enabled": True, "password": "test-redis-password!@#$%^&*()_+"}, }, ) k8s_obj_by_key = prepare_k8s_lookup_dict(k8s_objects) created_redis_objects = SET_POSSIBLE_REDIS_OBJECT_KEYS & set(k8s_obj_by_key.keys()) assert created_redis_objects == SET_POSSIBLE_REDIS_OBJECT_KEYS self.assert_password_and_broker_url_secrets( k8s_obj_by_key, expected_password_match="test-redis-password", expected_broker_url_match=re.escape( "redis://:test-redis-password%21%40%23$%25%5E&%2A%28%29_+@test-redis-redis:6379/0" ), ) self.assert_broker_url_env(k8s_obj_by_key) @pytest.mark.parametrize("executor", CELERY_EXECUTORS_PARAMS) def test_redis_by_chart_password_secret_name_missing_broker_url_secret_name(self, executor): with pytest.raises(CalledProcessError): render_chart( RELEASE_NAME_REDIS, { "executor": executor, "redis": { "enabled": True, "passwordSecretName": "test-redis-password-secret-name", }, }, ) @pytest.mark.parametrize("executor", CELERY_EXECUTORS_PARAMS) def test_redis_by_chart_password_secret_name(self, executor): expected_broker_url_secret_name = "test-redis-broker-url-secret-name" k8s_objects = render_chart( RELEASE_NAME_REDIS, { "executor": executor, "networkPolicies": {"enabled": True}, "data": {"brokerUrlSecretName": expected_broker_url_secret_name}, "redis": { "enabled": True, "passwordSecretName": "test-redis-password-secret-name", }, }, ) k8s_obj_by_key = prepare_k8s_lookup_dict(k8s_objects) created_redis_objects = SET_POSSIBLE_REDIS_OBJECT_KEYS & set(k8s_obj_by_key.keys()) assert created_redis_objects == SET_POSSIBLE_REDIS_OBJECT_KEYS - { REDIS_OBJECTS["SECRET_PASSWORD"], REDIS_OBJECTS["SECRET_BROKER_URL"], } self.assert_password_and_broker_url_secrets( k8s_obj_by_key, expected_password_match=None, expected_broker_url_match=None ) self.assert_broker_url_env(k8s_obj_by_key, expected_broker_url_secret_name) @pytest.mark.parametrize("executor", CELERY_EXECUTORS_PARAMS) def test_external_redis_broker_url(self, executor): k8s_objects = render_chart( RELEASE_NAME_REDIS, { "executor": executor, "networkPolicies": {"enabled": True}, "data": { "brokerUrl": "redis://redis-user:password@redis-host:6379/0", }, "redis": {"enabled": False}, }, ) k8s_obj_by_key = prepare_k8s_lookup_dict(k8s_objects) created_redis_objects = SET_POSSIBLE_REDIS_OBJECT_KEYS & set(k8s_obj_by_key.keys()) assert created_redis_objects == {REDIS_OBJECTS["SECRET_BROKER_URL"]} self.assert_password_and_broker_url_secrets( k8s_obj_by_key, expected_password_match=None, 
expected_broker_url_match="redis://redis-user:password@redis-host:6379/0", ) self.assert_broker_url_env(k8s_obj_by_key) @pytest.mark.parametrize("executor", CELERY_EXECUTORS_PARAMS) def test_external_redis_broker_url_secret_name(self, executor): expected_broker_url_secret_name = "redis-broker-url-secret-name" k8s_objects = render_chart( RELEASE_NAME_REDIS, { "executor": executor, "networkPolicies": {"enabled": True}, "data": {"brokerUrlSecretName": expected_broker_url_secret_name}, "redis": {"enabled": False}, }, ) k8s_obj_by_key = prepare_k8s_lookup_dict(k8s_objects) created_redis_objects = SET_POSSIBLE_REDIS_OBJECT_KEYS & set(k8s_obj_by_key.keys()) assert created_redis_objects == set() self.assert_password_and_broker_url_secrets( k8s_obj_by_key, expected_password_match=None, expected_broker_url_match=None ) self.assert_broker_url_env(k8s_obj_by_key, expected_broker_url_secret_name) def test_default_redis_secrets_created_with_non_celery_executor(self): # We want to make sure default redis secrets (if needed) are still # created during install, as they are marked "pre-install". # See note in templates/secrets/redis-secrets.yaml for more. docs = render_chart( values={"executor": "KubernetesExecutor"}, show_only=["templates/secrets/redis-secrets.yaml"] ) assert 2 == len(docs) def test_should_create_valid_affinity_tolerations_and_node_selector(self): docs = render_chart( values={ "executor": "CeleryExecutor", "redis": { "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "nodeSelector": {"diskType": "ssd"}, }, }, show_only=["templates/redis/redis-statefulset.yaml"], ) assert "StatefulSet" == jmespath.search("kind", docs[0]) assert "foo" == jmespath.search( "spec.template.spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." 
"key", docs[0], ) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.template.spec.tolerations[0].key", docs[0], ) def test_redis_resources_are_configurable(self): docs = render_chart( values={ "redis": { "resources": { "limits": {"cpu": "200m", "memory": "128Mi"}, "requests": {"cpu": "300m", "memory": "169Mi"}, } }, }, show_only=["templates/redis/redis-statefulset.yaml"], ) assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.containers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) def test_redis_resources_are_not_added_by_default(self): docs = render_chart( show_only=["templates/redis/redis-statefulset.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].resources", docs[0]) == {} def test_should_set_correct_helm_hooks_weight(self): docs = render_chart( values={ "executor": "CeleryExecutor", }, show_only=["templates/secrets/redis-secrets.yaml"], ) annotations = jmespath.search("metadata.annotations", docs[0]) assert annotations["helm.sh/hook-weight"] == "0" def test_persistence_volume_annotations(self): docs = render_chart( values={"redis": {"persistence": {"annotations": {"foo": "bar"}}}}, show_only=["templates/redis/redis-statefulset.yaml"], ) assert {"foo": "bar"} == jmespath.search("spec.volumeClaimTemplates[0].metadata.annotations", docs[0])
13,716
40.441088
110
py
airflow
airflow-main/helm_tests/other/test_git_sync_scheduler.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath from tests.charts.helm_template_generator import render_chart class TestGitSyncSchedulerTest: """Test git sync scheduler.""" def test_should_add_dags_volume(self): docs = render_chart( values={"dags": {"gitSync": {"enabled": True}}}, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) # check that there is a volume and git-sync and scheduler container mount it assert len(jmespath.search("spec.template.spec.volumes[?name=='dags']", docs[0])) > 0 assert ( len( jmespath.search( "(spec.template.spec.containers[?name=='scheduler'].volumeMounts[])[?name=='dags']", docs[0], ) ) > 0 ) assert ( len( jmespath.search( "(spec.template.spec.containers[?name=='git-sync'].volumeMounts[])[?name=='dags']", docs[0], ) ) > 0 ) def test_validate_the_git_sync_container_spec(self): docs = render_chart( values={ "images": { "gitSync": { "repository": "test-registry/test-repo", "tag": "test-tag", "pullPolicy": "Always", } }, "dags": { "gitSync": { "enabled": True, "containerName": "git-sync-test", "wait": 66, "maxFailures": 70, "subPath": "path1/path2", "rev": "HEAD", "depth": 1, "repo": "https://github.com/apache/airflow.git", "branch": "test-branch", "sshKeySecret": None, "credentialsSecret": None, "knownHosts": None, }, "persistence": {"enabled": True}, }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert { "name": "git-sync-test", "securityContext": {"runAsUser": 65533}, "image": "test-registry/test-repo:test-tag", "imagePullPolicy": "Always", "env": [ {"name": "GIT_SYNC_REV", "value": "HEAD"}, {"name": "GIT_SYNC_BRANCH", "value": "test-branch"}, {"name": "GIT_SYNC_REPO", "value": "https://github.com/apache/airflow.git"}, {"name": "GIT_SYNC_DEPTH", "value": "1"}, {"name": "GIT_SYNC_ROOT", "value": "/git"}, {"name": "GIT_SYNC_DEST", "value": "repo"}, {"name": "GIT_SYNC_ADD_USER", "value": "true"}, {"name": "GIT_SYNC_WAIT", "value": "66"}, {"name": "GIT_SYNC_MAX_SYNC_FAILURES", "value": "70"}, ], "volumeMounts": [{"mountPath": "/git", "name": "dags"}], "resources": {}, } == jmespath.search("spec.template.spec.containers[1]", docs[0]) def test_validate_if_ssh_params_are_added(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "containerName": "git-sync-test", "sshKeySecret": "ssh-secret", "knownHosts": None, "branch": "test-branch", } } }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert {"name": "GIT_SSH_KEY_FILE", "value": "/etc/git-secret/ssh"} in jmespath.search( "spec.template.spec.containers[1].env", docs[0] ) assert {"name": "GIT_SYNC_SSH", "value": "true"} in jmespath.search( "spec.template.spec.containers[1].env", docs[0] ) assert {"name": "GIT_KNOWN_HOSTS", "value": "false"} in jmespath.search( 
"spec.template.spec.containers[1].env", docs[0] ) assert { "name": "git-sync-ssh-key", "secret": {"secretName": "ssh-secret", "defaultMode": 288}, } in jmespath.search("spec.template.spec.volumes", docs[0]) def test_validate_sshkeysecret_not_added_when_persistence_is_enabled(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "containerName": "git-sync-test", "sshKeySecret": "ssh-secret", "knownHosts": None, "branch": "test-branch", }, "persistence": {"enabled": True}, } }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert "git-sync-ssh-key" not in jmespath.search("spec.template.spec.volumes[].name", docs[0]) def test_should_set_username_and_pass_env_variables(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "credentialsSecret": "user-pass-secret", "sshKeySecret": None, } } }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert { "name": "GIT_SYNC_USERNAME", "valueFrom": {"secretKeyRef": {"name": "user-pass-secret", "key": "GIT_SYNC_USERNAME"}}, } in jmespath.search("spec.template.spec.containers[1].env", docs[0]) assert { "name": "GIT_SYNC_PASSWORD", "valueFrom": {"secretKeyRef": {"name": "user-pass-secret", "key": "GIT_SYNC_PASSWORD"}}, } in jmespath.search("spec.template.spec.containers[1].env", docs[0]) def test_should_set_the_volume_claim_correctly_when_using_an_existing_claim(self): docs = render_chart( values={"dags": {"persistence": {"enabled": True, "existingClaim": "test-claim"}}}, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert {"name": "dags", "persistentVolumeClaim": {"claimName": "test-claim"}} in jmespath.search( "spec.template.spec.volumes", docs[0] ) def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( values={ "executor": "CeleryExecutor", "scheduler": { "extraVolumes": [{"name": "test-volume-{{ .Chart.Name }}", "emptyDir": {}}], "extraVolumeMounts": [ {"name": "test-volume-{{ .Chart.Name }}", "mountPath": "/opt/test"} ], }, "dags": { "gitSync": { "enabled": True, } }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert {"name": "test-volume-airflow", "emptyDir": {}} in jmespath.search( "spec.template.spec.volumes", docs[0] ) assert {"name": "test-volume-airflow", "mountPath": "/opt/test"} in jmespath.search( "spec.template.spec.containers[0].volumeMounts", docs[0] ) def test_extra_volume_and_git_sync_extra_volume_mount(self): docs = render_chart( values={ "executor": "CeleryExecutor", "scheduler": { "extraVolumes": [{"name": "test-volume-{{ .Values.executor }}", "emptyDir": {}}], }, "dags": { "gitSync": { "enabled": True, "extraVolumeMounts": [ {"mountPath": "/opt/test", "name": "test-volume-{{ .Values.executor }}"} ], } }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert {"name": "test-volume-CeleryExecutor", "emptyDir": {}} in jmespath.search( "spec.template.spec.volumes", docs[0] ) assert {"mountPath": "/git", "name": "dags"} in jmespath.search( "spec.template.spec.containers[1].volumeMounts", docs[0] ) assert {"name": "test-volume-CeleryExecutor", "mountPath": "/opt/test"} in jmespath.search( "spec.template.spec.containers[1].volumeMounts", docs[0] ) def test_should_add_env(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "env": [{"name": "FOO", "value": "bar"}], } }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert {"name": "FOO", "value": "bar"} in jmespath.search( "spec.template.spec.containers[1].env", docs[0] ) def 
test_resources_are_configurable(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "resources": { "limits": {"cpu": "200m", "memory": "128Mi"}, "requests": {"cpu": "300m", "memory": "169Mi"}, }, }, }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert "128Mi" == jmespath.search("spec.template.spec.containers[1].resources.limits.memory", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.containers[1].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search("spec.template.spec.containers[1].resources.requests.cpu", docs[0])
11,088
38.888489
110
py
airflow
airflow-main/helm_tests/other/test_git_sync_webserver.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath
import pytest

from tests.charts.helm_template_generator import render_chart


class TestGitSyncWebserver:
    """Test git sync webserver."""

    def test_should_add_dags_volume_to_the_webserver_if_git_sync_and_persistence_is_enabled(self):
        docs = render_chart(
            values={
                "airflowVersion": "1.10.14",
                "dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": True}},
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0])

    def test_should_add_dags_volume_to_the_webserver_if_git_sync_is_enabled_and_persistence_is_disabled(self):
        docs = render_chart(
            values={
                "airflowVersion": "1.10.14",
                "dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": False}},
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0])

    def test_should_add_git_sync_container_to_webserver_if_persistence_is_not_enabled_but_git_sync_is(self):
        docs = render_chart(
            values={
                "airflowVersion": "1.10.14",
                "dags": {
                    "gitSync": {"enabled": True, "containerName": "git-sync"},
                    "persistence": {"enabled": False},
                },
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        assert "git-sync" == jmespath.search("spec.template.spec.containers[1].name", docs[0])

    def test_should_have_service_account_defined(self):
        docs = render_chart(
            values={"dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": True}}},
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        assert "release-name-airflow-webserver" == jmespath.search(
            "spec.template.spec.serviceAccountName", docs[0]
        )

    @pytest.mark.parametrize(
        "airflow_version, exclude_webserver",
        [
            ("2.0.0", True),
            ("2.0.2", True),
            ("1.10.14", False),
            ("1.9.0", False),
            ("2.1.0", True),
        ],
    )
    def test_git_sync_with_different_airflow_versions(self, airflow_version, exclude_webserver):
        """
        If Airflow >= 2.0.0 - git sync related containers, volume mounts & volumes are not created.
        """
        docs = render_chart(
            values={
                "airflowVersion": airflow_version,
                "dags": {
                    "gitSync": {
                        "enabled": True,
                    },
                    "persistence": {"enabled": False},
                },
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        containers_names = [
            container["name"] for container in jmespath.search("spec.template.spec.containers", docs[0])
        ]

        volume_mount_names = [
            vm["name"] for vm in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0])
        ]

        volume_names = [volume["name"] for volume in jmespath.search("spec.template.spec.volumes", docs[0])]

        if exclude_webserver:
            assert "git-sync" not in containers_names
            assert "dags" not in volume_mount_names
            assert "dags" not in volume_names
        else:
            assert "git-sync" in containers_names
            assert "dags" in volume_mount_names
            assert "dags" in volume_names

    def test_should_add_env(self):
        docs = render_chart(
            values={
                "airflowVersion": "1.10.14",
                "dags": {
                    "gitSync": {
                        "enabled": True,
                        "env": [{"name": "FOO", "value": "bar"}],
                    }
                },
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )

        assert {"name": "FOO", "value": "bar"} in jmespath.search(
            "spec.template.spec.containers[1].env", docs[0]
        )

    def test_resources_are_configurable(self):
        docs = render_chart(
            values={
                "airflowVersion": "1.10.14",
                "dags": {
                    "gitSync": {
                        "enabled": True,
                        "resources": {
                            "limits": {"cpu": "200m", "memory": "128Mi"},
                            "requests": {"cpu": "300m", "memory": "169Mi"},
                        },
                    },
                },
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )
        assert "128Mi" == jmespath.search("spec.template.spec.containers[1].resources.limits.memory", docs[0])
        assert "169Mi" == jmespath.search(
            "spec.template.spec.containers[1].resources.requests.memory", docs[0]
        )
        assert "300m" == jmespath.search("spec.template.spec.containers[1].resources.requests.cpu", docs[0])

    def test_validate_sshkeysecret_not_added_when_persistence_is_enabled(self):
        docs = render_chart(
            values={
                "dags": {
                    "gitSync": {
                        "enabled": True,
                        "containerName": "git-sync-test",
                        "sshKeySecret": "ssh-secret",
                        "knownHosts": None,
                        "branch": "test-branch",
                    },
                    "persistence": {"enabled": True},
                }
            },
            show_only=["templates/webserver/webserver-deployment.yaml"],
        )
        assert "git-sync-ssh-key" not in jmespath.search("spec.template.spec.volumes[].name", docs[0])
6,797
37.191011
110
py
airflow
airflow-main/helm_tests/other/test_dags_persistent_volume_claim.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath

from tests.charts.helm_template_generator import render_chart


class TestDagsPersistentVolumeClaim:
    """Tests DAGs PVC."""

    def test_should_not_generate_a_document_if_persistence_is_disabled(self):
        docs = render_chart(
            values={"dags": {"persistence": {"enabled": False}}},
            show_only=["templates/dags-persistent-volume-claim.yaml"],
        )

        assert 0 == len(docs)

    def test_should_not_generate_a_document_when_using_an_existing_claim(self):
        docs = render_chart(
            values={"dags": {"persistence": {"enabled": True, "existingClaim": "test-claim"}}},
            show_only=["templates/dags-persistent-volume-claim.yaml"],
        )

        assert 0 == len(docs)

    def test_should_generate_a_document_if_persistence_is_enabled_and_not_using_an_existing_claim(self):
        docs = render_chart(
            values={"dags": {"persistence": {"enabled": True, "existingClaim": None}}},
            show_only=["templates/dags-persistent-volume-claim.yaml"],
        )

        assert 1 == len(docs)

    def test_should_set_pvc_details_correctly(self):
        docs = render_chart(
            values={
                "dags": {
                    "persistence": {
                        "enabled": True,
                        "size": "1G",
                        "existingClaim": None,
                        "storageClassName": "MyStorageClass",
                        "accessMode": "ReadWriteMany",
                    }
                }
            },
            show_only=["templates/dags-persistent-volume-claim.yaml"],
        )

        assert {
            "accessModes": ["ReadWriteMany"],
            "resources": {"requests": {"storage": "1G"}},
            "storageClassName": "MyStorageClass",
        } == jmespath.search("spec", docs[0])

    def test_single_annotation(self):
        docs = render_chart(
            values={
                "dags": {
                    "persistence": {
                        "enabled": True,
                        "size": "1G",
                        "existingClaim": None,
                        "storageClassName": "MyStorageClass",
                        "accessMode": "ReadWriteMany",
                        "annotations": {"key": "value"},
                    }
                }
            },
            show_only=["templates/dags-persistent-volume-claim.yaml"],
        )

        annotations = jmespath.search("metadata.annotations", docs[0])
        assert "value" == annotations.get("key")

    def test_multiple_annotations(self):
        docs = render_chart(
            values={
                "dags": {
                    "persistence": {
                        "enabled": True,
                        "size": "1G",
                        "existingClaim": None,
                        "storageClassName": "MyStorageClass",
                        "accessMode": "ReadWriteMany",
                        "annotations": {"key": "value", "key-two": "value-two"},
                    }
                }
            },
            show_only=["templates/dags-persistent-volume-claim.yaml"],
        )

        annotations = jmespath.search("metadata.annotations", docs[0])
        assert "value" == annotations.get("key")
        assert "value-two" == annotations.get("key-two")
4,170
35.911504
104
py
airflow
airflow-main/helm_tests/other/test_flower.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart class TestFlowerDeployment: """Tests flower deployment.""" @pytest.mark.parametrize( "executor,flower_enabled,created", [ ("CeleryExecutor", False, False), ("CeleryKubernetesExecutor", False, False), ("KubernetesExecutor", False, False), ("CeleryExecutor", True, True), ("CeleryKubernetesExecutor", True, True), ("KubernetesExecutor", True, False), ], ) def test_create_flower(self, executor, flower_enabled, created): docs = render_chart( values={"executor": executor, "flower": {"enabled": flower_enabled}}, show_only=["templates/flower/flower-deployment.yaml"], ) assert bool(docs) is created if created: assert "release-name-flower" == jmespath.search("metadata.name", docs[0]) assert "flower" == jmespath.search("spec.template.spec.containers[0].name", docs[0]) @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", [(8, 10), (10, 8), (8, None), (None, 10), (None, None)], ) def test_revision_history_limit(self, revision_history_limit, global_revision_history_limit): values = { "flower": { "enabled": True, } } if revision_history_limit: values["flower"]["revisionHistoryLimit"] = revision_history_limit if global_revision_history_limit: values["revisionHistoryLimit"] = global_revision_history_limit docs = render_chart( values=values, show_only=["templates/flower/flower-deployment.yaml"], ) expected_result = revision_history_limit if revision_history_limit else global_revision_history_limit assert jmespath.search("spec.revisionHistoryLimit", docs[0]) == expected_result @pytest.mark.parametrize( "airflow_version, expected_arg", [ ("2.0.2", "airflow celery flower"), ("1.10.14", "airflow flower"), ("1.9.0", "airflow flower"), ("2.1.0", "airflow celery flower"), ], ) def test_args_with_airflow_version(self, airflow_version, expected_arg): docs = render_chart( values={ "executor": "CeleryExecutor", "flower": {"enabled": True}, "airflowVersion": airflow_version, }, show_only=["templates/flower/flower-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == [ "bash", "-c", f"exec \\\n{expected_arg}", ] @pytest.mark.parametrize( "command, args", [ (None, None), (None, ["custom", "args"]), (["custom", "command"], None), (["custom", "command"], ["custom", "args"]), ], ) def test_command_and_args_overrides(self, command, args): docs = render_chart( values={"flower": {"enabled": True, "command": command, "args": args}}, show_only=["templates/flower/flower-deployment.yaml"], ) assert command == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert args == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def 
test_command_and_args_overrides_are_templated(self): docs = render_chart( values={ "flower": { "enabled": True, "command": ["{{ .Release.Name }}"], "args": ["{{ .Release.Service }}"], } }, show_only=["templates/flower/flower-deployment.yaml"], ) assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_should_create_flower_deployment_with_authorization(self): docs = render_chart( values={ "flower": {"enabled": True, "username": "flower", "password": "fl0w3r"}, "ports": {"flowerUI": 7777}, }, show_only=["templates/flower/flower-deployment.yaml"], ) assert "AIRFLOW__CELERY__FLOWER_BASIC_AUTH" == jmespath.search( "spec.template.spec.containers[0].env[0].name", docs[0] ) assert ["curl", "--user", "$AIRFLOW__CELERY__FLOWER_BASIC_AUTH", "localhost:7777"] == jmespath.search( "spec.template.spec.containers[0].livenessProbe.exec.command", docs[0] ) assert ["curl", "--user", "$AIRFLOW__CELERY__FLOWER_BASIC_AUTH", "localhost:7777"] == jmespath.search( "spec.template.spec.containers[0].readinessProbe.exec.command", docs[0] ) def test_should_create_flower_deployment_without_authorization(self): docs = render_chart( values={ "flower": {"enabled": True}, "ports": {"flowerUI": 7777}, }, show_only=["templates/flower/flower-deployment.yaml"], ) assert "AIRFLOW__CORE__FERNET_KEY" == jmespath.search( "spec.template.spec.containers[0].env[0].name", docs[0] ) assert ["curl", "localhost:7777"] == jmespath.search( "spec.template.spec.containers[0].livenessProbe.exec.command", docs[0] ) assert ["curl", "localhost:7777"] == jmespath.search( "spec.template.spec.containers[0].readinessProbe.exec.command", docs[0] ) def test_should_create_valid_affinity_tolerations_and_node_selector(self): docs = render_chart( values={ "flower": { "enabled": True, "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "nodeSelector": {"diskType": "ssd"}, }, }, show_only=["templates/flower/flower-deployment.yaml"], ) assert "Deployment" == jmespath.search("kind", docs[0]) assert "foo" == jmespath.search( "spec.template.spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." 
"key", docs[0], ) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.template.spec.tolerations[0].key", docs[0], ) def test_flower_resources_are_configurable(self): docs = render_chart( values={ "flower": { "enabled": True, "resources": { "limits": {"cpu": "200m", "memory": "128Mi"}, "requests": {"cpu": "300m", "memory": "169Mi"}, }, }, }, show_only=["templates/flower/flower-deployment.yaml"], ) assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.containers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) def test_flower_resources_are_not_added_by_default(self): docs = render_chart( values={"flower": {"enabled": True}}, show_only=["templates/flower/flower-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].resources", docs[0]) == {} def test_should_add_extra_containers(self): docs = render_chart( values={ "flower": { "enabled": True, "extraContainers": [ {"name": "test-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/flower/flower-deployment.yaml"], ) assert { "name": "test-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( values={ "flower": { "enabled": True, "extraVolumes": [{"name": "myvolume-{{ .Chart.Name }}", "emptyDir": {}}], "extraVolumeMounts": [{"name": "myvolume-{{ .Chart.Name }}", "mountPath": "/opt/test"}], }, }, show_only=["templates/flower/flower-deployment.yaml"], ) assert {"name": "myvolume-airflow", "emptyDir": {}} in jmespath.search( "spec.template.spec.volumes", docs[0] ) assert {"name": "myvolume-airflow", "mountPath": "/opt/test"} in jmespath.search( "spec.template.spec.containers[0].volumeMounts", docs[0] ) def test_should_add_global_volume_and_global_volume_mount(self): docs = render_chart( values={ "flower": { "enabled": True, }, "volumes": [{"name": "myvolume", "emptyDir": {}}], "volumeMounts": [{"name": "myvolume", "mountPath": "/opt/test"}], }, show_only=["templates/flower/flower-deployment.yaml"], ) assert {"name": "myvolume", "emptyDir": {}} in jmespath.search("spec.template.spec.volumes", docs[0]) assert {"name": "myvolume", "mountPath": "/opt/test"} in jmespath.search( "spec.template.spec.containers[0].volumeMounts", docs[0] ) def test_should_add_extraEnvs(self): docs = render_chart( values={ "flower": { "enabled": True, "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}], } }, show_only=["templates/flower/flower-deployment.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.containers[0].env", docs[0] ) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "flower": { "enabled": True, "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/flower/flower-deployment.yaml"], ) assert "test_label" in jmespath.search("spec.template.metadata.labels", docs[0]) assert jmespath.search("spec.template.metadata.labels", docs[0])["test_label"] == "test_label_value" def test_no_airflow_local_settings(self): docs = render_chart( values={"flower": {"enabled": True}, "airflowLocalSettings": None}, show_only=["templates/flower/flower-deployment.yaml"], ) volume_mounts = 
jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts) def test_airflow_local_settings(self): docs = render_chart( values={"flower": {"enabled": True}, "airflowLocalSettings": "# Well hello!"}, show_only=["templates/flower/flower-deployment.yaml"], ) volume_mount = { "name": "config", "mountPath": "/opt/airflow/config/airflow_local_settings.py", "subPath": "airflow_local_settings.py", "readOnly": True, } assert volume_mount in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) def test_should_add_component_specific_annotations(self): docs = render_chart( values={ "flower": { "enabled": True, "annotations": {"test_annotation": "test_annotation_value"}, }, }, show_only=["templates/flower/flower-deployment.yaml"], ) assert "annotations" in jmespath.search("metadata", docs[0]) assert jmespath.search("metadata.annotations", docs[0])["test_annotation"] == "test_annotation_value" class TestFlowerService: """Tests flower service.""" @pytest.mark.parametrize( "executor,flower_enabled,created", [ ("CeleryExecutor", False, False), ("CeleryKubernetesExecutor", False, False), ("KubernetesExecutor", False, False), ("CeleryExecutor", True, True), ("CeleryKubernetesExecutor", True, True), ("KubernetesExecutor", True, False), ], ) def test_create_flower(self, executor, flower_enabled, created): docs = render_chart( values={"executor": executor, "flower": {"enabled": flower_enabled}}, show_only=["templates/flower/flower-service.yaml"], ) assert bool(docs) is created if created: assert "release-name-flower" == jmespath.search("metadata.name", docs[0]) def test_default_service(self): docs = render_chart( values={"flower": {"enabled": True}}, show_only=["templates/flower/flower-service.yaml"], ) assert "release-name-flower" == jmespath.search("metadata.name", docs[0]) assert jmespath.search("metadata.annotations", docs[0]) is None assert {"tier": "airflow", "component": "flower", "release": "release-name"} == jmespath.search( "spec.selector", docs[0] ) assert "ClusterIP" == jmespath.search("spec.type", docs[0]) assert {"name": "flower-ui", "port": 5555} in jmespath.search("spec.ports", docs[0]) def test_overrides(self): docs = render_chart( values={ "ports": {"flowerUI": 9000}, "flower": { "enabled": True, "service": { "type": "LoadBalancer", "loadBalancerIP": "127.0.0.1", "annotations": {"foo": "bar"}, "loadBalancerSourceRanges": ["10.123.0.0/16"], }, }, }, show_only=["templates/flower/flower-service.yaml"], ) assert {"foo": "bar"} == jmespath.search("metadata.annotations", docs[0]) assert "LoadBalancer" == jmespath.search("spec.type", docs[0]) assert {"name": "flower-ui", "port": 9000} in jmespath.search("spec.ports", docs[0]) assert "127.0.0.1" == jmespath.search("spec.loadBalancerIP", docs[0]) assert ["10.123.0.0/16"] == jmespath.search("spec.loadBalancerSourceRanges", docs[0]) @pytest.mark.parametrize( "ports, expected_ports", [ ([{"port": 8888}], [{"port": 8888}]), # name is optional with a single port ( [{"name": "{{ .Release.Name }}", "protocol": "UDP", "port": "{{ .Values.ports.flowerUI }}"}], [{"name": "release-name", "protocol": "UDP", "port": 5555}], ), ([{"name": "only_sidecar", "port": "{{ int 9000 }}"}], [{"name": "only_sidecar", "port": 9000}]), ( [ {"name": "flower-ui", "port": "{{ .Values.ports.flowerUI }}"}, {"name": "sidecar", "port": 80, "targetPort": "sidecar"}, ], [ {"name": "flower-ui", "port": 5555}, {"name": "sidecar", "port": 80, "targetPort": "sidecar"}, ], ), ], ) def 
test_ports_overrides(self, ports, expected_ports): docs = render_chart( values={ "flower": {"enabled": True, "service": {"ports": ports}}, }, show_only=["templates/flower/flower-service.yaml"], ) assert expected_ports == jmespath.search("spec.ports", docs[0]) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "flower": { "enabled": True, "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/flower/flower-service.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" class TestFlowerNetworkPolicy: """Tests flower network policy.""" def test_off_by_default(self): docs = render_chart( show_only=["templates/flower/flower-networkpolicy.yaml"], ) assert 0 == len(docs) def test_defaults(self): docs = render_chart( values={ "networkPolicies": {"enabled": True}, "flower": { "enabled": True, "networkPolicy": { "ingress": { "from": [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] } }, }, }, show_only=["templates/flower/flower-networkpolicy.yaml"], ) assert 1 == len(docs) assert "NetworkPolicy" == docs[0]["kind"] assert [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] == jmespath.search( "spec.ingress[0].from", docs[0] ) assert [{"port": 5555}] == jmespath.search("spec.ingress[0].ports", docs[0]) @pytest.mark.parametrize( "ports, expected_ports", [ ([{"port": "sidecar"}], [{"port": "sidecar"}]), ( [ {"port": "{{ .Values.ports.flowerUI }}"}, {"port": 80}, ], [ {"port": 5555}, {"port": 80}, ], ), ], ) def test_ports_overrides(self, ports, expected_ports): docs = render_chart( values={ "networkPolicies": {"enabled": True}, "flower": { "enabled": True, "networkPolicy": { "ingress": { "from": [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}], "ports": ports, } }, }, }, show_only=["templates/flower/flower-networkpolicy.yaml"], ) assert expected_ports == jmespath.search("spec.ingress[0].ports", docs[0]) def test_deprecated_from_param(self): docs = render_chart( values={ "networkPolicies": {"enabled": True}, "flower": { "enabled": True, "extraNetworkPolicies": [ {"namespaceSelector": {"matchLabels": {"release": "myrelease"}}} ], }, }, show_only=["templates/flower/flower-networkpolicy.yaml"], ) assert [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] == jmespath.search( "spec.ingress[0].from", docs[0] ) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "networkPolicies": {"enabled": True}, "flower": { "enabled": True, "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/flower/flower-networkpolicy.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" class TestFlowerServiceAccount: """Tests flower service account.""" def test_should_add_component_specific_labels(self): docs = render_chart( values={ "flower": { "enabled": True, "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/flower/flower-serviceaccount.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value"
22,498
37.791379
110
py
airflow
airflow-main/helm_tests/other/test_pgbouncer.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import base64 import jmespath import pytest from tests.charts.helm_template_generator import render_chart class TestPgbouncer: """Tests PgBouncer.""" @pytest.mark.parametrize("yaml_filename", ["pgbouncer-deployment", "pgbouncer-service"]) def test_pgbouncer_resources_not_created_by_default(self, yaml_filename): docs = render_chart( show_only=[f"templates/pgbouncer/{yaml_filename}.yaml"], ) assert docs == [] def test_should_create_pgbouncer(self): docs = render_chart( values={"pgbouncer": {"enabled": True}}, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert "Deployment" == jmespath.search("kind", docs[0]) assert "release-name-pgbouncer" == jmespath.search("metadata.name", docs[0]) assert "pgbouncer" == jmespath.search("spec.template.spec.containers[0].name", docs[0]) def test_should_create_pgbouncer_service(self): docs = render_chart( values={"pgbouncer": {"enabled": True}}, show_only=["templates/pgbouncer/pgbouncer-service.yaml"], ) assert "Service" == jmespath.search("kind", docs[0]) assert "release-name-pgbouncer" == jmespath.search("metadata.name", docs[0]) assert "true" == jmespath.search('metadata.annotations."prometheus.io/scrape"', docs[0]) assert "9127" == jmespath.search('metadata.annotations."prometheus.io/port"', docs[0]) assert {"prometheus.io/scrape": "true", "prometheus.io/port": "9127"} == jmespath.search( "metadata.annotations", docs[0] ) assert {"name": "pgbouncer", "protocol": "TCP", "port": 6543} in jmespath.search( "spec.ports", docs[0] ) assert {"name": "pgbouncer-metrics", "protocol": "TCP", "port": 9127} in jmespath.search( "spec.ports", docs[0] ) def test_pgbouncer_service_with_custom_ports(self): docs = render_chart( values={ "pgbouncer": {"enabled": True}, "ports": {"pgbouncer": 1111, "pgbouncerScrape": 2222}, }, show_only=["templates/pgbouncer/pgbouncer-service.yaml"], ) assert "true" == jmespath.search('metadata.annotations."prometheus.io/scrape"', docs[0]) assert "2222" == jmespath.search('metadata.annotations."prometheus.io/port"', docs[0]) assert {"name": "pgbouncer", "protocol": "TCP", "port": 1111} in jmespath.search( "spec.ports", docs[0] ) assert {"name": "pgbouncer-metrics", "protocol": "TCP", "port": 2222} in jmespath.search( "spec.ports", docs[0] ) def test_pgbouncer_service_extra_annotations(self): docs = render_chart( values={ "pgbouncer": {"enabled": True, "service": {"extraAnnotations": {"foo": "bar"}}}, }, show_only=["templates/pgbouncer/pgbouncer-service.yaml"], ) assert { "prometheus.io/scrape": "true", "prometheus.io/port": "9127", "foo": "bar", } == jmespath.search("metadata.annotations", docs[0]) @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", [(8, 10), (10, 8), (8, None), (None, 10), 
(None, None)], ) def test_revision_history_limit(self, revision_history_limit, global_revision_history_limit): values = { "pgbouncer": { "enabled": True, } } if revision_history_limit: values["pgbouncer"]["revisionHistoryLimit"] = revision_history_limit if global_revision_history_limit: values["revisionHistoryLimit"] = global_revision_history_limit docs = render_chart( values=values, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) expected_result = revision_history_limit if revision_history_limit else global_revision_history_limit assert jmespath.search("spec.revisionHistoryLimit", docs[0]) == expected_result def test_should_create_valid_affinity_tolerations_and_node_selector(self): docs = render_chart( values={ "pgbouncer": { "enabled": True, "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "nodeSelector": {"diskType": "ssd"}, } }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert "foo" == jmespath.search( "spec.template.spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." "key", docs[0], ) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.template.spec.tolerations[0].key", docs[0], ) def test_no_existing_secret(self): docs = render_chart( "test-pgbouncer-config", values={ "pgbouncer": {"enabled": True}, }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert { "name": "pgbouncer-config", "secret": {"secretName": "test-pgbouncer-config-pgbouncer-config"}, } == jmespath.search("spec.template.spec.volumes[0]", docs[0]) def test_existing_secret(self): docs = render_chart( "test-pgbouncer-config", values={ "pgbouncer": {"enabled": True, "configSecretName": "pgbouncer-config-secret"}, }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert { "name": "pgbouncer-config", "secret": {"secretName": "pgbouncer-config-secret"}, } == jmespath.search("spec.template.spec.volumes[0]", docs[0]) def test_pgbouncer_resources_are_configurable(self): docs = render_chart( values={ "pgbouncer": { "enabled": True, "resources": { "limits": {"cpu": "200m", "memory": "128Mi"}, "requests": {"cpu": "300m", "memory": "169Mi"}, }, }, }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.containers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) def test_pgbouncer_resources_are_not_added_by_default(self): docs = render_chart( values={ "pgbouncer": {"enabled": True}, }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].resources", docs[0]) == {} def test_metrics_exporter_resources(self): docs = render_chart( values={ "pgbouncer": { "enabled": True, "metricsExporterSidecar": { "resources": { "requests": {"memory": "2Gi", "cpu": "1"}, "limits": {"memory": "3Gi", "cpu": "2"}, } }, } }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert { "limits": { "cpu": "2", "memory": "3Gi", }, "requests": { "cpu": 
"1", "memory": "2Gi", }, } == jmespath.search("spec.template.spec.containers[1].resources", docs[0]) def test_default_command_and_args(self): docs = render_chart( values={"pgbouncer": {"enabled": True}}, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert ["pgbouncer", "-u", "nobody", "/etc/pgbouncer/pgbouncer.ini"] == jmespath.search( "spec.template.spec.containers[0].command", docs[0] ) assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) is None @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) def test_command_and_args_overrides(self, command, args): docs = render_chart( values={"pgbouncer": {"enabled": True, "command": command, "args": args}}, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert command == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert args == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_command_and_args_overrides_are_templated(self): docs = render_chart( values={ "pgbouncer": { "enabled": True, "command": ["{{ .Release.Name }}"], "args": ["{{ .Release.Service }}"], } }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( values={ "pgbouncer": { "enabled": True, "extraVolumes": [ { "name": "pgbouncer-client-certificates-{{ .Chart.Name }}", "secret": {"secretName": "pgbouncer-client-tls-certificate"}, } ], "extraVolumeMounts": [ { "name": "pgbouncer-client-certificates-{{ .Chart.Name }}", "mountPath": "/etc/pgbouncer/certs", } ], }, }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert "pgbouncer-client-certificates-airflow" in jmespath.search( "spec.template.spec.volumes[*].name", docs[0] ) assert "pgbouncer-client-certificates-airflow" in jmespath.search( "spec.template.spec.containers[0].volumeMounts[*].name", docs[0] ) def test_should_add_global_volume_and_global_volume_mount(self): docs = render_chart( values={ "pgbouncer": { "enabled": True, }, "volumes": [ { "name": "pgbouncer-client-certificates", "secret": {"secretName": "pgbouncer-client-tls-certificate"}, } ], "volumeMounts": [ {"name": "pgbouncer-client-certificates", "mountPath": "/etc/pgbouncer/certs"} ], }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert "pgbouncer-client-certificates" in jmespath.search( "spec.template.spec.volumes[*].name", docs[0] ) assert "pgbouncer-client-certificates" in jmespath.search( "spec.template.spec.containers[0].volumeMounts[*].name", docs[0] ) def test_pgbouncer_replicas_are_configurable(self): docs = render_chart( values={ "pgbouncer": { "enabled": True, "replicas": 2, }, }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert 2 == jmespath.search("spec.replicas", docs[0]) def test_should_add_component_specific_annotations(self): docs = render_chart( values={ "pgbouncer": { "enabled": True, "annotations": {"test_annotation": "test_annotation_value"}, }, }, show_only=["templates/pgbouncer/pgbouncer-deployment.yaml"], ) assert "annotations" in jmespath.search("metadata", docs[0]) assert jmespath.search("metadata.annotations", docs[0])["test_annotation"] == "test_annotation_value" class TestPgbouncerConfig: """Tests PgBouncer config.""" def 
test_config_not_created_by_default(self): docs = render_chart( show_only=["templates/secrets/pgbouncer-config-secret.yaml"], ) assert docs == [] def _get_pgbouncer_ini(self, values: dict) -> str: docs = render_chart( values=values, show_only=["templates/secrets/pgbouncer-config-secret.yaml"], ) encoded_ini = jmespath.search('data."pgbouncer.ini"', docs[0]) return base64.b64decode(encoded_ini).decode() def test_databases_default(self): ini = self._get_pgbouncer_ini({"pgbouncer": {"enabled": True}}) assert ( "release-name-metadata = host=release-name-postgresql.default dbname=postgres port=5432" " pool_size=10" in ini ) assert ( "release-name-result-backend = host=release-name-postgresql.default dbname=postgres port=5432" " pool_size=5" in ini ) def test_databases_override(self): values = { "pgbouncer": { "enabled": True, "metadataPoolSize": 12, "resultBackendPoolSize": 7, "extraIniMetadata": "reserve_pool = 5", "extraIniResultBackend": "reserve_pool = 3", }, "data": { "metadataConnection": {"host": "meta_host", "db": "meta_db", "port": 1111}, "resultBackendConnection": { "protocol": "postgresql", "host": "rb_host", "user": "someuser", "pass": "someuser", "db": "rb_db", "port": 2222, "sslmode": "disabled", }, }, } ini = self._get_pgbouncer_ini(values) assert ( "release-name-metadata = host=meta_host dbname=meta_db port=1111 pool_size=12 reserve_pool = 5" in ini ) assert ( "release-name-result-backend = host=rb_host dbname=rb_db port=2222 pool_size=7 reserve_pool = 3" in ini ) def test_config_defaults(self): ini = self._get_pgbouncer_ini({"pgbouncer": {"enabled": True}}) assert "listen_port = 6543" in ini assert "stats_users = postgres" in ini assert "max_client_conn = 100" in ini assert "verbose = 0" in ini assert "log_disconnections = 0" in ini assert "log_connections = 0" in ini assert "server_tls_sslmode = prefer" in ini assert "server_tls_ciphers = normal" in ini assert "server_tls_ca_file = " not in ini assert "server_tls_cert_file = " not in ini assert "server_tls_key_file = " not in ini def test_config_overrides(self): values = { "pgbouncer": { "enabled": True, "maxClientConn": 111, "verbose": 2, "logDisconnections": 1, "logConnections": 1, "sslmode": "verify-full", "ciphers": "secure", }, "ports": {"pgbouncer": 7777}, "data": {"metadataConnection": {"user": "someuser"}}, } ini = self._get_pgbouncer_ini(values) assert "listen_port = 7777" in ini assert "stats_users = someuser" in ini assert "max_client_conn = 111" in ini assert "verbose = 2" in ini assert "log_disconnections = 1" in ini assert "log_connections = 1" in ini assert "server_tls_sslmode = verify-full" in ini assert "server_tls_ciphers = secure" in ini def test_auth_type_file_defaults(self): values = { "pgbouncer": {"enabled": True}, "ports": {"pgbouncer": 7777}, "data": {"metadataConnection": {"user": "someuser"}}, } ini = self._get_pgbouncer_ini(values) assert "auth_type = md5" in ini assert "auth_file = /etc/pgbouncer/users.txt" in ini def test_auth_type_file_overrides(self): values = { "pgbouncer": {"enabled": True, "auth_type": "any", "auth_file": "/home/auth.txt"}, "ports": {"pgbouncer": 7777}, "data": {"metadataConnection": {"user": "someuser"}}, } ini = self._get_pgbouncer_ini(values) assert "auth_type = any" in ini assert "auth_file = /home/auth.txt" in ini def test_ssl_defaults_dont_create_cert_secret(self): docs = render_chart( values={"pgbouncer": {"enabled": True}}, show_only=["templates/secrets/pgbouncer-certificates-secret.yaml"], ) assert docs == [] def test_ssl_config(self): values = { "pgbouncer": 
{"enabled": True, "ssl": {"ca": "someca", "cert": "somecert", "key": "somekey"}} } ini = self._get_pgbouncer_ini(values) assert "server_tls_ca_file = /etc/pgbouncer/root.crt" in ini assert "server_tls_cert_file = /etc/pgbouncer/server.crt" in ini assert "server_tls_key_file = /etc/pgbouncer/server.key" in ini docs = render_chart( values=values, show_only=["templates/secrets/pgbouncer-certificates-secret.yaml"], ) for key, expected in [("root.crt", "someca"), ("server.crt", "somecert"), ("server.key", "somekey")]: encoded = jmespath.search(f'data."{key}"', docs[0]) value = base64.b64decode(encoded).decode() assert expected == value def test_extra_ini_configs(self): values = {"pgbouncer": {"enabled": True, "extraIni": "server_round_robin = 1\nstats_period = 30"}} ini = self._get_pgbouncer_ini(values) assert "server_round_robin = 1" in ini assert "stats_period = 30" in ini class TestPgbouncerExporter: """Tests PgBouncer exporter.""" def test_secret_not_created_by_default(self): docs = render_chart( show_only=["templates/secrets/pgbouncer-stats-secret.yaml"], ) assert 0 == len(docs) def _get_connection(self, values: dict) -> str: docs = render_chart( values=values, show_only=["templates/secrets/pgbouncer-stats-secret.yaml"], ) encoded_connection = jmespath.search("data.connection", docs[0]) return base64.b64decode(encoded_connection).decode() def test_default_exporter_secret(self): connection = self._get_connection({"pgbouncer": {"enabled": True}}) assert "postgresql://postgres:[email protected]:6543/pgbouncer?sslmode=disable" == connection def test_exporter_secret_with_overrides(self): connection = self._get_connection( { "pgbouncer": {"enabled": True, "metricsExporterSidecar": {"sslmode": "require"}}, "data": { "metadataConnection": { "user": "username@123123", "pass": "password@!@#$^&*()", "host": "somehost", "port": 7777, "db": "somedb", }, }, "ports": {"pgbouncer": 1111}, } ) assert ( "postgresql://username%40123123:password%40%21%40%23$%5E&%2A%28%[email protected]:1111" "/pgbouncer?sslmode=require" == connection )
21,941
37.494737
110
py
airflow
airflow-main/helm_tests/other/test_resource_quota.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath

from tests.charts.helm_template_generator import render_chart


class TestResourceQuota:
    """Tests resource quota."""

    def test_resource_quota_template(self):
        docs = render_chart(
            values={
                "quotas": {
                    "configmaps": "10",
                    "persistentvolumeclaims": "4",
                    "pods": "4",
                    "replicationcontrollers": "20",
                    "secrets": "10",
                    "services": "10",
                }
            },
            show_only=["templates/resourcequota.yaml"],
        )

        assert "ResourceQuota" == jmespath.search("kind", docs[0])
        assert "20" == jmespath.search("spec.hard.replicationcontrollers", docs[0])

    def test_resource_quota_are_not_added_by_default(self):
        docs = render_chart(
            show_only=["templates/resourcequota.yaml"],
        )
        assert docs == []
1,765
35.040816
83
py
airflow
airflow-main/helm_tests/other/test_git_sync_triggerer.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath

from tests.charts.helm_template_generator import render_chart


class TestGitSyncTriggerer:
    """Test git sync triggerer."""

    def test_validate_sshkeysecret_not_added_when_persistence_is_enabled(self):
        docs = render_chart(
            values={
                "dags": {
                    "gitSync": {
                        "enabled": True,
                        "containerName": "git-sync-test",
                        "sshKeySecret": "ssh-secret",
                        "knownHosts": None,
                        "branch": "test-branch",
                    },
                    "persistence": {"enabled": True},
                }
            },
            show_only=["templates/triggerer/triggerer-deployment.yaml"],
        )
        assert "git-sync-ssh-key" not in jmespath.search("spec.template.spec.volumes[].name", docs[0])
1,697
37.590909
102
py
airflow
airflow-main/helm_tests/other/test_git_sync_worker.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath from tests.charts.helm_template_generator import render_chart class TestGitSyncWorker: """Test git sync worker.""" def test_should_add_dags_volume_to_the_worker_if_git_sync_and_persistence_is_enabled(self): docs = render_chart( values={ "executor": "CeleryExecutor", "dags": {"persistence": {"enabled": True}, "gitSync": {"enabled": True}}, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "config" == jmespath.search("spec.template.spec.volumes[0].name", docs[0]) assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) def test_should_add_dags_volume_to_the_worker_if_git_sync_is_enabled_and_persistence_is_disabled(self): docs = render_chart( values={ "executor": "CeleryExecutor", "dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": False}}, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "config" == jmespath.search("spec.template.spec.volumes[0].name", docs[0]) assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) def test_should_add_git_sync_container_to_worker_if_persistence_is_not_enabled_but_git_sync_is(self): docs = render_chart( values={ "executor": "CeleryExecutor", "dags": { "gitSync": {"enabled": True, "containerName": "git-sync"}, "persistence": {"enabled": False}, }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "git-sync" == jmespath.search("spec.template.spec.containers[1].name", docs[0]) def test_should_not_add_sync_container_to_worker_if_git_sync_and_persistence_are_enabled(self): docs = render_chart( values={ "executor": "CeleryExecutor", "dags": { "gitSync": {"enabled": True, "containerName": "git-sync"}, "persistence": {"enabled": True}, }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "git-sync" != jmespath.search("spec.template.spec.containers[1].name", docs[0]) def test_should_add_env(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "env": [{"name": "FOO", "value": "bar"}], } }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert {"name": "FOO", "value": "bar"} in jmespath.search( "spec.template.spec.containers[1].env", docs[0] ) def test_resources_are_configurable(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "resources": { "limits": {"cpu": "200m", "memory": "128Mi"}, "requests": {"cpu": "300m", "memory": "169Mi"}, }, }, }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "128Mi" == jmespath.search("spec.template.spec.containers[1].resources.limits.memory", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.containers[1].resources.requests.memory", docs[0] ) assert "300m" == 
jmespath.search("spec.template.spec.containers[1].resources.requests.cpu", docs[0]) def test_validate_sshkeysecret_not_added_when_persistence_is_enabled(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "containerName": "git-sync-test", "sshKeySecret": "ssh-secret", "knownHosts": None, "branch": "test-branch", }, "persistence": {"enabled": True}, } }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "git-sync-ssh-key" not in jmespath.search("spec.template.spec.volumes[].name", docs[0])
5,373
38.807407
110
py
airflow
airflow-main/helm_tests/other/test_keda.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart class TestKeda: """Tests keda.""" def test_keda_disabled_by_default(self): """Disabled by default.""" docs = render_chart( values={}, show_only=["templates/workers/worker-kedaautoscaler.yaml"], ) assert docs == [] @pytest.mark.parametrize( "executor, is_created", [ ("CeleryExecutor", True), ("CeleryKubernetesExecutor", True), ], ) def test_keda_enabled(self, executor, is_created): """ScaledObject should only be created when set to enabled and executor is Celery or CeleryKubernetes. """ docs = render_chart( values={ "workers": {"keda": {"enabled": True}, "persistence": {"enabled": False}}, "executor": executor, }, show_only=["templates/workers/worker-kedaautoscaler.yaml"], ) if is_created: assert jmespath.search("metadata.name", docs[0]) == "release-name-worker" else: assert docs == [] @pytest.mark.parametrize("executor", ["CeleryExecutor", "CeleryKubernetesExecutor"]) def test_keda_advanced(self, executor): """Verify keda advanced config.""" expected_advanced = { "horizontalPodAutoscalerConfig": { "behavior": { "scaleDown": { "stabilizationWindowSeconds": 300, "policies": [{"type": "Percent", "value": 100, "periodSeconds": 15}], } } } } docs = render_chart( values={ "workers": { "keda": { "enabled": True, "advanced": expected_advanced, }, }, "executor": executor, }, show_only=["templates/workers/worker-kedaautoscaler.yaml"], ) assert jmespath.search("spec.advanced", docs[0]) == expected_advanced @staticmethod def build_query(executor, concurrency=16, queue=None): """Builds the query used by KEDA autoscaler to determine how many workers there should be.""" query = ( f"SELECT ceil(COUNT(*)::decimal / {concurrency}) " "FROM task_instance WHERE (state='running' OR state='queued')" ) if executor == "CeleryKubernetesExecutor": query += f" AND queue != '{queue or 'kubernetes'}'" return query @pytest.mark.parametrize( "executor,concurrency", [ ("CeleryExecutor", 8), ("CeleryExecutor", 16), ("CeleryKubernetesExecutor", 8), ("CeleryKubernetesExecutor", 16), ], ) def test_keda_concurrency(self, executor, concurrency): """Verify keda sql query uses configured concurrency.""" docs = render_chart( values={ "workers": {"keda": {"enabled": True}, "persistence": {"enabled": False}}, "executor": executor, "config": {"celery": {"worker_concurrency": concurrency}}, }, show_only=["templates/workers/worker-kedaautoscaler.yaml"], ) expected_query = self.build_query(executor=executor, concurrency=concurrency) assert jmespath.search("spec.triggers[0].metadata.query", docs[0]) == expected_query @pytest.mark.parametrize( "executor,queue,should_filter", [ ("CeleryExecutor", None, False), ("CeleryExecutor", "my_queue", False), 
("CeleryKubernetesExecutor", None, True), ("CeleryKubernetesExecutor", "my_queue", True), ], ) def test_keda_query_kubernetes_queue(self, executor, queue, should_filter): """ Verify keda sql query ignores kubernetes queue when CKE is used. Sometimes a user might want to use a different queue name for k8s executor tasks, and we also verify here that we use the configured queue name in that case. """ values = { "workers": {"keda": {"enabled": True}, "persistence": {"enabled": False}}, "executor": executor, } if queue: values.update({"config": {"celery_kubernetes_executor": {"kubernetes_queue": queue}}}) docs = render_chart( values=values, show_only=["templates/workers/worker-kedaautoscaler.yaml"], ) expected_query = self.build_query(executor=executor, queue=queue) assert jmespath.search("spec.triggers[0].metadata.query", docs[0]) == expected_query @pytest.mark.parametrize( "enabled, kind", [ ("enabled", "StatefulSet"), ("not_enabled", "Deployment"), ], ) def test_persistence(self, enabled, kind): """If worker persistence is enabled, scaleTargetRef should be StatefulSet else Deployment.""" is_enabled = enabled == "enabled" docs = render_chart( values={ "workers": {"keda": {"enabled": True}, "persistence": {"enabled": is_enabled}}, "executor": "CeleryExecutor", }, show_only=["templates/workers/worker-kedaautoscaler.yaml"], ) assert jmespath.search("spec.scaleTargetRef.kind", docs[0]) == kind def test_default_keda_db_connection(self): """Verify default keda db connection.""" import base64 docs = render_chart( values={ "workers": {"keda": {"enabled": True}}, "executor": "CeleryExecutor", }, show_only=[ "templates/workers/worker-deployment.yaml", "templates/workers/worker-kedaautoscaler.yaml", "templates/secrets/metadata-connection-secret.yaml", ], ) worker_deployment = docs[0] keda_autoscaler = docs[1] metadata_connection_secret = docs[2] worker_container_env_vars = jmespath.search( "spec.template.spec.containers[?name=='worker'].env[].name", worker_deployment ) assert "AIRFLOW_CONN_AIRFLOW_DB" in worker_container_env_vars assert "KEDA_DB_CONN" not in worker_container_env_vars secret_data = jmespath.search("data", metadata_connection_secret) assert "connection" in secret_data.keys() assert "@release-name-postgresql" in base64.b64decode(secret_data["connection"]).decode() assert "kedaConnection" not in secret_data.keys() autoscaler_connection_env_var = jmespath.search( "spec.triggers[0].metadata.connectionFromEnv", keda_autoscaler ) assert autoscaler_connection_env_var == "AIRFLOW_CONN_AIRFLOW_DB" def test_default_keda_db_connection_pgbouncer_enabled(self): """Verify keda db connection when pgbouncer is enabled.""" import base64 docs = render_chart( values={ "workers": {"keda": {"enabled": True}}, "executor": "CeleryExecutor", "pgbouncer": {"enabled": True}, }, show_only=[ "templates/workers/worker-deployment.yaml", "templates/workers/worker-kedaautoscaler.yaml", "templates/secrets/metadata-connection-secret.yaml", ], ) worker_deployment = docs[0] keda_autoscaler = docs[1] metadata_connection_secret = docs[2] worker_container_env_vars = jmespath.search( "spec.template.spec.containers[?name=='worker'].env[].name", worker_deployment ) assert "AIRFLOW_CONN_AIRFLOW_DB" in worker_container_env_vars assert "KEDA_DB_CONN" not in worker_container_env_vars secret_data = jmespath.search("data", metadata_connection_secret) assert "connection" in secret_data.keys() assert "@release-name-pgbouncer" in base64.b64decode(secret_data["connection"]).decode() assert "kedaConnection" not in secret_data.keys() 
autoscaler_connection_env_var = jmespath.search( "spec.triggers[0].metadata.connectionFromEnv", keda_autoscaler ) assert autoscaler_connection_env_var == "AIRFLOW_CONN_AIRFLOW_DB" def test_default_keda_db_connection_pgbouncer_enabled_usePgbouncer_false(self): """Verify keda db connection when pgbouncer is enabled and usePgbouncer is false.""" import base64 docs = render_chart( values={ "workers": {"keda": {"enabled": True, "usePgbouncer": False}}, "executor": "CeleryExecutor", "pgbouncer": {"enabled": True}, }, show_only=[ "templates/workers/worker-deployment.yaml", "templates/workers/worker-kedaautoscaler.yaml", "templates/secrets/metadata-connection-secret.yaml", ], ) worker_deployment = docs[0] keda_autoscaler = docs[1] metadata_connection_secret = docs[2] worker_container_env_vars = jmespath.search( "spec.template.spec.containers[?name=='worker'].env[].name", worker_deployment ) assert "AIRFLOW_CONN_AIRFLOW_DB" in worker_container_env_vars assert "KEDA_DB_CONN" in worker_container_env_vars secret_data = jmespath.search("data", metadata_connection_secret) assert "connection" in secret_data.keys() assert "@release-name-pgbouncer" in base64.b64decode(secret_data["connection"]).decode() assert "kedaConnection" in secret_data.keys() assert "@release-name-postgresql" in base64.b64decode(secret_data["kedaConnection"]).decode() autoscaler_connection_env_var = jmespath.search( "spec.triggers[0].metadata.connectionFromEnv", keda_autoscaler ) assert autoscaler_connection_env_var == "KEDA_DB_CONN"
10,931
39.043956
101
py
airflow
airflow-main/helm_tests/other/__init__.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
785
45.235294
62
py
airflow
airflow-main/helm_tests/other/test_limit_ranges.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath

from tests.charts.helm_template_generator import render_chart


class TestLimitRanges:
    """Tests limit ranges."""

    def test_limit_ranges_template(self):
        docs = render_chart(
            values={"limits": [{"max": {"cpu": "500m"}, "min": {"min": "200m"}, "type": "Container"}]},
            show_only=["templates/limitrange.yaml"],
        )

        assert "LimitRange" == jmespath.search("kind", docs[0])
        assert "500m" == jmespath.search("spec.limits[0].max.cpu", docs[0])

    def test_limit_ranges_are_not_added_by_default(self):
        docs = render_chart(show_only=["templates/limitrange.yaml"])
        assert docs == []
1,488
38.184211
103
py
airflow
airflow-main/helm_tests/other/test_statsd.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest import yaml from tests.charts.helm_template_generator import render_chart class TestStatsd: """Tests statsd.""" def test_should_create_statsd_default(self): docs = render_chart(show_only=["templates/statsd/statsd-deployment.yaml"]) assert "release-name-statsd" == jmespath.search("metadata.name", docs[0]) assert "statsd" == jmespath.search("spec.template.spec.containers[0].name", docs[0]) assert {"name": "config", "configMap": {"name": "release-name-statsd"}} in jmespath.search( "spec.template.spec.volumes", docs[0] ) assert { "name": "config", "mountPath": "/etc/statsd-exporter/mappings.yml", "subPath": "mappings.yml", } in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) default_args = ["--statsd.mapping-config=/etc/statsd-exporter/mappings.yml"] assert default_args == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_should_add_volume_and_volume_mount_when_exist_extra_mappings(self): extra_mapping = { "match": "airflow.pool.queued_slots.*", "name": "airflow_pool_queued_slots", "labels": {"pool": "$1"}, } docs = render_chart( values={"statsd": {"enabled": True, "extraMappings": [extra_mapping]}}, show_only=["templates/statsd/statsd-deployment.yaml"], ) assert {"name": "config", "configMap": {"name": "release-name-statsd"}} in jmespath.search( "spec.template.spec.volumes", docs[0] ) assert { "name": "config", "mountPath": "/etc/statsd-exporter/mappings.yml", "subPath": "mappings.yml", } in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) def test_should_add_volume_and_volume_mount_when_exist_override_mappings(self): override_mapping = { "match": "airflow.pool.queued_slots.*", "name": "airflow_pool_queued_slots", "labels": {"pool": "$1"}, } docs = render_chart( values={"statsd": {"enabled": True, "overrideMappings": [override_mapping]}}, show_only=["templates/statsd/statsd-deployment.yaml"], ) assert {"name": "config", "configMap": {"name": "release-name-statsd"}} in jmespath.search( "spec.template.spec.volumes", docs[0] ) assert { "name": "config", "mountPath": "/etc/statsd-exporter/mappings.yml", "subPath": "mappings.yml", } in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", [(8, 10), (10, 8), (8, None), (None, 10), (None, None)], ) def test_revision_history_limit(self, revision_history_limit, global_revision_history_limit): values = {"statsd": {"enabled": True}} if revision_history_limit: values["statsd"]["revisionHistoryLimit"] = revision_history_limit if global_revision_history_limit: values["revisionHistoryLimit"] = global_revision_history_limit docs = render_chart( 
values=values, show_only=["templates/statsd/statsd-deployment.yaml"], ) expected_result = revision_history_limit if revision_history_limit else global_revision_history_limit assert jmespath.search("spec.revisionHistoryLimit", docs[0]) == expected_result def test_should_create_valid_affinity_tolerations_and_node_selector(self): docs = render_chart( values={ "statsd": { "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "nodeSelector": {"diskType": "ssd"}, } }, show_only=["templates/statsd/statsd-deployment.yaml"], ) assert "Deployment" == jmespath.search("kind", docs[0]) assert "foo" == jmespath.search( "spec.template.spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." "key", docs[0], ) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.template.spec.tolerations[0].key", docs[0], ) def test_stastd_resources_are_configurable(self): docs = render_chart( values={ "statsd": { "resources": { "limits": {"cpu": "200m", "memory": "128Mi"}, "requests": {"cpu": "300m", "memory": "169Mi"}, } }, }, show_only=["templates/statsd/statsd-deployment.yaml"], ) assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.containers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) def test_statsd_security_contexts_are_configurable(self): docs = render_chart( values={ "statsd": { "securityContexts": { "pod": { "fsGroup": 1000, "runAsGroup": 1001, "runAsNonRoot": True, "runAsUser": 2000, }, "container": { "allowPrivilegeEscalation": False, "readOnlyRootFilesystem": True, }, } }, }, show_only=["templates/statsd/statsd-deployment.yaml"], ) assert {"allowPrivilegeEscalation": False, "readOnlyRootFilesystem": True} == jmespath.search( "spec.template.spec.containers[0].securityContext", docs[0] ) assert { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, } == jmespath.search("spec.template.spec.securityContext", docs[0]) def test_statsd_security_context_legacy(self): docs = render_chart( values={ "statsd": { "securityContext": { "fsGroup": 1000, "runAsGroup": 1001, "runAsNonRoot": True, "runAsUser": 2000, } }, }, show_only=["templates/statsd/statsd-deployment.yaml"], ) assert { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, } == jmespath.search("spec.template.spec.securityContext", docs[0]) def test_statsd_resources_are_not_added_by_default(self): docs = render_chart( show_only=["templates/statsd/statsd-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].resources", docs[0]) == {} def test_statsd_configmap_by_default(self): docs = render_chart(show_only=["templates/configmaps/statsd-configmap.yaml"]) mappings_yml = jmespath.search('data."mappings.yml"', docs[0]) mappings_yml_obj = yaml.safe_load(mappings_yml) assert "airflow_dagrun_dependency_check" == mappings_yml_obj["mappings"][0]["name"] assert "airflow_pool_starving_tasks" == mappings_yml_obj["mappings"][-1]["name"] def test_statsd_configmap_when_exist_extra_mappings(self): extra_mapping 
= { "match": "airflow.pool.queued_slots.*", "name": "airflow_pool_queued_slots", "labels": {"pool": "$1"}, } docs = render_chart( values={"statsd": {"enabled": True, "extraMappings": [extra_mapping]}}, show_only=["templates/configmaps/statsd-configmap.yaml"], ) mappings_yml = jmespath.search('data."mappings.yml"', docs[0]) mappings_yml_obj = yaml.safe_load(mappings_yml) assert "airflow_dagrun_dependency_check" == mappings_yml_obj["mappings"][0]["name"] assert "airflow_pool_queued_slots" == mappings_yml_obj["mappings"][-1]["name"] def test_statsd_configmap_when_exist_override_mappings(self): override_mapping = { "match": "airflow.pool.queued_slots.*", "name": "airflow_pool_queued_slots", "labels": {"pool": "$1"}, } docs = render_chart( values={"statsd": {"enabled": True, "overrideMappings": [override_mapping]}}, show_only=["templates/configmaps/statsd-configmap.yaml"], ) mappings_yml = jmespath.search('data."mappings.yml"', docs[0]) mappings_yml_obj = yaml.safe_load(mappings_yml) assert 1 == len(mappings_yml_obj["mappings"]) assert "airflow_pool_queued_slots" == mappings_yml_obj["mappings"][0]["name"] def test_statsd_args_can_be_overridden(self): args = ["--some-arg=foo"] docs = render_chart( values={"statsd": {"enabled": True, "args": args}}, show_only=["templates/statsd/statsd-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].args", docs[0]) == args def test_should_add_component_specific_annotations(self): docs = render_chart( values={ "statsd": { "annotations": {"test_annotation": "test_annotation_value"}, "podAnnotations": {"test_pod_annotation": "test_pod_annotation_value"}, }, }, show_only=["templates/statsd/statsd-deployment.yaml"], ) assert "annotations" in jmespath.search("metadata", docs[0]) assert jmespath.search("metadata.annotations", docs[0])["test_annotation"] == "test_annotation_value" assert "test_pod_annotation" in jmespath.search("spec.template.metadata.annotations", docs[0]) assert ( jmespath.search("spec.template.metadata.annotations", docs[0])["test_pod_annotation"] == "test_pod_annotation_value" )
12,172
39.848993
110
py
airflow
airflow-main/helm_tests/webserver/test_pdb_webserver.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath

from tests.charts.helm_template_generator import render_chart


class TestWebserverPdb:
    """Tests webserver pdb."""

    def test_should_pass_validation_with_just_pdb_enabled_v1(self):
        render_chart(
            values={"webserver": {"podDisruptionBudget": {"enabled": True}}},
            show_only=["templates/webserver/webserver-poddisruptionbudget.yaml"],
        )  # checks that no validation exception is raised

    def test_should_pass_validation_with_just_pdb_enabled_v1beta1(self):
        render_chart(
            values={"webserver": {"podDisruptionBudget": {"enabled": True}}},
            show_only=["templates/webserver/webserver-poddisruptionbudget.yaml"],
            kubernetes_version="1.16.0",
        )  # checks that no validation exception is raised

    def test_should_add_component_specific_labels(self):
        docs = render_chart(
            values={
                "webserver": {
                    "podDisruptionBudget": {"enabled": True},
                    "labels": {"test_label": "test_label_value"},
                },
            },
            show_only=["templates/webserver/webserver-poddisruptionbudget.yaml"],
        )

        assert "test_label" in jmespath.search("metadata.labels", docs[0])
        assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value"

    def test_should_pass_validation_with_pdb_enabled_and_min_available_param(self):
        render_chart(
            values={
                "webserver": {
                    "podDisruptionBudget": {
                        "enabled": True,
                        "config": {"maxUnavailable": None, "minAvailable": 1},
                    }
                }
            },
            show_only=["templates/webserver/webserver-poddisruptionbudget.yaml"],
        )  # checks that no validation exception is raised
2,706
40.646154
94
py
airflow
airflow-main/helm_tests/webserver/test_ingress_flower.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import itertools import jmespath import pytest from tests.charts.helm_template_generator import render_chart class TestIngressFlower: """Tests ingress flower.""" def test_should_pass_validation_with_just_ingress_enabled_v1(self): render_chart( values={"flower": {"enabled": True}, "ingress": {"flower": {"enabled": True}}}, show_only=["templates/flower/flower-ingress.yaml"], ) # checks that no validation exception is raised def test_should_pass_validation_with_just_ingress_enabled_v1beta1(self): render_chart( values={"flower": {"enabled": True}, "ingress": {"flower": {"enabled": True}}}, show_only=["templates/flower/flower-ingress.yaml"], kubernetes_version="1.16.0", ) # checks that no validation exception is raised def test_should_allow_more_than_one_annotation(self): docs = render_chart( values={ "ingress": {"flower": {"enabled": True, "annotations": {"aa": "bb", "cc": "dd"}}}, "flower": {"enabled": True}, }, show_only=["templates/flower/flower-ingress.yaml"], ) assert jmespath.search("metadata.annotations", docs[0]) == {"aa": "bb", "cc": "dd"} def test_should_set_ingress_class_name(self): docs = render_chart( values={ "ingress": {"enabled": True, "flower": {"ingressClassName": "foo"}}, "flower": {"enabled": True}, }, show_only=["templates/flower/flower-ingress.yaml"], ) assert "foo" == jmespath.search("spec.ingressClassName", docs[0]) def test_should_ingress_hosts_objs_have_priority_over_host(self): docs = render_chart( values={ "flower": {"enabled": True}, "ingress": { "flower": { "enabled": True, "tls": {"enabled": True, "secretName": "oldsecret"}, "hosts": [ {"name": "*.a-host", "tls": {"enabled": True, "secretName": "newsecret1"}}, {"name": "b-host", "tls": {"enabled": True, "secretName": "newsecret2"}}, {"name": "c-host", "tls": {"enabled": True, "secretName": "newsecret1"}}, {"name": "d-host", "tls": {"enabled": False, "secretName": ""}}, {"name": "e-host"}, ], "host": "old-host", }, }, }, show_only=["templates/flower/flower-ingress.yaml"], ) assert ["*.a-host", "b-host", "c-host", "d-host", "e-host"] == jmespath.search( "spec.rules[*].host", docs[0] ) assert [ {"hosts": ["*.a-host"], "secretName": "newsecret1"}, {"hosts": ["b-host"], "secretName": "newsecret2"}, {"hosts": ["c-host"], "secretName": "newsecret1"}, ] == jmespath.search("spec.tls[*]", docs[0]) def test_should_ingress_hosts_strs_have_priority_over_host(self): docs = render_chart( values={ "flower": {"enabled": True}, "ingress": { "flower": { "enabled": True, "tls": {"enabled": True, "secretName": "secret"}, "hosts": ["*.a-host", "b-host", "c-host", "d-host"], "host": "old-host", }, }, }, show_only=["templates/flower/flower-ingress.yaml"], ) assert ["*.a-host", "b-host", "c-host", "d-host"] == 
jmespath.search("spec.rules[*].host", docs[0]) assert [ {"hosts": ["*.a-host", "b-host", "c-host", "d-host"], "secretName": "secret"} ] == jmespath.search("spec.tls[*]", docs[0]) def test_should_ingress_deprecated_host_and_top_level_tls_still_work(self): docs = render_chart( values={ "flower": {"enabled": True}, "ingress": { "flower": { "enabled": True, "tls": {"enabled": True, "secretName": "supersecret"}, "host": "old-host", }, }, }, show_only=["templates/flower/flower-ingress.yaml"], ) assert ( ["old-host"] == jmespath.search("spec.rules[*].host", docs[0]) == list(itertools.chain.from_iterable(jmespath.search("spec.tls[*].hosts", docs[0]))) ) def test_should_ingress_host_entry_not_exist(self): docs = render_chart( values={"flower": {"enabled": True}, "ingress": {"flower": {"enabled": True}}}, show_only=["templates/flower/flower-ingress.yaml"], ) assert not jmespath.search("spec.rules[*].host", docs[0]) @pytest.mark.parametrize( "global_value, flower_value, expected", [ (None, None, False), (None, False, False), (None, True, True), (False, None, False), (True, None, True), (False, True, True), # We will deploy it if _either_ are true (True, False, True), ], ) def test_ingress_created(self, global_value, flower_value, expected): values = {"flower": {"enabled": True}, "ingress": {}} if global_value is not None: values["ingress"]["enabled"] = global_value if flower_value is not None: values["ingress"]["flower"] = {"enabled": flower_value} if values["ingress"] == {}: del values["ingress"] docs = render_chart(values=values, show_only=["templates/flower/flower-ingress.yaml"]) assert expected == (1 == len(docs)) def test_ingress_not_created_flower_disabled(self): docs = render_chart( values={ "ingress": { "flower": {"enabled": True}, } }, show_only=["templates/flower/flower-ingress.yaml"], ) assert 0 == len(docs) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "ingress": {"enabled": True}, "flower": { "enabled": True, "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/flower/flower-ingress.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value"
7,650
39.481481
107
py
airflow
airflow-main/helm_tests/webserver/test_ingress_web.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart class TestIngressWeb: """Tests ingress web.""" def test_should_pass_validation_with_just_ingress_enabled_v1(self): render_chart( values={"ingress": {"web": {"enabled": True}}}, show_only=["templates/webserver/webserver-ingress.yaml"], ) # checks that no validation exception is raised def test_should_pass_validation_with_just_ingress_enabled_v1beta1(self): render_chart( values={"ingress": {"web": {"enabled": True}}}, show_only=["templates/webserver/webserver-ingress.yaml"], kubernetes_version="1.16.0", ) # checks that no validation exception is raised def test_should_allow_more_than_one_annotation(self): docs = render_chart( values={"ingress": {"web": {"enabled": True, "annotations": {"aa": "bb", "cc": "dd"}}}}, show_only=["templates/webserver/webserver-ingress.yaml"], ) assert {"aa": "bb", "cc": "dd"} == jmespath.search("metadata.annotations", docs[0]) def test_should_set_ingress_class_name(self): docs = render_chart( values={"ingress": {"web": {"enabled": True, "ingressClassName": "foo"}}}, show_only=["templates/webserver/webserver-ingress.yaml"], ) assert "foo" == jmespath.search("spec.ingressClassName", docs[0]) def test_should_ingress_hosts_objs_have_priority_over_host(self): docs = render_chart( values={ "ingress": { "web": { "enabled": True, "tls": {"enabled": True, "secretName": "oldsecret"}, "hosts": [ {"name": "*.a-host", "tls": {"enabled": True, "secretName": "newsecret1"}}, {"name": "b-host", "tls": {"enabled": True, "secretName": "newsecret2"}}, {"name": "c-host", "tls": {"enabled": True, "secretName": "newsecret1"}}, {"name": "d-host", "tls": {"enabled": False, "secretName": ""}}, {"name": "e-host"}, ], "host": "old-host", }, } }, show_only=["templates/webserver/webserver-ingress.yaml"], ) assert ["*.a-host", "b-host", "c-host", "d-host", "e-host"] == jmespath.search( "spec.rules[*].host", docs[0] ) assert [ {"hosts": ["*.a-host"], "secretName": "newsecret1"}, {"hosts": ["b-host"], "secretName": "newsecret2"}, {"hosts": ["c-host"], "secretName": "newsecret1"}, ] == jmespath.search("spec.tls[*]", docs[0]) def test_should_ingress_hosts_strs_have_priority_over_host(self): docs = render_chart( values={ "ingress": { "web": { "enabled": True, "tls": {"enabled": True, "secretName": "secret"}, "hosts": ["*.a-host", "b-host", "c-host", "d-host"], "host": "old-host", }, } }, show_only=["templates/webserver/webserver-ingress.yaml"], ) assert ["*.a-host", "b-host", "c-host", "d-host"] == jmespath.search("spec.rules[*].host", docs[0]) assert [ {"hosts": ["*.a-host", "b-host", "c-host", "d-host"], "secretName": "secret"} ] == jmespath.search("spec.tls[*]", docs[0]) def 
test_should_ingress_deprecated_host_and_top_level_tls_still_work(self): docs = render_chart( values={ "ingress": { "web": { "enabled": True, "tls": {"enabled": True, "secretName": "supersecret"}, "host": "old-host", }, } }, show_only=["templates/webserver/webserver-ingress.yaml"], ) assert ( ["old-host"] == jmespath.search("spec.rules[*].host", docs[0]) == jmespath.search("spec.tls[0].hosts", docs[0]) ) def test_should_ingress_host_entry_not_exist(self): docs = render_chart( values={ "ingress": { "web": { "enabled": True, } } }, show_only=["templates/webserver/webserver-ingress.yaml"], ) assert not jmespath.search("spec.rules[*].host", docs[0]) @pytest.mark.parametrize( "global_value, web_value, expected", [ (None, None, False), (None, False, False), (None, True, True), (False, None, False), (True, None, True), (False, True, True), # We will deploy it if _either_ are true (True, False, True), ], ) def test_ingress_created(self, global_value, web_value, expected): values = {"ingress": {}} if global_value is not None: values["ingress"]["enabled"] = global_value if web_value is not None: values["ingress"]["web"] = {"enabled": web_value} if values["ingress"] == {}: del values["ingress"] docs = render_chart(values=values, show_only=["templates/webserver/webserver-ingress.yaml"]) assert expected == (1 == len(docs)) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "ingress": {"enabled": True}, "webserver": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/webserver/webserver-ingress.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value"
6,961
39.952941
107
py
airflow
airflow-main/helm_tests/webserver/test_webserver.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart class TestWebserverDeployment: """Tests webserver deployment.""" def test_should_add_host_header_to_liveness_and_readiness_probes(self): docs = render_chart( values={ "config": { "webserver": {"base_url": "https://example.com:21222/mypath/path"}, } }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert {"name": "Host", "value": "example.com"} in jmespath.search( "spec.template.spec.containers[0].livenessProbe.httpGet.httpHeaders", docs[0] ) assert {"name": "Host", "value": "example.com"} in jmespath.search( "spec.template.spec.containers[0].readinessProbe.httpGet.httpHeaders", docs[0] ) def test_should_add_path_to_liveness_and_readiness_probes(self): docs = render_chart( values={ "config": { "webserver": {"base_url": "https://example.com:21222/mypath/path"}, } }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert ( jmespath.search("spec.template.spec.containers[0].livenessProbe.httpGet.path", docs[0]) == "/mypath/path/health" ) assert ( jmespath.search("spec.template.spec.containers[0].readinessProbe.httpGet.path", docs[0]) == "/mypath/path/health" ) @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", [(8, 10), (10, 8), (8, None), (None, 10), (None, None)], ) def test_revision_history_limit(self, revision_history_limit, global_revision_history_limit): values = {"webserver": {}} if revision_history_limit: values["webserver"]["revisionHistoryLimit"] = revision_history_limit if global_revision_history_limit: values["revisionHistoryLimit"] = global_revision_history_limit docs = render_chart( values=values, show_only=["templates/webserver/webserver-deployment.yaml"], ) expected_result = revision_history_limit if revision_history_limit else global_revision_history_limit assert jmespath.search("spec.revisionHistoryLimit", docs[0]) == expected_result @pytest.mark.parametrize("values", [{"config": {"webserver": {"base_url": ""}}}, {}]) def test_should_not_contain_host_header(self, values): print(values) docs = render_chart(values=values, show_only=["templates/webserver/webserver-deployment.yaml"]) assert ( jmespath.search("spec.template.spec.containers[0].livenessProbe.httpGet.httpHeaders", docs[0]) is None ) assert ( jmespath.search("spec.template.spec.containers[0].readinessProbe.httpGet.httpHeaders", docs[0]) is None ) def test_should_use_templated_base_url_for_probes(self): docs = render_chart( values={ "config": { "webserver": { "base_url": "https://{{ .Release.Name }}.com:21222/mypath/{{ .Release.Name }}/path" }, } }, show_only=["templates/webserver/webserver-deployment.yaml"], ) container = 
jmespath.search("spec.template.spec.containers[0]", docs[0]) assert {"name": "Host", "value": "release-name.com"} in jmespath.search( "livenessProbe.httpGet.httpHeaders", container ) assert {"name": "Host", "value": "release-name.com"} in jmespath.search( "readinessProbe.httpGet.httpHeaders", container ) assert "/mypath/release-name/path/health" == jmespath.search("livenessProbe.httpGet.path", container) assert "/mypath/release-name/path/health" == jmespath.search("readinessProbe.httpGet.path", container) def test_should_add_scheme_to_liveness_and_readiness_probes(self): docs = render_chart( values={ "webserver": { "livenessProbe": {"scheme": "HTTPS"}, "readinessProbe": {"scheme": "HTTPS"}, } }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert "HTTPS" in jmespath.search( "spec.template.spec.containers[0].livenessProbe.httpGet.scheme", docs[0] ) assert "HTTPS" in jmespath.search( "spec.template.spec.containers[0].readinessProbe.httpGet.scheme", docs[0] ) def test_should_add_volume_and_volume_mount_when_exist_webserver_config(self): docs = render_chart( values={"webserver": {"webserverConfig": "CSRF_ENABLED = True"}}, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert { "name": "webserver-config", "configMap": {"name": "release-name-webserver-config"}, } in jmespath.search("spec.template.spec.volumes", docs[0]) assert { "name": "webserver-config", "mountPath": "/opt/airflow/webserver_config.py", "subPath": "webserver_config.py", "readOnly": True, } in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) def test_should_add_extra_containers(self): docs = render_chart( values={ "executor": "CeleryExecutor", "webserver": { "extraContainers": [ {"name": "test-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert { "name": "test-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) def test_should_add_extraEnvs(self): docs = render_chart( values={ "webserver": { "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}], }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.containers[0].env", docs[0] ) def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( values={ "webserver": { "extraVolumes": [{"name": "test-volume-{{ .Chart.Name }}", "emptyDir": {}}], "extraVolumeMounts": [ {"name": "test-volume-{{ .Chart.Name }}", "mountPath": "/opt/test"} ], }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert "test-volume-airflow" == jmespath.search("spec.template.spec.volumes[-1].name", docs[0]) assert "test-volume-airflow" == jmespath.search( "spec.template.spec.containers[0].volumeMounts[-1].name", docs[0] ) assert "test-volume-airflow" == jmespath.search( "spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0] ) def test_should_add_global_volume_and_global_volume_mount(self): docs = render_chart( values={ "volumes": [{"name": "test-volume", "emptyDir": {}}], "volumeMounts": [{"name": "test-volume", "mountPath": "/opt/test"}], }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert "test-volume" == jmespath.search("spec.template.spec.volumes[-1].name", docs[0]) assert "test-volume" == jmespath.search( "spec.template.spec.containers[0].volumeMounts[-1].name", docs[0] ) def 
test_should_add_extraEnvs_to_wait_for_migration_container(self): docs = render_chart( values={ "webserver": { "waitForMigrations": { "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}], }, }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.initContainers[0].env", docs[0] ) @pytest.mark.parametrize( "airflow_version, expected_arg", [ ("2.0.0", ["airflow", "db", "check-migrations", "--migration-wait-timeout=60"]), ("2.1.0", ["airflow", "db", "check-migrations", "--migration-wait-timeout=60"]), ("1.10.2", ["python", "-c"]), ], ) def test_wait_for_migration_airflow_version(self, airflow_version, expected_arg): docs = render_chart( values={ "airflowVersion": airflow_version, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) # Don't test the full string, just the length of the expect matches actual = jmespath.search("spec.template.spec.initContainers[0].args", docs[0]) assert expected_arg == actual[: len(expected_arg)] def test_disable_wait_for_migration(self): docs = render_chart( values={ "webserver": { "waitForMigrations": {"enabled": False}, }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) actual = jmespath.search( "spec.template.spec.initContainers[?name=='wait-for-airflow-migrations']", docs[0] ) assert actual is None def test_should_add_extra_init_containers(self): docs = render_chart( values={ "webserver": { "extraInitContainers": [ {"name": "test-init-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "webserver": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert "test_label" in jmespath.search("spec.template.metadata.labels", docs[0]) assert jmespath.search("spec.template.metadata.labels", docs[0])["test_label"] == "test_label_value" def test_should_create_valid_affinity_tolerations_and_node_selector(self): docs = render_chart( values={ "webserver": { "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "nodeSelector": {"diskType": "ssd"}, } }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert "Deployment" == jmespath.search("kind", docs[0]) assert "foo" == jmespath.search( "spec.template.spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." "key", docs[0], ) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.template.spec.tolerations[0].key", docs[0], ) def test_should_create_default_affinity(self): docs = render_chart(show_only=["templates/webserver/webserver-deployment.yaml"]) assert {"component": "webserver"} == jmespath.search( "spec.template.spec.affinity.podAntiAffinity." "preferredDuringSchedulingIgnoredDuringExecution[0]." 
"podAffinityTerm.labelSelector.matchLabels", docs[0], ) def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): """When given both global and webserver affinity etc, webserver affinity etc is used.""" expected_affinity = { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } } expected_topology_spread_constraints = { "maxSkew": 1, "topologyKey": "foo", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } docs = render_chart( values={ "webserver": { "affinity": expected_affinity, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [expected_topology_spread_constraints], "nodeSelector": {"type": "ssd"}, }, "affinity": { "nodeAffinity": { "preferredDuringSchedulingIgnoredDuringExecution": [ { "weight": 1, "preference": { "matchExpressions": [ {"key": "not-me", "operator": "In", "values": ["true"]}, ] }, } ] } }, "tolerations": [ {"key": "not-me", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [ { "maxSkew": 1, "topologyKey": "not-me", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } ], "nodeSelector": {"type": "not-me"}, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert expected_affinity == jmespath.search("spec.template.spec.affinity", docs[0]) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.type", docs[0], ) tolerations = jmespath.search("spec.template.spec.tolerations", docs[0]) assert 1 == len(tolerations) assert "dynamic-pods" == tolerations[0]["key"] assert expected_topology_spread_constraints == jmespath.search( "spec.template.spec.topologySpreadConstraints[0]", docs[0] ) @pytest.mark.parametrize( "log_persistence_values, expected_claim_name", [ ({"enabled": False}, None), ({"enabled": True}, "release-name-logs"), ({"enabled": True, "existingClaim": "test-claim"}, "test-claim"), ], ) def test_logs_persistence_adds_volume_and_mount(self, log_persistence_values, expected_claim_name): docs = render_chart( values={"logs": {"persistence": log_persistence_values}}, show_only=["templates/webserver/webserver-deployment.yaml"], ) if expected_claim_name: assert { "name": "logs", "persistentVolumeClaim": {"claimName": expected_claim_name}, } in jmespath.search("spec.template.spec.volumes", docs[0]) assert { "name": "logs", "mountPath": "/opt/airflow/logs", } in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) else: assert "logs" not in [v["name"] for v in jmespath.search("spec.template.spec.volumes", docs[0])] assert "logs" not in [ v["name"] for v in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) ] @pytest.mark.parametrize( "af_version, pod_template_file_expected", [ ("1.10.10", False), ("1.10.12", True), ("2.1.0", True), ], ) def test_config_volumes_and_mounts(self, af_version, pod_template_file_expected): # setup docs = render_chart( values={"airflowVersion": af_version}, show_only=["templates/webserver/webserver-deployment.yaml"], ) # default config assert { "name": "config", "mountPath": "/opt/airflow/airflow.cfg", "readOnly": True, "subPath": "airflow.cfg", } in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) # pod_template_file config assert pod_template_file_expected == ( { "name": "config", 
"mountPath": "/opt/airflow/pod_templates/pod_template_file.yaml", "readOnly": True, "subPath": "pod_template_file.yaml", } in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) ) def test_webserver_resources_are_configurable(self): docs = render_chart( values={ "webserver": { "resources": { "limits": {"cpu": "200m", "memory": "128Mi"}, "requests": {"cpu": "300m", "memory": "169Mi"}, } }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) assert "200m" == jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.containers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) # initContainer wait-for-airflow-migrations assert "128Mi" == jmespath.search( "spec.template.spec.initContainers[0].resources.limits.memory", docs[0] ) assert "200m" == jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.initContainers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search( "spec.template.spec.initContainers[0].resources.requests.cpu", docs[0] ) def test_webserver_security_contexts_are_configurable(self): docs = render_chart( values={ "webserver": { "securityContexts": { "pod": { "fsGroup": 1000, "runAsGroup": 1001, "runAsNonRoot": True, "runAsUser": 2000, }, "container": { "allowPrivilegeEscalation": False, "readOnlyRootFilesystem": True, }, } }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert {"allowPrivilegeEscalation": False, "readOnlyRootFilesystem": True} == jmespath.search( "spec.template.spec.containers[0].securityContext", docs[0] ) assert { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, } == jmespath.search("spec.template.spec.securityContext", docs[0]) def test_webserver_security_context_legacy(self): docs = render_chart( values={ "webserver": { "securityContext": { "fsGroup": 1000, "runAsGroup": 1001, "runAsNonRoot": True, "runAsUser": 2000, } }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, } == jmespath.search("spec.template.spec.securityContext", docs[0]) def test_webserver_resources_are_not_added_by_default(self): docs = render_chart( show_only=["templates/webserver/webserver-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].resources", docs[0]) == {} assert jmespath.search("spec.template.spec.initContainers[0].resources", docs[0]) == {} @pytest.mark.parametrize( "airflow_version, expected_strategy", [ ("2.0.2", {"type": "RollingUpdate", "rollingUpdate": {"maxSurge": 1, "maxUnavailable": 0}}), ("1.10.14", {"type": "Recreate"}), ("1.9.0", {"type": "Recreate"}), ("2.1.0", {"type": "RollingUpdate", "rollingUpdate": {"maxSurge": 1, "maxUnavailable": 0}}), ], ) def test_default_update_strategy(self, airflow_version, expected_strategy): docs = render_chart( values={"airflowVersion": airflow_version}, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert jmespath.search("spec.strategy", docs[0]) == expected_strategy def test_update_strategy(self): expected_strategy = {"type": "RollingUpdate", "rollingUpdate": {"maxUnavailable": 1}} docs = render_chart( values={"webserver": 
{"strategy": expected_strategy}}, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert jmespath.search("spec.strategy", docs[0]) == expected_strategy def test_no_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": None}, show_only=["templates/webserver/webserver-deployment.yaml"] ) volume_mounts = jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts) volume_mounts_init = jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts_init) def test_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": "# Well hello!"}, show_only=["templates/webserver/webserver-deployment.yaml"], ) volume_mount = { "name": "config", "mountPath": "/opt/airflow/config/airflow_local_settings.py", "subPath": "airflow_local_settings.py", "readOnly": True, } assert volume_mount in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert volume_mount in jmespath.search("spec.template.spec.initContainers[0].volumeMounts", docs[0]) def test_default_command_and_args(self): docs = render_chart(show_only=["templates/webserver/webserver-deployment.yaml"]) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None assert ["bash", "-c", "exec airflow webserver"] == jmespath.search( "spec.template.spec.containers[0].args", docs[0] ) @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) def test_command_and_args_overrides(self, command, args): docs = render_chart( values={"webserver": {"command": command, "args": args}}, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert command == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert args == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_command_and_args_overrides_are_templated(self): docs = render_chart( values={"webserver": {"command": ["{{ .Release.Name }}"], "args": ["{{ .Release.Service }}"]}}, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) @pytest.mark.parametrize( "airflow_version, dag_values", [ ("1.10.15", {"gitSync": {"enabled": False}}), ("1.10.15", {"persistence": {"enabled": False}}), ("1.10.15", {"gitSync": {"enabled": False}, "persistence": {"enabled": False}}), ("2.0.0", {"gitSync": {"enabled": True}}), ("2.0.0", {"gitSync": {"enabled": False}}), ("2.0.0", {"persistence": {"enabled": True}}), ("2.0.0", {"persistence": {"enabled": False}}), ("2.0.0", {"gitSync": {"enabled": True}, "persistence": {"enabled": True}}), ], ) def test_no_dags_mount_or_volume_or_gitsync_sidecar_expected(self, airflow_version, dag_values): docs = render_chart( values={"dags": dag_values, "airflowVersion": airflow_version}, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert "dags" not in [ vm["name"] for vm in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) ] assert "dags" not in [vm["name"] for vm in jmespath.search("spec.template.spec.volumes", docs[0])] assert 1 == len(jmespath.search("spec.template.spec.containers", docs[0])) @pytest.mark.parametrize( "airflow_version, dag_values, expected_read_only", [ ("1.10.15", {"gitSync": 
{"enabled": True}}, True), ("1.10.15", {"persistence": {"enabled": True}}, False), ("1.10.15", {"gitSync": {"enabled": True}, "persistence": {"enabled": True}}, True), ], ) def test_dags_mount(self, airflow_version, dag_values, expected_read_only): docs = render_chart( values={"dags": dag_values, "airflowVersion": airflow_version}, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert { "mountPath": "/opt/airflow/dags", "name": "dags", "readOnly": expected_read_only, } in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) def test_dags_gitsync_volume_and_sidecar_and_init_container(self): docs = render_chart( values={"dags": {"gitSync": {"enabled": True}}, "airflowVersion": "1.10.15"}, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert {"name": "dags", "emptyDir": {}} in jmespath.search("spec.template.spec.volumes", docs[0]) assert "git-sync" in [c["name"] for c in jmespath.search("spec.template.spec.containers", docs[0])] assert "git-sync-init" in [ c["name"] for c in jmespath.search("spec.template.spec.initContainers", docs[0]) ] @pytest.mark.parametrize( "dags_values, expected_claim_name", [ ({"persistence": {"enabled": True}}, "release-name-dags"), ({"persistence": {"enabled": True, "existingClaim": "test-claim"}}, "test-claim"), ({"persistence": {"enabled": True}, "gitSync": {"enabled": True}}, "release-name-dags"), ], ) def test_dags_persistence_volume_no_sidecar(self, dags_values, expected_claim_name): docs = render_chart( values={"dags": dags_values, "airflowVersion": "1.10.15"}, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert { "name": "dags", "persistentVolumeClaim": {"claimName": expected_claim_name}, } in jmespath.search("spec.template.spec.volumes", docs[0]) # No gitsync sidecar or init container assert 1 == len(jmespath.search("spec.template.spec.containers", docs[0])) assert 1 == len(jmespath.search("spec.template.spec.initContainers", docs[0])) def test_should_add_component_specific_annotations(self): docs = render_chart( values={ "webserver": { "annotations": {"test_annotation": "test_annotation_value"}, }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert "annotations" in jmespath.search("metadata", docs[0]) assert jmespath.search("metadata.annotations", docs[0])["test_annotation"] == "test_annotation_value" def test_webserver_pod_hostaliases(self): docs = render_chart( values={ "webserver": { "hostAliases": [{"ip": "127.0.0.1", "hostnames": ["foo.local"]}], }, }, show_only=["templates/webserver/webserver-deployment.yaml"], ) assert "127.0.0.1" == jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) assert "foo.local" == jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) class TestWebserverService: """Tests webserver service.""" def test_default_service(self): docs = render_chart( show_only=["templates/webserver/webserver-service.yaml"], ) assert "release-name-webserver" == jmespath.search("metadata.name", docs[0]) assert jmespath.search("metadata.annotations", docs[0]) is None assert {"tier": "airflow", "component": "webserver", "release": "release-name"} == jmespath.search( "spec.selector", docs[0] ) assert "ClusterIP" == jmespath.search("spec.type", docs[0]) assert {"name": "airflow-ui", "port": 8080} in jmespath.search("spec.ports", docs[0]) def test_overrides(self): docs = render_chart( values={ "ports": {"airflowUI": 9000}, "webserver": { "service": { "type": "LoadBalancer", "loadBalancerIP": "127.0.0.1", "annotations": {"foo": 
"bar"}, "loadBalancerSourceRanges": ["10.123.0.0/16"], } }, }, show_only=["templates/webserver/webserver-service.yaml"], ) assert {"foo": "bar"} == jmespath.search("metadata.annotations", docs[0]) assert "LoadBalancer" == jmespath.search("spec.type", docs[0]) assert {"name": "airflow-ui", "port": 9000} in jmespath.search("spec.ports", docs[0]) assert "127.0.0.1" == jmespath.search("spec.loadBalancerIP", docs[0]) assert ["10.123.0.0/16"] == jmespath.search("spec.loadBalancerSourceRanges", docs[0]) @pytest.mark.parametrize( "ports, expected_ports", [ ([{"port": 8888}], [{"port": 8888}]), # name is optional with a single port ( [{"name": "{{ .Release.Name }}", "protocol": "UDP", "port": "{{ .Values.ports.airflowUI }}"}], [{"name": "release-name", "protocol": "UDP", "port": 8080}], ), ([{"name": "only_sidecar", "port": "{{ int 9000 }}"}], [{"name": "only_sidecar", "port": 9000}]), ( [ {"name": "airflow-ui", "port": "{{ .Values.ports.airflowUI }}"}, {"name": "sidecar", "port": 80, "targetPort": "sidecar"}, ], [ {"name": "airflow-ui", "port": 8080}, {"name": "sidecar", "port": 80, "targetPort": "sidecar"}, ], ), ], ) def test_ports_overrides(self, ports, expected_ports): docs = render_chart( values={ "webserver": {"service": {"ports": ports}}, }, show_only=["templates/webserver/webserver-service.yaml"], ) assert expected_ports == jmespath.search("spec.ports", docs[0]) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "webserver": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/webserver/webserver-service.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" @pytest.mark.parametrize( "ports, expected_ports", [ ( [{"nodePort": "31000", "port": "8080"}], [{"nodePort": 31000, "port": 8080}], ), ( [{"port": "8080"}], [{"port": 8080}], ), ], ) def test_nodeport_service(self, ports, expected_ports): docs = render_chart( values={ "webserver": { "service": { "type": "NodePort", "ports": ports, } }, }, show_only=["templates/webserver/webserver-service.yaml"], ) assert "NodePort" == jmespath.search("spec.type", docs[0]) assert expected_ports == jmespath.search("spec.ports", docs[0]) class TestWebserverConfigmap: """Tests webserver configmap.""" def test_no_webserver_config_configmap_by_default(self): docs = render_chart(show_only=["templates/configmaps/webserver-configmap.yaml"]) assert 0 == len(docs) def test_no_webserver_config_configmap_with_configmap_name(self): docs = render_chart( values={ "webserver": { "webserverConfig": "CSRF_ENABLED = True # {{ .Release.Name }}", "webserverConfigConfigMapName": "my-configmap", } }, show_only=["templates/configmaps/webserver-configmap.yaml"], ) assert 0 == len(docs) def test_webserver_config_configmap(self): docs = render_chart( values={"webserver": {"webserverConfig": "CSRF_ENABLED = True # {{ .Release.Name }}"}}, show_only=["templates/configmaps/webserver-configmap.yaml"], ) assert "ConfigMap" == docs[0]["kind"] assert "release-name-webserver-config" == jmespath.search("metadata.name", docs[0]) assert ( "CSRF_ENABLED = True # release-name" == jmespath.search('data."webserver_config.py"', docs[0]).strip() ) class TestWebserverNetworkPolicy: """Tests webserver network policy.""" def test_off_by_default(self): docs = render_chart( show_only=["templates/webserver/webserver-networkpolicy.yaml"], ) assert 0 == len(docs) def test_defaults(self): docs = render_chart( values={ "networkPolicies": 
{"enabled": True}, "webserver": { "networkPolicy": { "ingress": { "from": [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] } } }, }, show_only=["templates/webserver/webserver-networkpolicy.yaml"], ) assert 1 == len(docs) assert "NetworkPolicy" == docs[0]["kind"] assert [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] == jmespath.search( "spec.ingress[0].from", docs[0] ) assert [{"port": 8080}] == jmespath.search("spec.ingress[0].ports", docs[0]) @pytest.mark.parametrize( "ports, expected_ports", [ ([{"port": "sidecar"}], [{"port": "sidecar"}]), ( [ {"port": "{{ .Values.ports.airflowUI }}"}, {"port": 80}, ], [ {"port": 8080}, {"port": 80}, ], ), ], ) def test_ports_overrides(self, ports, expected_ports): docs = render_chart( values={ "networkPolicies": {"enabled": True}, "webserver": { "networkPolicy": { "ingress": { "from": [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}], "ports": ports, } } }, }, show_only=["templates/webserver/webserver-networkpolicy.yaml"], ) assert expected_ports == jmespath.search("spec.ingress[0].ports", docs[0]) def test_deprecated_from_param(self): docs = render_chart( values={ "networkPolicies": {"enabled": True}, "webserver": { "extraNetworkPolicies": [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] }, }, show_only=["templates/webserver/webserver-networkpolicy.yaml"], ) assert [{"namespaceSelector": {"matchLabels": {"release": "myrelease"}}}] == jmespath.search( "spec.ingress[0].from", docs[0] ) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "networkPolicies": {"enabled": True}, "webserver": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/webserver/webserver-networkpolicy.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" class TestWebserverServiceAccount: """Tests webserver service account.""" def test_should_add_component_specific_labels(self): docs = render_chart( values={ "webserver": { "serviceAccount": {"create": True}, "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/webserver/webserver-serviceaccount.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value"
41,376
39.685349
110
py
airflow
airflow-main/helm_tests/webserver/__init__.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
785
45.235294
62
py
airflow
airflow-main/helm_tests/airflow_core/test_pdb_scheduler.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath

from tests.charts.helm_template_generator import render_chart


class TestSchedulerPdb:
    """Tests Scheduler PDB."""

    def test_should_pass_validation_with_just_pdb_enabled_v1(self):
        render_chart(
            values={"scheduler": {"podDisruptionBudget": {"enabled": True}}},
            show_only=["templates/scheduler/scheduler-poddisruptionbudget.yaml"],
        )  # checks that no validation exception is raised

    def test_should_pass_validation_with_just_pdb_enabled_v1beta1(self):
        render_chart(
            values={"scheduler": {"podDisruptionBudget": {"enabled": True}}},
            show_only=["templates/scheduler/scheduler-poddisruptionbudget.yaml"],
            kubernetes_version="1.16.0",
        )  # checks that no validation exception is raised

    def test_should_add_component_specific_labels(self):
        docs = render_chart(
            values={
                "scheduler": {
                    "podDisruptionBudget": {"enabled": True},
                    "labels": {"test_label": "test_label_value"},
                },
            },
            show_only=["templates/scheduler/scheduler-poddisruptionbudget.yaml"],
        )

        assert "test_label" in jmespath.search("metadata.labels", docs[0])
        assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value"

    def test_should_pass_validation_with_pdb_enabled_and_min_available_param(self):
        render_chart(
            values={
                "scheduler": {
                    "podDisruptionBudget": {
                        "enabled": True,
                        "config": {"maxUnavailable": None, "minAvailable": 1},
                    }
                }
            },
            show_only=["templates/scheduler/scheduler-poddisruptionbudget.yaml"],
        )  # checks that no validation exception is raised
2,707
40.030303
94
py
airflow
airflow-main/helm_tests/airflow_core/test_worker.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart from tests.charts.log_groomer import LogGroomerTestBase class TestWorker: """Tests worker.""" @pytest.mark.parametrize( "executor, persistence, kind", [ ("CeleryExecutor", False, "Deployment"), ("CeleryExecutor", True, "StatefulSet"), ("CeleryKubernetesExecutor", False, "Deployment"), ("CeleryKubernetesExecutor", True, "StatefulSet"), ], ) def test_worker_kind(self, executor, persistence, kind): """Test worker kind is StatefulSet when worker persistence is enabled.""" docs = render_chart( values={ "executor": executor, "workers": {"persistence": {"enabled": persistence}}, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert kind == jmespath.search("kind", docs[0]) @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", [(8, 10), (10, 8), (8, None), (None, 10), (None, None)], ) def test_revision_history_limit(self, revision_history_limit, global_revision_history_limit): values = {"workers": {}} if revision_history_limit: values["workers"]["revisionHistoryLimit"] = revision_history_limit if global_revision_history_limit: values["revisionHistoryLimit"] = global_revision_history_limit docs = render_chart( values=values, show_only=["templates/workers/worker-deployment.yaml"], ) expected_result = revision_history_limit if revision_history_limit else global_revision_history_limit assert jmespath.search("spec.revisionHistoryLimit", docs[0]) == expected_result def test_should_add_extra_containers(self): docs = render_chart( values={ "executor": "CeleryExecutor", "workers": { "extraContainers": [ {"name": "test-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert { "name": "test-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) def test_disable_wait_for_migration(self): docs = render_chart( values={ "workers": { "waitForMigrations": {"enabled": False}, }, }, show_only=["templates/workers/worker-deployment.yaml"], ) actual = jmespath.search( "spec.template.spec.initContainers[?name=='wait-for-airflow-migrations']", docs[0] ) assert actual is None def test_should_add_extra_init_containers(self): docs = render_chart( values={ "workers": { "extraInitContainers": [ {"name": "test-init-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) def test_should_add_extra_volume_and_extra_volume_mount(self): docs 
= render_chart( values={ "executor": "CeleryExecutor", "workers": { "extraVolumes": [{"name": "test-volume-{{ .Chart.Name }}", "emptyDir": {}}], "extraVolumeMounts": [ {"name": "test-volume-{{ .Chart.Name }}", "mountPath": "/opt/test"} ], }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "test-volume-airflow" == jmespath.search("spec.template.spec.volumes[0].name", docs[0]) assert "test-volume-airflow" == jmespath.search( "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] ) assert "test-volume-airflow" == jmespath.search( "spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0] ) def test_should_add_global_volume_and_global_volume_mount(self): docs = render_chart( values={ "volumes": [{"name": "test-volume", "emptyDir": {}}], "volumeMounts": [{"name": "test-volume", "mountPath": "/opt/test"}], }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "test-volume" == jmespath.search("spec.template.spec.volumes[0].name", docs[0]) assert "test-volume" == jmespath.search( "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] ) def test_should_add_extraEnvs(self): docs = render_chart( values={ "workers": { "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}], }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.containers[0].env", docs[0] ) def test_should_add_extraEnvs_to_wait_for_migration_container(self): docs = render_chart( values={ "workers": { "waitForMigrations": {"env": [{"name": "TEST_ENV_1", "value": "test_env_1"}]}, }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.initContainers[0].env", docs[0] ) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "executor": "CeleryExecutor", "workers": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "test_label" in jmespath.search("spec.template.metadata.labels", docs[0]) assert jmespath.search("spec.template.metadata.labels", docs[0])["test_label"] == "test_label_value" def test_workers_host_aliases(self): docs = render_chart( values={ "executor": "CeleryExecutor", "workers": { "hostAliases": [{"ip": "127.0.0.2", "hostnames": ["test.hostname"]}], }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "127.0.0.2" == jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) assert "test.hostname" == jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) @pytest.mark.parametrize( "persistence, update_strategy, expected_update_strategy", [ (False, None, None), (True, {"rollingUpdate": {"partition": 0}}, {"rollingUpdate": {"partition": 0}}), (True, None, None), ], ) def test_workers_update_strategy(self, persistence, update_strategy, expected_update_strategy): docs = render_chart( values={ "executor": "CeleryExecutor", "workers": { "persistence": {"enabled": persistence}, "updateStrategy": update_strategy, }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert expected_update_strategy == jmespath.search("spec.updateStrategy", docs[0]) @pytest.mark.parametrize( "persistence, strategy, expected_strategy", [ (True, None, None), ( False, {"rollingUpdate": {"maxSurge": "100%", "maxUnavailable": "50%"}}, {"rollingUpdate": {"maxSurge": "100%", "maxUnavailable": "50%"}}, ), (False, None, None), ], ) def 
test_workers_strategy(self, persistence, strategy, expected_strategy): docs = render_chart( values={ "executor": "CeleryExecutor", "workers": {"persistence": {"enabled": persistence}, "strategy": strategy}, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert expected_strategy == jmespath.search("spec.strategy", docs[0]) def test_should_create_valid_affinity_tolerations_and_node_selector(self): docs = render_chart( values={ "executor": "CeleryExecutor", "workers": { "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "nodeSelector": {"diskType": "ssd"}, }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "StatefulSet" == jmespath.search("kind", docs[0]) assert "foo" == jmespath.search( "spec.template.spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." "key", docs[0], ) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.template.spec.tolerations[0].key", docs[0], ) def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): """When given both global and worker affinity etc, worker affinity etc is used.""" expected_affinity = { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } } expected_topology_spread_constraints = { "maxSkew": 1, "topologyKey": "foo", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } docs = render_chart( values={ "workers": { "affinity": expected_affinity, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [expected_topology_spread_constraints], "nodeSelector": {"type": "ssd"}, }, "affinity": { "nodeAffinity": { "preferredDuringSchedulingIgnoredDuringExecution": [ { "weight": 1, "preference": { "matchExpressions": [ {"key": "not-me", "operator": "In", "values": ["true"]}, ] }, } ] } }, "tolerations": [ {"key": "not-me", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [ { "maxSkew": 1, "topologyKey": "not-me", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } ], "nodeSelector": {"type": "not-me"}, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert expected_affinity == jmespath.search("spec.template.spec.affinity", docs[0]) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.type", docs[0], ) tolerations = jmespath.search("spec.template.spec.tolerations", docs[0]) assert 1 == len(tolerations) assert "dynamic-pods" == tolerations[0]["key"] assert expected_topology_spread_constraints == jmespath.search( "spec.template.spec.topologySpreadConstraints[0]", docs[0] ) def test_should_create_default_affinity(self): docs = render_chart(show_only=["templates/workers/worker-deployment.yaml"]) assert {"component": "worker"} == jmespath.search( "spec.template.spec.affinity.podAntiAffinity." "preferredDuringSchedulingIgnoredDuringExecution[0]." 
"podAffinityTerm.labelSelector.matchLabels", docs[0], ) def test_runtime_class_name_values_are_configurable(self): docs = render_chart( values={ "workers": {"runtimeClassName": "nvidia"}, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert jmespath.search("spec.template.spec.runtimeClassName", docs[0]) == "nvidia" def test_livenessprobe_values_are_configurable(self): docs = render_chart( values={ "workers": { "livenessProbe": { "initialDelaySeconds": 111, "timeoutSeconds": 222, "failureThreshold": 333, "periodSeconds": 444, "command": ["sh", "-c", "echo", "wow such test"], } }, }, show_only=["templates/workers/worker-deployment.yaml"], ) livenessprobe = jmespath.search("spec.template.spec.containers[0].livenessProbe", docs[0]) assert livenessprobe == { "initialDelaySeconds": 111, "timeoutSeconds": 222, "failureThreshold": 333, "periodSeconds": 444, "exec": { "command": ["sh", "-c", "echo", "wow such test"], }, } def test_disable_livenessprobe(self): docs = render_chart( values={ "workers": {"livenessProbe": {"enabled": False}}, }, show_only=["templates/workers/worker-deployment.yaml"], ) livenessprobe = jmespath.search("spec.template.spec.containers[0].livenessProbe", docs[0]) assert livenessprobe is None @pytest.mark.parametrize( "log_persistence_values, expected_volume", [ ({"enabled": False}, {"emptyDir": {}}), ({"enabled": True}, {"persistentVolumeClaim": {"claimName": "release-name-logs"}}), ( {"enabled": True, "existingClaim": "test-claim"}, {"persistentVolumeClaim": {"claimName": "test-claim"}}, ), ], ) def test_logs_persistence_changes_volume(self, log_persistence_values, expected_volume): docs = render_chart( values={ "executor": "CeleryExecutor", "workers": {"persistence": {"enabled": False}}, "logs": {"persistence": log_persistence_values}, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert {"name": "logs", **expected_volume} in jmespath.search("spec.template.spec.volumes", docs[0]) def test_worker_resources_are_configurable(self): docs = render_chart( values={ "workers": { "resources": { "limits": {"cpu": "200m", "memory": "128Mi"}, "requests": {"cpu": "300m", "memory": "169Mi"}, } }, }, show_only=["templates/workers/worker-deployment.yaml"], ) # main container assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) assert "200m" == jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.containers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) # initContainer wait-for-airflow-configurations assert "128Mi" == jmespath.search( "spec.template.spec.initContainers[0].resources.limits.memory", docs[0] ) assert "200m" == jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.initContainers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search( "spec.template.spec.initContainers[0].resources.requests.cpu", docs[0] ) def test_worker_resources_are_not_added_by_default(self): docs = render_chart( show_only=["templates/workers/worker-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].resources", docs[0]) == {} def test_no_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": None}, show_only=["templates/workers/worker-deployment.yaml"] ) volume_mounts = 
jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts) volume_mounts_init = jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts_init) def test_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": "# Well hello!"}, show_only=["templates/workers/worker-deployment.yaml"], ) volume_mount = { "name": "config", "mountPath": "/opt/airflow/config/airflow_local_settings.py", "subPath": "airflow_local_settings.py", "readOnly": True, } assert volume_mount in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert volume_mount in jmespath.search("spec.template.spec.initContainers[0].volumeMounts", docs[0]) def test_airflow_local_settings_kerberos_sidecar(self): docs = render_chart( values={ "airflowLocalSettings": "# Well hello!", "workers": {"kerberosSidecar": {"enabled": True}}, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert { "name": "config", "mountPath": "/opt/airflow/config/airflow_local_settings.py", "subPath": "airflow_local_settings.py", "readOnly": True, } in jmespath.search("spec.template.spec.containers[2].volumeMounts", docs[0]) @pytest.mark.parametrize( "airflow_version, expected_arg", [ ("1.9.0", "airflow worker"), ("1.10.14", "airflow worker"), ("2.0.2", "airflow celery worker"), ("2.1.0", "airflow celery worker"), ], ) def test_default_command_and_args_airflow_version(self, airflow_version, expected_arg): docs = render_chart( values={ "airflowVersion": airflow_version, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None assert [ "bash", "-c", f"exec \\\n{expected_arg}", ] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) def test_command_and_args_overrides(self, command, args): docs = render_chart( values={"workers": {"command": command, "args": args}}, show_only=["templates/workers/worker-deployment.yaml"], ) assert command == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert args == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_command_and_args_overrides_are_templated(self): docs = render_chart( values={"workers": {"command": ["{{ .Release.Name }}"], "args": ["{{ .Release.Service }}"]}}, show_only=["templates/workers/worker-deployment.yaml"], ) assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_dags_gitsync_sidecar_and_init_container(self): docs = render_chart( values={"dags": {"gitSync": {"enabled": True}}}, show_only=["templates/workers/worker-deployment.yaml"], ) assert "git-sync" in [c["name"] for c in jmespath.search("spec.template.spec.containers", docs[0])] assert "git-sync-init" in [ c["name"] for c in jmespath.search("spec.template.spec.initContainers", docs[0]) ] def test_dags_gitsync_with_persistence_no_sidecar_or_init_container(self): docs = render_chart( values={"dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": True}}}, show_only=["templates/workers/worker-deployment.yaml"], ) # No gitsync sidecar or init container assert "git-sync" not in [ c["name"] for c in 
jmespath.search("spec.template.spec.containers", docs[0]) ] assert "git-sync-init" not in [ c["name"] for c in jmespath.search("spec.template.spec.initContainers", docs[0]) ] def test_persistence_volume_annotations(self): docs = render_chart( values={"workers": {"persistence": {"annotations": {"foo": "bar"}}}}, show_only=["templates/workers/worker-deployment.yaml"], ) assert {"foo": "bar"} == jmespath.search("spec.volumeClaimTemplates[0].metadata.annotations", docs[0]) def test_should_add_component_specific_annotations(self): docs = render_chart( values={ "workers": { "annotations": {"test_annotation": "test_annotation_value"}, }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "annotations" in jmespath.search("metadata", docs[0]) assert jmespath.search("metadata.annotations", docs[0])["test_annotation"] == "test_annotation_value" class TestWorkerLogGroomer(LogGroomerTestBase): """Worker groomer.""" obj_name = "worker" folder = "workers" class TestWorkerKedaAutoScaler: """Tests worker keda auto scaler.""" def test_should_add_component_specific_labels(self): docs = render_chart( values={ "executor": "CeleryExecutor", "workers": { "keda": {"enabled": True}, "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/workers/worker-kedaautoscaler.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" def test_should_remove_replicas_field(self): docs = render_chart( values={ "executor": "CeleryExecutor", "workers": { "keda": {"enabled": True}, }, }, show_only=["templates/workers/worker-deployment.yaml"], ) assert "replicas" not in jmespath.search("spec", docs[0]) @pytest.mark.parametrize( "query, executor, expected_query", [ # default query with CeleryExecutor ( None, "CeleryExecutor", "SELECT ceil(COUNT(*)::decimal / 16) FROM task_instance" " WHERE (state='running' OR state='queued')", ), # default query with CeleryKubernetesExecutor ( None, "CeleryKubernetesExecutor", "SELECT ceil(COUNT(*)::decimal / 16) FROM task_instance" " WHERE (state='running' OR state='queued') AND queue != 'kubernetes'", ), # test custom static query ( "SELECT ceil(COUNT(*)::decimal / 16) FROM task_instance", "CeleryKubernetesExecutor", "SELECT ceil(COUNT(*)::decimal / 16) FROM task_instance", ), # test custom template query ( "SELECT ceil(COUNT(*)::decimal / {{ mul .Values.config.celery.worker_concurrency 2 }})" " FROM task_instance", "CeleryKubernetesExecutor", "SELECT ceil(COUNT(*)::decimal / 32) FROM task_instance", ), ], ) def test_should_use_keda_query(self, query, executor, expected_query): docs = render_chart( values={ "executor": executor, "workers": { "keda": {"enabled": True, **({"query": query} if query else {})}, }, }, show_only=["templates/workers/worker-kedaautoscaler.yaml"], ) assert expected_query == jmespath.search("spec.triggers[0].metadata.query", docs[0]) class TestWorkerNetworkPolicy: """Tests worker network policy.""" def test_should_add_component_specific_labels(self): docs = render_chart( values={ "networkPolicies": {"enabled": True}, "executor": "CeleryExecutor", "workers": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/workers/worker-networkpolicy.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" class TestWorkerService: """Tests worker service.""" def test_should_add_component_specific_labels(self): docs = 
render_chart( values={ "executor": "CeleryExecutor", "workers": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/workers/worker-service.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" class TestWorkerServiceAccount: """Tests worker service account.""" def test_should_add_component_specific_labels(self): docs = render_chart( values={ "executor": "CeleryExecutor", "workers": { "serviceAccount": {"create": True}, "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/workers/worker-service.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" @pytest.mark.parametrize( "executor, creates_service_account", [ ("LocalExecutor", False), ("CeleryExecutor", True), ("CeleryKubernetesExecutor", True), ("KubernetesExecutor", True), ("LocalKubernetesExecutor", True), ], ) def test_should_create_worker_service_account_for_specific_executors( self, executor, creates_service_account ): docs = render_chart( values={ "executor": executor, "workers": { "serviceAccount": {"create": True}, "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/workers/worker-serviceaccount.yaml"], ) if creates_service_account: assert jmespath.search("kind", docs[0]) == "ServiceAccount" assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" else: assert docs == []
31,013
38.407878
110
py
airflow
airflow-main/helm_tests/airflow_core/__init__.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
785
45.235294
62
py
airflow
airflow-main/helm_tests/airflow_core/test_triggerer.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart from tests.charts.log_groomer import LogGroomerTestBase class TestTriggerer: """Tests triggerer.""" @pytest.mark.parametrize( "airflow_version, num_docs", [ ("2.1.0", 0), ("2.2.0", 1), ], ) def test_only_exists_on_new_airflow_versions(self, airflow_version, num_docs): """Trigger was only added from Airflow 2.2 onwards.""" docs = render_chart( values={"airflowVersion": airflow_version}, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert num_docs == len(docs) def test_can_be_disabled(self): """ Triggerer should be able to be disabled if the users desires (e.g. Python 3.6 or doesn't want to use async tasks). """ docs = render_chart( values={"triggerer": {"enabled": False}}, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert 0 == len(docs) @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", [(8, 10), (10, 8), (8, None), (None, 10), (None, None)], ) def test_revision_history_limit(self, revision_history_limit, global_revision_history_limit): values = { "triggerer": { "enabled": True, } } if revision_history_limit: values["triggerer"]["revisionHistoryLimit"] = revision_history_limit if global_revision_history_limit: values["revisionHistoryLimit"] = global_revision_history_limit docs = render_chart( values=values, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) expected_result = revision_history_limit if revision_history_limit else global_revision_history_limit assert jmespath.search("spec.revisionHistoryLimit", docs[0]) == expected_result def test_disable_wait_for_migration(self): docs = render_chart( values={ "triggerer": { "waitForMigrations": {"enabled": False}, }, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) actual = jmespath.search( "spec.template.spec.initContainers[?name=='wait-for-airflow-migrations']", docs[0] ) assert actual is None def test_should_add_extra_containers(self): docs = render_chart( values={ "triggerer": { "extraContainers": [ {"name": "test-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert { "name": "test-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) def test_should_add_extra_init_containers(self): docs = render_chart( values={ "triggerer": { "extraInitContainers": [ {"name": "test-init-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", } == 
jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( values={ "triggerer": { "extraVolumes": [{"name": "test-volume-{{ .Chart.Name }}", "emptyDir": {}}], "extraVolumeMounts": [ {"name": "test-volume-{{ .Chart.Name }}", "mountPath": "/opt/test"} ], }, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert "test-volume-airflow" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) assert "test-volume-airflow" == jmespath.search( "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] ) assert "test-volume-airflow" == jmespath.search( "spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0] ) def test_should_add_global_volume_and_global_volume_mount(self): docs = render_chart( values={ "volumes": [{"name": "test-volume", "emptyDir": {}}], "volumeMounts": [{"name": "test-volume", "mountPath": "/opt/test"}], }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert "test-volume" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) assert "test-volume" == jmespath.search( "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] ) def test_should_add_extraEnvs(self): docs = render_chart( values={ "triggerer": { "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}], } }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.containers[0].env", docs[0] ) def test_should_add_extraEnvs_to_wait_for_migration_container(self): docs = render_chart( values={ "triggerer": { "waitForMigrations": { "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}], }, } }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.initContainers[0].env", docs[0] ) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "triggerer": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert "test_label" in jmespath.search("spec.template.metadata.labels", docs[0]) assert jmespath.search("spec.template.metadata.labels", docs[0])["test_label"] == "test_label_value" def test_should_create_valid_affinity_tolerations_and_node_selector(self): docs = render_chart( values={ "triggerer": { "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "nodeSelector": {"diskType": "ssd"}, }, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert "StatefulSet" == jmespath.search("kind", docs[0]) assert "foo" == jmespath.search( "spec.template.spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." 
"key", docs[0], ) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.template.spec.tolerations[0].key", docs[0], ) def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): """When given both global and triggerer affinity etc, triggerer affinity etc is used.""" expected_affinity = { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } } expected_topology_spread_constraints = { "maxSkew": 1, "topologyKey": "foo", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } docs = render_chart( values={ "triggerer": { "affinity": expected_affinity, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [expected_topology_spread_constraints], "nodeSelector": {"type": "ssd"}, }, "affinity": { "nodeAffinity": { "preferredDuringSchedulingIgnoredDuringExecution": [ { "weight": 1, "preference": { "matchExpressions": [ {"key": "not-me", "operator": "In", "values": ["true"]}, ] }, } ] } }, "tolerations": [ {"key": "not-me", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [ { "maxSkew": 1, "topologyKey": "not-me", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } ], "nodeSelector": {"type": "not-me"}, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert expected_affinity == jmespath.search("spec.template.spec.affinity", docs[0]) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.type", docs[0], ) tolerations = jmespath.search("spec.template.spec.tolerations", docs[0]) assert 1 == len(tolerations) assert "dynamic-pods" == tolerations[0]["key"] assert expected_topology_spread_constraints == jmespath.search( "spec.template.spec.topologySpreadConstraints[0]", docs[0] ) def test_should_create_default_affinity(self): docs = render_chart(show_only=["templates/scheduler/scheduler-deployment.yaml"]) assert {"component": "scheduler"} == jmespath.search( "spec.template.spec.affinity.podAntiAffinity." "preferredDuringSchedulingIgnoredDuringExecution[0]." 
"podAffinityTerm.labelSelector.matchLabels", docs[0], ) def test_livenessprobe_values_are_configurable(self): docs = render_chart( values={ "triggerer": { "livenessProbe": { "initialDelaySeconds": 111, "timeoutSeconds": 222, "failureThreshold": 333, "periodSeconds": 444, "command": ["sh", "-c", "echo", "wow such test"], } }, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert 111 == jmespath.search( "spec.template.spec.containers[0].livenessProbe.initialDelaySeconds", docs[0] ) assert 222 == jmespath.search( "spec.template.spec.containers[0].livenessProbe.timeoutSeconds", docs[0] ) assert 333 == jmespath.search( "spec.template.spec.containers[0].livenessProbe.failureThreshold", docs[0] ) assert 444 == jmespath.search("spec.template.spec.containers[0].livenessProbe.periodSeconds", docs[0]) assert ["sh", "-c", "echo", "wow such test"] == jmespath.search( "spec.template.spec.containers[0].livenessProbe.exec.command", docs[0] ) @pytest.mark.parametrize( "airflow_version, probe_command", [ ("2.4.9", "airflow jobs check --job-type TriggererJob --hostname $(hostname)"), ("2.5.0", "airflow jobs check --job-type TriggererJob --local"), ], ) def test_livenessprobe_command_depends_on_airflow_version(self, airflow_version, probe_command): docs = render_chart( values={"airflowVersion": f"{airflow_version}"}, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert ( probe_command in jmespath.search("spec.template.spec.containers[0].livenessProbe.exec.command", docs[0])[-1] ) @pytest.mark.parametrize( "log_persistence_values, expected_volume", [ ({"enabled": False}, {"emptyDir": {}}), ({"enabled": True}, {"persistentVolumeClaim": {"claimName": "release-name-logs"}}), ( {"enabled": True, "existingClaim": "test-claim"}, {"persistentVolumeClaim": {"claimName": "test-claim"}}, ), ], ) def test_logs_persistence_changes_volume(self, log_persistence_values, expected_volume): docs = render_chart( values={ "triggerer": {"persistence": {"enabled": False}}, "logs": {"persistence": log_persistence_values}, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert {"name": "logs", **expected_volume} == jmespath.search( "spec.template.spec.volumes[1]", docs[0] ) def test_resources_are_configurable(self): docs = render_chart( values={ "triggerer": { "resources": { "limits": {"cpu": "200m", "memory": "128Mi"}, "requests": {"cpu": "300m", "memory": "169Mi"}, } }, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) assert "200m" == jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.containers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) assert "128Mi" == jmespath.search( "spec.template.spec.initContainers[0].resources.limits.memory", docs[0] ) assert "200m" == jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.initContainers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search( "spec.template.spec.initContainers[0].resources.requests.cpu", docs[0] ) def test_resources_are_not_added_by_default(self): docs = render_chart( show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].resources", docs[0]) == {} 
@pytest.mark.parametrize( "persistence, update_strategy, expected_update_strategy", [ (False, None, None), (True, {"rollingUpdate": {"partition": 0}}, {"rollingUpdate": {"partition": 0}}), (True, None, None), ], ) def test_update_strategy(self, persistence, update_strategy, expected_update_strategy): docs = render_chart( values={ "airflowVersion": "2.6.0", "executor": "CeleryExecutor", "triggerer": { "persistence": {"enabled": persistence}, "updateStrategy": update_strategy, }, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert expected_update_strategy == jmespath.search("spec.updateStrategy", docs[0]) @pytest.mark.parametrize( "persistence, strategy, expected_strategy", [ (True, None, None), ( False, {"rollingUpdate": {"maxSurge": "100%", "maxUnavailable": "50%"}}, {"rollingUpdate": {"maxSurge": "100%", "maxUnavailable": "50%"}}, ), (False, None, None), ], ) def test_strategy(self, persistence, strategy, expected_strategy): docs = render_chart( values={ "triggerer": {"persistence": {"enabled": persistence}, "strategy": strategy}, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert expected_strategy == jmespath.search("spec.strategy", docs[0]) def test_default_command_and_args(self): docs = render_chart( show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None assert ["bash", "-c", "exec airflow triggerer"] == jmespath.search( "spec.template.spec.containers[0].args", docs[0] ) @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) def test_command_and_args_overrides(self, command, args): docs = render_chart( values={"triggerer": {"command": command, "args": args}}, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert command == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert args == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_command_and_args_overrides_are_templated(self): docs = render_chart( values={ "triggerer": {"command": ["{{ .Release.Name }}"], "args": ["{{ .Release.Service }}"]}, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_dags_gitsync_sidecar_and_init_container(self): docs = render_chart( values={"dags": {"gitSync": {"enabled": True}}}, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert "git-sync" in [c["name"] for c in jmespath.search("spec.template.spec.containers", docs[0])] assert "git-sync-init" in [ c["name"] for c in jmespath.search("spec.template.spec.initContainers", docs[0]) ] def test_dags_gitsync_with_persistence_no_sidecar_or_init_container(self): docs = render_chart( values={"dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": True}}}, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) # No gitsync sidecar or init container assert "git-sync" not in [ c["name"] for c in jmespath.search("spec.template.spec.containers", docs[0]) ] assert "git-sync-init" not in [ c["name"] for c in jmespath.search("spec.template.spec.initContainers", docs[0]) ] def test_no_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": None}, show_only=["templates/triggerer/triggerer-deployment.yaml"] ) volume_mounts = 
jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts) volume_mounts_init = jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts_init) def test_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": "# Well hello!"}, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) volume_mount = { "name": "config", "mountPath": "/opt/airflow/config/airflow_local_settings.py", "subPath": "airflow_local_settings.py", "readOnly": True, } assert volume_mount in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert volume_mount in jmespath.search("spec.template.spec.initContainers[0].volumeMounts", docs[0]) def test_should_add_component_specific_annotations(self): docs = render_chart( values={ "triggerer": { "annotations": {"test_annotation": "test_annotation_value"}, }, }, show_only=["templates/triggerer/triggerer-deployment.yaml"], ) assert "annotations" in jmespath.search("metadata", docs[0]) assert jmespath.search("metadata.annotations", docs[0])["test_annotation"] == "test_annotation_value" class TestTriggererServiceAccount: """Tests triggerer service account.""" def test_should_add_component_specific_labels(self): docs = render_chart( values={ "triggerer": { "serviceAccount": {"create": True}, "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/triggerer/triggerer-serviceaccount.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" class TestTriggererLogGroomer(LogGroomerTestBase): """Triggerer log groomer.""" obj_name = "triggerer" folder = "triggerer"
24,184
39.308333
110
py
airflow
airflow-main/helm_tests/airflow_core/test_dag_processor.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart from tests.charts.log_groomer import LogGroomerTestBase class TestDagProcessor: """Tests DAG processor.""" @pytest.mark.parametrize( "airflow_version, num_docs", [ ("2.2.0", 0), ("2.3.0", 1), ], ) def test_only_exists_on_new_airflow_versions(self, airflow_version, num_docs): """Standalone Dag Processor was only added from Airflow 2.3 onwards.""" docs = render_chart( values={ "airflowVersion": airflow_version, "dagProcessor": {"enabled": True}, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert num_docs == len(docs) def test_can_be_disabled(self): """Standalone Dag Processor is disabled by default.""" docs = render_chart( values={"dagProcessor": {"enabled": False}}, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert 0 == len(docs) def test_disable_wait_for_migration(self): docs = render_chart( values={ "dagProcessor": { "enabled": True, "waitForMigrations": {"enabled": False}, }, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) actual = jmespath.search( "spec.template.spec.initContainers[?name=='wait-for-airflow-migrations']", docs[0] ) assert actual is None def test_should_add_extra_containers(self): docs = render_chart( values={ "dagProcessor": { "enabled": True, "extraContainers": [ {"name": "test-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert { "name": "test-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) def test_should_add_extra_init_containers(self): docs = render_chart( values={ "dagProcessor": { "enabled": True, "extraInitContainers": [ {"name": "test-init-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( values={ "dagProcessor": { "enabled": True, "extraVolumes": [{"name": "test-volume-{{ .Chart.Name }}", "emptyDir": {}}], "extraVolumeMounts": [ {"name": "test-volume-{{ .Chart.Name }}", "mountPath": "/opt/test"} ], }, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert "test-volume-airflow" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) assert "test-volume-airflow" == jmespath.search( "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] ) assert 
"test-volume-airflow" == jmespath.search( "spec.template.spec.initContainers[0].volumeMounts[0].name", docs[0] ) def test_should_add_global_volume_and_global_volume_mount(self): docs = render_chart( values={ "dagProcessor": {"enabled": True}, "volumes": [{"name": "test-volume", "emptyDir": {}}], "volumeMounts": [{"name": "test-volume", "mountPath": "/opt/test"}], }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert "test-volume" == jmespath.search("spec.template.spec.volumes[1].name", docs[0]) assert "test-volume" == jmespath.search( "spec.template.spec.containers[0].volumeMounts[0].name", docs[0] ) assert "test-volume" == jmespath.search( "spec.template.spec.initContainers[0].volumeMounts[0].name", docs[0] ) def test_should_add_extraEnvs(self): docs = render_chart( values={ "dagProcessor": { "enabled": True, "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}], }, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.containers[0].env", docs[0] ) def test_should_add_extraEnvs_to_wait_for_migration_container(self): docs = render_chart( values={ "dagProcessor": { "enabled": True, "waitForMigrations": { "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}], }, } }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.initContainers[0].env", docs[0] ) def test_should_create_valid_affinity_tolerations_and_node_selector(self): docs = render_chart( values={ "dagProcessor": { "enabled": True, "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "nodeSelector": {"diskType": "ssd"}, }, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert "Deployment" == jmespath.search("kind", docs[0]) assert "foo" == jmespath.search( "spec.template.spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." 
"key", docs[0], ) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.template.spec.tolerations[0].key", docs[0], ) def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): """When given both global and triggerer affinity etc, triggerer affinity etc is used.""" expected_affinity = { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } } expected_topology_spread_constraints = { "maxSkew": 1, "topologyKey": "foo", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } docs = render_chart( values={ "dagProcessor": { "enabled": True, "affinity": expected_affinity, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [expected_topology_spread_constraints], "nodeSelector": {"type": "ssd"}, }, "affinity": { "nodeAffinity": { "preferredDuringSchedulingIgnoredDuringExecution": [ { "weight": 1, "preference": { "matchExpressions": [ {"key": "not-me", "operator": "In", "values": ["true"]}, ] }, } ] } }, "tolerations": [ {"key": "not-me", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [ { "maxSkew": 1, "topologyKey": "not-me", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } ], "nodeSelector": {"type": "not-me"}, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert expected_affinity == jmespath.search("spec.template.spec.affinity", docs[0]) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.type", docs[0], ) tolerations = jmespath.search("spec.template.spec.tolerations", docs[0]) assert 1 == len(tolerations) assert "dynamic-pods" == tolerations[0]["key"] assert expected_topology_spread_constraints == jmespath.search( "spec.template.spec.topologySpreadConstraints[0]", docs[0] ) def test_should_create_default_affinity(self): docs = render_chart(show_only=["templates/scheduler/scheduler-deployment.yaml"]) assert {"component": "scheduler"} == jmespath.search( "spec.template.spec.affinity.podAntiAffinity." "preferredDuringSchedulingIgnoredDuringExecution[0]." 
"podAffinityTerm.labelSelector.matchLabels", docs[0], ) def test_livenessprobe_values_are_configurable(self): docs = render_chart( values={ "dagProcessor": { "enabled": True, "livenessProbe": { "initialDelaySeconds": 111, "timeoutSeconds": 222, "failureThreshold": 333, "periodSeconds": 444, "command": ["sh", "-c", "echo", "wow such test"], }, }, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert 111 == jmespath.search( "spec.template.spec.containers[0].livenessProbe.initialDelaySeconds", docs[0] ) assert 222 == jmespath.search( "spec.template.spec.containers[0].livenessProbe.timeoutSeconds", docs[0] ) assert 333 == jmespath.search( "spec.template.spec.containers[0].livenessProbe.failureThreshold", docs[0] ) assert 444 == jmespath.search("spec.template.spec.containers[0].livenessProbe.periodSeconds", docs[0]) assert ["sh", "-c", "echo", "wow such test"] == jmespath.search( "spec.template.spec.containers[0].livenessProbe.exec.command", docs[0] ) @pytest.mark.parametrize( "airflow_version, probe_command", [ ("2.4.9", "airflow jobs check --hostname $(hostname)"), ("2.5.0", "airflow jobs check --local"), ("2.5.2", "airflow jobs check --local --job-type DagProcessorJob"), ], ) def test_livenessprobe_command_depends_on_airflow_version(self, airflow_version, probe_command): docs = render_chart( values={"airflowVersion": f"{airflow_version}", "dagProcessor": {"enabled": True}}, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert ( probe_command in jmespath.search("spec.template.spec.containers[0].livenessProbe.exec.command", docs[0])[-1] ) @pytest.mark.parametrize( "log_persistence_values, expected_volume", [ ({"enabled": False}, {"emptyDir": {}}), ({"enabled": True}, {"persistentVolumeClaim": {"claimName": "release-name-logs"}}), ( {"enabled": True, "existingClaim": "test-claim"}, {"persistentVolumeClaim": {"claimName": "test-claim"}}, ), ], ) def test_logs_persistence_changes_volume(self, log_persistence_values, expected_volume): docs = render_chart( values={ "logs": {"persistence": log_persistence_values}, "dagProcessor": {"enabled": True}, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert {"name": "logs", **expected_volume} == jmespath.search( "spec.template.spec.volumes[1]", docs[0] ) def test_resources_are_configurable(self): docs = render_chart( values={ "dagProcessor": { "enabled": True, "resources": { "limits": {"cpu": "200m", "memory": "128Mi"}, "requests": {"cpu": "300m", "memory": "169Mi"}, }, }, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) assert "200m" == jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.containers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) assert "128Mi" == jmespath.search( "spec.template.spec.initContainers[0].resources.limits.memory", docs[0] ) assert "200m" == jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.initContainers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search( "spec.template.spec.initContainers[0].resources.requests.cpu", docs[0] ) def test_resources_are_not_added_by_default(self): docs = render_chart( values={"dagProcessor": {"enabled": 
True}}, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].resources", docs[0]) == {} @pytest.mark.parametrize( "strategy, expected_strategy", [ (None, None), ( {"rollingUpdate": {"maxSurge": "100%", "maxUnavailable": "50%"}}, {"rollingUpdate": {"maxSurge": "100%", "maxUnavailable": "50%"}}, ), ], ) def test_strategy(self, strategy, expected_strategy): """Strategy should be used when we aren't using both LocalExecutor and workers.persistence.""" docs = render_chart( values={ "dagProcessor": {"enabled": True, "strategy": strategy}, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert expected_strategy == jmespath.search("spec.strategy", docs[0]) def test_default_command_and_args(self): docs = render_chart( values={"dagProcessor": {"enabled": True}}, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None assert ["bash", "-c", "exec airflow dag-processor"] == jmespath.search( "spec.template.spec.containers[0].args", docs[0] ) @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", [(8, 10), (10, 8), (8, None), (None, 10), (None, None)], ) def test_revision_history_limit(self, revision_history_limit, global_revision_history_limit): values = { "dagProcessor": { "enabled": True, } } if revision_history_limit: values["dagProcessor"]["revisionHistoryLimit"] = revision_history_limit if global_revision_history_limit: values["revisionHistoryLimit"] = global_revision_history_limit docs = render_chart( values=values, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) expected_result = revision_history_limit if revision_history_limit else global_revision_history_limit assert jmespath.search("spec.revisionHistoryLimit", docs[0]) == expected_result @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) def test_command_and_args_overrides(self, command, args): docs = render_chart( values={ "dagProcessor": { "enabled": True, "command": command, "args": args, } }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert command == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert args == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_command_and_args_overrides_are_templated(self): docs = render_chart( values={ "dagProcessor": { "enabled": True, "command": ["{{ .Release.Name }}"], "args": ["{{ .Release.Service }}"], }, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_dags_volume_mount_with_persistence_true(self): docs = render_chart( values={"dagProcessor": {"enabled": True}, "dags": {"gitSync": {"enabled": True}}}, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert "dags" in [ vm["name"] for vm in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) ] assert "dags" in [vm["name"] for vm in jmespath.search("spec.template.spec.volumes", docs[0])] def test_dags_gitsync_sidecar_and_init_container(self): docs = render_chart( values={"dagProcessor": {"enabled": True}, "dags": {"gitSync": {"enabled": True}}}, 
show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert "git-sync" in [c["name"] for c in jmespath.search("spec.template.spec.containers", docs[0])] assert "git-sync-init" in [ c["name"] for c in jmespath.search("spec.template.spec.initContainers", docs[0]) ] def test_dags_gitsync_with_persistence_no_sidecar_or_init_container(self): docs = render_chart( values={ "dagProcessor": {"enabled": True}, "dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": True}}, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) # No gitsync sidecar or init container assert "git-sync" not in [ c["name"] for c in jmespath.search("spec.template.spec.containers", docs[0]) ] assert "git-sync-init" not in [ c["name"] for c in jmespath.search("spec.template.spec.initContainers", docs[0]) ] def test_no_airflow_local_settings(self): docs = render_chart( values={"dagProcessor": {"enabled": True}, "airflowLocalSettings": None}, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) volume_mounts = jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts) volume_mounts_init = jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts_init) def test_airflow_local_settings(self): docs = render_chart( values={"dagProcessor": {"enabled": True}, "airflowLocalSettings": "# Well hello!"}, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) volume_mount = { "name": "config", "mountPath": "/opt/airflow/config/airflow_local_settings.py", "subPath": "airflow_local_settings.py", "readOnly": True, } assert volume_mount in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert volume_mount in jmespath.search("spec.template.spec.initContainers[0].volumeMounts", docs[0]) def test_should_add_component_specific_annotations(self): docs = render_chart( values={ "dagProcessor": { "enabled": True, "annotations": {"test_annotation": "test_annotation_value"}, }, }, show_only=["templates/dag-processor/dag-processor-deployment.yaml"], ) assert "annotations" in jmespath.search("metadata", docs[0]) assert jmespath.search("metadata.annotations", docs[0])["test_annotation"] == "test_annotation_value" class TestDagProcessorLogGroomer(LogGroomerTestBase): """DAG processor log groomer.""" obj_name = "dag-processor" folder = "dag-processor"
24,153
40.078231
110
py
airflow
airflow-main/helm_tests/airflow_core/test_scheduler.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart from tests.charts.log_groomer import LogGroomerTestBase class TestScheduler: """Tests scheduler.""" @pytest.mark.parametrize( "executor, persistence, kind", [ ("CeleryExecutor", False, "Deployment"), ("CeleryExecutor", True, "Deployment"), ("CeleryKubernetesExecutor", True, "Deployment"), ("KubernetesExecutor", True, "Deployment"), ("LocalKubernetesExecutor", False, "Deployment"), ("LocalKubernetesExecutor", True, "StatefulSet"), ("LocalExecutor", True, "StatefulSet"), ("LocalExecutor", False, "Deployment"), ], ) def test_scheduler_kind(self, executor, persistence, kind): """ Test scheduler kind is StatefulSet only when using a local executor & worker persistence is enabled. """ docs = render_chart( values={ "executor": executor, "workers": {"persistence": {"enabled": persistence}}, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert kind == jmespath.search("kind", docs[0]) def test_should_add_extra_containers(self): docs = render_chart( values={ "executor": "CeleryExecutor", "scheduler": { "extraContainers": [ {"name": "test-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert { "name": "test-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) def test_disable_wait_for_migration(self): docs = render_chart( values={ "scheduler": { "waitForMigrations": {"enabled": False}, }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) actual = jmespath.search( "spec.template.spec.initContainers[?name=='wait-for-airflow-migrations']", docs[0] ) assert actual is None def test_should_add_extra_init_containers(self): docs = render_chart( values={ "scheduler": { "extraInitContainers": [ {"name": "test-init-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.template.spec.initContainers[-1]", docs[0]) def test_should_add_extra_volume_and_extra_volume_mount(self): docs = render_chart( values={ "executor": "CeleryExecutor", "scheduler": { "extraVolumes": [{"name": "test-volume-{{ .Chart.Name }}", "emptyDir": {}}], "extraVolumeMounts": [ {"name": "test-volume-{{ .Chart.Name }}", "mountPath": "/opt/test"} ], }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert "test-volume-airflow" in jmespath.search("spec.template.spec.volumes[*].name", docs[0]) assert "test-volume-airflow" in jmespath.search( 
"spec.template.spec.containers[0].volumeMounts[*].name", docs[0] ) assert "test-volume-airflow" == jmespath.search( "spec.template.spec.initContainers[0].volumeMounts[-1].name", docs[0] ) def test_should_add_global_volume_and_global_volume_mount(self): docs = render_chart( values={ "volumes": [{"name": "test-volume", "emptyDir": {}}], "volumeMounts": [{"name": "test-volume", "mountPath": "/opt/test"}], }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert "test-volume" in jmespath.search("spec.template.spec.volumes[*].name", docs[0]) assert "test-volume" in jmespath.search( "spec.template.spec.containers[0].volumeMounts[*].name", docs[0] ) def test_should_add_extraEnvs(self): docs = render_chart( values={ "scheduler": { "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}], }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.containers[0].env", docs[0] ) def test_should_add_extraEnvs_to_wait_for_migration_container(self): docs = render_chart( values={ "scheduler": { "waitForMigrations": { "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}], }, }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.initContainers[0].env", docs[0] ) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "executor": "CeleryExecutor", "scheduler": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert "test_label" in jmespath.search("spec.template.metadata.labels", docs[0]) assert jmespath.search("spec.template.metadata.labels", docs[0])["test_label"] == "test_label_value" @pytest.mark.parametrize( "revision_history_limit, global_revision_history_limit", [(8, 10), (10, 8), (8, None), (None, 10), (None, None)], ) def test_revision_history_limit(self, revision_history_limit, global_revision_history_limit): values = {"scheduler": {}} if revision_history_limit: values["scheduler"]["revisionHistoryLimit"] = revision_history_limit if global_revision_history_limit: values["revisionHistoryLimit"] = global_revision_history_limit docs = render_chart( values=values, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) expected_result = revision_history_limit if revision_history_limit else global_revision_history_limit assert jmespath.search("spec.revisionHistoryLimit", docs[0]) == expected_result def test_should_create_valid_affinity_tolerations_and_node_selector(self): docs = render_chart( values={ "scheduler": { "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "nodeSelector": {"diskType": "ssd"}, } }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert "Deployment" == jmespath.search("kind", docs[0]) assert "foo" == jmespath.search( "spec.template.spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." 
"key", docs[0], ) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.template.spec.tolerations[0].key", docs[0], ) def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): """When given both global and scheduler affinity etc, scheduler affinity etc is used.""" expected_affinity = { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } } expected_topology_spread_constraints = { "maxSkew": 1, "topologyKey": "foo", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } docs = render_chart( values={ "scheduler": { "affinity": expected_affinity, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [expected_topology_spread_constraints], "nodeSelector": {"type": "ssd"}, }, "affinity": { "nodeAffinity": { "preferredDuringSchedulingIgnoredDuringExecution": [ { "weight": 1, "preference": { "matchExpressions": [ {"key": "not-me", "operator": "In", "values": ["true"]}, ] }, } ] } }, "tolerations": [ {"key": "not-me", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [ { "maxSkew": 1, "topologyKey": "not-me", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } ], "nodeSelector": {"type": "not-me"}, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert expected_affinity == jmespath.search("spec.template.spec.affinity", docs[0]) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.type", docs[0], ) tolerations = jmespath.search("spec.template.spec.tolerations", docs[0]) assert 1 == len(tolerations) assert "dynamic-pods" == tolerations[0]["key"] assert expected_topology_spread_constraints == jmespath.search( "spec.template.spec.topologySpreadConstraints[0]", docs[0] ) def test_should_create_default_affinity(self): docs = render_chart(show_only=["templates/scheduler/scheduler-deployment.yaml"]) assert {"component": "scheduler"} == jmespath.search( "spec.template.spec.affinity.podAntiAffinity." "preferredDuringSchedulingIgnoredDuringExecution[0]." 
"podAffinityTerm.labelSelector.matchLabels", docs[0], ) def test_livenessprobe_values_are_configurable(self): docs = render_chart( values={ "scheduler": { "livenessProbe": { "initialDelaySeconds": 111, "timeoutSeconds": 222, "failureThreshold": 333, "periodSeconds": 444, "command": ["sh", "-c", "echo", "wow such test"], } }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert 111 == jmespath.search( "spec.template.spec.containers[0].livenessProbe.initialDelaySeconds", docs[0] ) assert 222 == jmespath.search( "spec.template.spec.containers[0].livenessProbe.timeoutSeconds", docs[0] ) assert 333 == jmespath.search( "spec.template.spec.containers[0].livenessProbe.failureThreshold", docs[0] ) assert 444 == jmespath.search("spec.template.spec.containers[0].livenessProbe.periodSeconds", docs[0]) assert ["sh", "-c", "echo", "wow such test"] == jmespath.search( "spec.template.spec.containers[0].livenessProbe.exec.command", docs[0] ) @pytest.mark.parametrize( "airflow_version, probe_command", [ ("1.9.0", "from airflow.jobs.scheduler_job import SchedulerJob"), ("2.1.0", "airflow jobs check --job-type SchedulerJob --hostname $(hostname)"), ("2.5.0", "airflow jobs check --job-type SchedulerJob --local"), ], ) def test_livenessprobe_command_depends_on_airflow_version(self, airflow_version, probe_command): docs = render_chart( values={"airflowVersion": f"{airflow_version}"}, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert ( probe_command in jmespath.search("spec.template.spec.containers[0].livenessProbe.exec.command", docs[0])[-1] ) @pytest.mark.parametrize( "log_persistence_values, expected_volume", [ ({"enabled": False}, {"emptyDir": {}}), ({"enabled": True}, {"persistentVolumeClaim": {"claimName": "release-name-logs"}}), ( {"enabled": True, "existingClaim": "test-claim"}, {"persistentVolumeClaim": {"claimName": "test-claim"}}, ), ], ) def test_logs_persistence_changes_volume(self, log_persistence_values, expected_volume): docs = render_chart( values={"logs": {"persistence": log_persistence_values}}, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert {"name": "logs", **expected_volume} in jmespath.search("spec.template.spec.volumes", docs[0]) def test_scheduler_security_contexts_are_configurable(self): docs = render_chart( values={ "scheduler": { "securityContexts": { "pod": { "fsGroup": 1000, "runAsGroup": 1001, "runAsNonRoot": True, "runAsUser": 2000, }, "container": { "allowPrivilegeEscalation": False, "readOnlyRootFilesystem": True, }, } }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert {"allowPrivilegeEscalation": False, "readOnlyRootFilesystem": True} == jmespath.search( "spec.template.spec.containers[0].securityContext", docs[0] ) assert { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, } == jmespath.search("spec.template.spec.securityContext", docs[0]) def test_scheduler_security_context_legacy(self): docs = render_chart( values={ "scheduler": { "securityContext": { "fsGroup": 1000, "runAsGroup": 1001, "runAsNonRoot": True, "runAsUser": 2000, } }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert { "runAsUser": 2000, "runAsGroup": 1001, "fsGroup": 1000, "runAsNonRoot": True, } == jmespath.search("spec.template.spec.securityContext", docs[0]) def test_scheduler_resources_are_configurable(self): docs = render_chart( values={ "scheduler": { "resources": { "limits": {"cpu": "200m", "memory": "128Mi"}, "requests": {"cpu": "300m", "memory": "169Mi"}, } }, }, 
show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert "128Mi" == jmespath.search("spec.template.spec.containers[0].resources.limits.memory", docs[0]) assert "200m" == jmespath.search("spec.template.spec.containers[0].resources.limits.cpu", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.containers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search("spec.template.spec.containers[0].resources.requests.cpu", docs[0]) assert "128Mi" == jmespath.search( "spec.template.spec.initContainers[0].resources.limits.memory", docs[0] ) assert "200m" == jmespath.search("spec.template.spec.initContainers[0].resources.limits.cpu", docs[0]) assert "169Mi" == jmespath.search( "spec.template.spec.initContainers[0].resources.requests.memory", docs[0] ) assert "300m" == jmespath.search( "spec.template.spec.initContainers[0].resources.requests.cpu", docs[0] ) def test_scheduler_resources_are_not_added_by_default(self): docs = render_chart( show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].resources", docs[0]) == {} def test_no_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": None}, show_only=["templates/scheduler/scheduler-deployment.yaml"] ) volume_mounts = jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts) volume_mounts_init = jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts_init) def test_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": "# Well hello!"}, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) volume_mount = { "name": "config", "mountPath": "/opt/airflow/config/airflow_local_settings.py", "subPath": "airflow_local_settings.py", "readOnly": True, } assert volume_mount in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert volume_mount in jmespath.search("spec.template.spec.initContainers[0].volumeMounts", docs[0]) @pytest.mark.parametrize( "executor, persistence, update_strategy, expected_update_strategy", [ ("CeleryExecutor", False, {"rollingUpdate": {"partition": 0}}, None), ("CeleryExecutor", True, {"rollingUpdate": {"partition": 0}}, None), ("LocalKubernetesExecutor", False, {"rollingUpdate": {"partition": 0}}, None), ( "LocalKubernetesExecutor", True, {"rollingUpdate": {"partition": 0}}, {"rollingUpdate": {"partition": 0}}, ), ("LocalExecutor", False, {"rollingUpdate": {"partition": 0}}, None), ("LocalExecutor", True, {"rollingUpdate": {"partition": 0}}, {"rollingUpdate": {"partition": 0}}), ("LocalExecutor", True, None, None), ], ) def test_scheduler_update_strategy( self, executor, persistence, update_strategy, expected_update_strategy ): """UpdateStrategy should only be used when we have a local executor and workers.persistence.""" docs = render_chart( values={ "executor": executor, "workers": {"persistence": {"enabled": persistence}}, "scheduler": {"updateStrategy": update_strategy}, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert expected_update_strategy == jmespath.search("spec.updateStrategy", docs[0]) @pytest.mark.parametrize( "executor, persistence, strategy, expected_strategy", [ ("LocalExecutor", False, None, None), ("LocalExecutor", False, {"type": "Recreate"}, {"type": "Recreate"}), ("LocalExecutor", True, {"type": "Recreate"}, None), ("LocalKubernetesExecutor", 
False, {"type": "Recreate"}, {"type": "Recreate"}), ("LocalKubernetesExecutor", True, {"type": "Recreate"}, None), ("CeleryExecutor", True, None, None), ("CeleryExecutor", False, None, None), ("CeleryExecutor", True, {"type": "Recreate"}, {"type": "Recreate"}), ( "CeleryExecutor", False, {"rollingUpdate": {"maxSurge": "100%", "maxUnavailable": "50%"}}, {"rollingUpdate": {"maxSurge": "100%", "maxUnavailable": "50%"}}, ), ], ) def test_scheduler_strategy(self, executor, persistence, strategy, expected_strategy): """Strategy should be used when we aren't using both a local executor and workers.persistence.""" docs = render_chart( values={ "executor": executor, "workers": {"persistence": {"enabled": persistence}}, "scheduler": {"strategy": strategy}, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert expected_strategy == jmespath.search("spec.strategy", docs[0]) def test_default_command_and_args(self): docs = render_chart(show_only=["templates/scheduler/scheduler-deployment.yaml"]) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None assert ["bash", "-c", "exec airflow scheduler"] == jmespath.search( "spec.template.spec.containers[0].args", docs[0] ) @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) def test_command_and_args_overrides(self, command, args): docs = render_chart( values={"scheduler": {"command": command, "args": args}}, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert command == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert args == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_command_and_args_overrides_are_templated(self): docs = render_chart( values={"scheduler": {"command": ["{{ .Release.Name }}"], "args": ["{{ .Release.Service }}"]}}, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) @pytest.mark.parametrize( "dags_values", [ {"gitSync": {"enabled": True}}, {"gitSync": {"enabled": True}, "persistence": {"enabled": True}}, ], ) def test_dags_gitsync_sidecar_and_init_container(self, dags_values): docs = render_chart( values={"dags": dags_values}, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert "git-sync" in [c["name"] for c in jmespath.search("spec.template.spec.containers", docs[0])] assert "git-sync-init" in [ c["name"] for c in jmespath.search("spec.template.spec.initContainers", docs[0]) ] @pytest.mark.parametrize( "dag_processor, executor, skip_dags_mount", [ (True, "LocalExecutor", False), (True, "CeleryExecutor", True), (True, "KubernetesExecutor", True), (True, "LocalKubernetesExecutor", False), (False, "LocalExecutor", False), (False, "CeleryExecutor", False), (False, "KubernetesExecutor", False), (False, "LocalKubernetesExecutor", False), ], ) def test_dags_mount_and_gitsync_expected_with_dag_processor( self, dag_processor, executor, skip_dags_mount ): """ DAG Processor can move gitsync and DAGs mount from the scheduler to the DAG Processor only. The only exception is when we have a Local executor. In these cases, the scheduler does the worker role and needs access to DAGs anyway. 
""" docs = render_chart( values={ "dagProcessor": {"enabled": dag_processor}, "executor": executor, "dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": True}}, "scheduler": {"logGroomerSidecar": {"enabled": False}}, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) if skip_dags_mount: assert "dags" not in [ vm["name"] for vm in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) ] assert "dags" not in [vm["name"] for vm in jmespath.search("spec.template.spec.volumes", docs[0])] assert 1 == len(jmespath.search("spec.template.spec.containers", docs[0])) else: assert "dags" in [ vm["name"] for vm in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) ] assert "dags" in [vm["name"] for vm in jmespath.search("spec.template.spec.volumes", docs[0])] assert "git-sync" in [ c["name"] for c in jmespath.search("spec.template.spec.containers", docs[0]) ] assert "git-sync-init" in [ c["name"] for c in jmespath.search("spec.template.spec.initContainers", docs[0]) ] def test_persistence_volume_annotations(self): docs = render_chart( values={"executor": "LocalExecutor", "workers": {"persistence": {"annotations": {"foo": "bar"}}}}, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert {"foo": "bar"} == jmespath.search("spec.volumeClaimTemplates[0].metadata.annotations", docs[0]) @pytest.mark.parametrize( "executor", [ "LocalExecutor", "LocalKubernetesExecutor", "CeleryExecutor", "KubernetesExecutor", "CeleryKubernetesExecutor", ], ) def test_scheduler_deployment_has_executor_label(self, executor): docs = render_chart( values={"executor": executor}, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert 1 == len(docs) assert executor == docs[0]["metadata"]["labels"].get("executor") def test_should_add_component_specific_annotations(self): docs = render_chart( values={ "scheduler": { "annotations": {"test_annotation": "test_annotation_value"}, }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert "annotations" in jmespath.search("metadata", docs[0]) assert jmespath.search("metadata.annotations", docs[0])["test_annotation"] == "test_annotation_value" def test_scheduler_pod_hostaliases(self): docs = render_chart( values={ "scheduler": { "hostAliases": [{"ip": "127.0.0.1", "hostnames": ["foo.local"]}], }, }, show_only=["templates/scheduler/scheduler-deployment.yaml"], ) assert "127.0.0.1" == jmespath.search("spec.template.spec.hostAliases[0].ip", docs[0]) assert "foo.local" == jmespath.search("spec.template.spec.hostAliases[0].hostnames[0]", docs[0]) class TestSchedulerNetworkPolicy: """Tests scheduler network policy.""" def test_should_add_component_specific_labels(self): docs = render_chart( values={ "networkPolicies": {"enabled": True}, "scheduler": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/scheduler/scheduler-networkpolicy.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" class TestSchedulerLogGroomer(LogGroomerTestBase): """Scheduler log groomer.""" obj_name = "scheduler" folder = "scheduler" class TestSchedulerService: """Tests scheduler service.""" @pytest.mark.parametrize( "executor, creates_service", [ ("LocalExecutor", True), ("CeleryExecutor", False), ("CeleryKubernetesExecutor", False), ("KubernetesExecutor", False), ("LocalKubernetesExecutor", True), ], ) def test_should_create_scheduler_service_for_specific_executors(self, 
executor, creates_service): docs = render_chart( values={ "executor": executor, "scheduler": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/scheduler/scheduler-service.yaml"], ) if creates_service: assert jmespath.search("kind", docs[0]) == "Service" assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" else: assert docs == [] def test_should_add_component_specific_labels(self): docs = render_chart( values={ "executor": "LocalExecutor", "scheduler": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/scheduler/scheduler-service.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" class TestSchedulerServiceAccount: """Tests scheduler service account.""" def test_should_add_component_specific_labels(self): docs = render_chart( values={ "scheduler": { "serviceAccount": {"create": True}, "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/scheduler/scheduler-serviceaccount.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value"
33,157
40.138958
110
py
airflow
airflow-main/helm_tests/airflow_aux/test_celery_kubernetes_executor.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath

from tests.charts.helm_template_generator import render_chart


class TestCeleryKubernetesExecutor:
    """Tests celery kubernetes executor."""

    def test_should_create_a_worker_deployment_with_the_celery_executor(self):
        docs = render_chart(
            values={
                "executor": "CeleryExecutor",
                "dags": {"persistence": {"enabled": True}, "gitSync": {"enabled": True}},
            },
            show_only=["templates/workers/worker-deployment.yaml"],
        )

        assert "config" == jmespath.search("spec.template.spec.volumes[0].name", docs[0])
        assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0])

    def test_should_create_a_worker_deployment_with_the_celery_kubernetes_executor(self):
        docs = render_chart(
            values={
                "executor": "CeleryKubernetesExecutor",
                "dags": {"gitSync": {"enabled": True}, "persistence": {"enabled": False}},
            },
            show_only=["templates/workers/worker-deployment.yaml"],
        )

        assert "config" == jmespath.search("spec.template.spec.volumes[0].name", docs[0])
        assert "dags" == jmespath.search("spec.template.spec.volumes[1].name", docs[0])
2,080
40.62
90
py
airflow
airflow-main/helm_tests/airflow_aux/test_logs_persistent_volume_claim.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath

from tests.charts.helm_template_generator import render_chart


class TestLogsPersistentVolumeClaim:
    """Tests logs PVC."""

    def test_should_not_generate_a_document_if_persistence_is_disabled(self):
        docs = render_chart(
            values={"logs": {"persistence": {"enabled": False}}},
            show_only=["templates/logs-persistent-volume-claim.yaml"],
        )

        assert 0 == len(docs)

    def test_should_not_generate_a_document_when_using_an_existing_claim(self):
        docs = render_chart(
            values={"logs": {"persistence": {"enabled": True, "existingClaim": "test-claim"}}},
            show_only=["templates/logs-persistent-volume-claim.yaml"],
        )

        assert 0 == len(docs)

    def test_should_generate_a_document_if_persistence_is_enabled_and_not_using_an_existing_claim(self):
        docs = render_chart(
            values={"logs": {"persistence": {"enabled": True, "existingClaim": None}}},
            show_only=["templates/logs-persistent-volume-claim.yaml"],
        )

        assert 1 == len(docs)

    def test_should_set_pvc_details_correctly(self):
        docs = render_chart(
            values={
                "logs": {
                    "persistence": {
                        "enabled": True,
                        "size": "1G",
                        "existingClaim": None,
                        "storageClassName": "MyStorageClass",
                    }
                }
            },
            show_only=["templates/logs-persistent-volume-claim.yaml"],
        )

        assert {
            "accessModes": ["ReadWriteMany"],
            "resources": {"requests": {"storage": "1G"}},
            "storageClassName": "MyStorageClass",
        } == jmespath.search("spec", docs[0])
2,613
35.816901
104
py
airflow
airflow-main/helm_tests/airflow_aux/test_create_user_job.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart class TestCreateUserJob: """Tests create user job.""" def test_should_run_by_default(self): docs = render_chart(show_only=["templates/jobs/create-user-job.yaml"]) assert "Job" == docs[0]["kind"] assert "create-user" == jmespath.search("spec.template.spec.containers[0].name", docs[0]) assert 50000 == jmespath.search("spec.template.spec.securityContext.runAsUser", docs[0]) def test_should_support_annotations(self): docs = render_chart( values={"createUserJob": {"annotations": {"foo": "bar"}, "jobAnnotations": {"fiz": "fuz"}}}, show_only=["templates/jobs/create-user-job.yaml"], ) annotations = jmespath.search("spec.template.metadata.annotations", docs[0]) assert "foo" in annotations assert "bar" == annotations["foo"] job_annotations = jmespath.search("metadata.annotations", docs[0]) assert "fiz" in job_annotations assert "fuz" == job_annotations["fiz"] def test_should_add_component_specific_labels(self): docs = render_chart( values={ "createUserJob": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/jobs/create-user-job.yaml"], ) assert "test_label" in jmespath.search("spec.template.metadata.labels", docs[0]) assert jmespath.search("spec.template.metadata.labels", docs[0])["test_label"] == "test_label_value" def test_should_create_valid_affinity_tolerations_and_node_selector(self): docs = render_chart( values={ "createUserJob": { "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "nodeSelector": {"diskType": "ssd"}, } }, show_only=["templates/jobs/create-user-job.yaml"], ) assert "Job" == jmespath.search("kind", docs[0]) assert "foo" == jmespath.search( "spec.template.spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." 
"key", docs[0], ) assert "ssd" == jmespath.search( "spec.template.spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.template.spec.tolerations[0].key", docs[0], ) def test_create_user_job_resources_are_configurable(self): resources = { "requests": { "cpu": "128m", "memory": "256Mi", }, "limits": { "cpu": "256m", "memory": "512Mi", }, } docs = render_chart( values={ "createUserJob": { "resources": resources, }, }, show_only=["templates/jobs/create-user-job.yaml"], ) assert resources == jmespath.search("spec.template.spec.containers[0].resources", docs[0]) def test_should_disable_default_helm_hooks(self): docs = render_chart( values={"createUserJob": {"useHelmHooks": False}}, show_only=["templates/jobs/create-user-job.yaml"], ) annotations = jmespath.search("metadata.annotations", docs[0]) assert annotations is None def test_should_set_correct_helm_hooks_weight(self): docs = render_chart( show_only=[ "templates/jobs/create-user-job.yaml", ], ) annotations = jmespath.search("metadata.annotations", docs[0]) assert annotations["helm.sh/hook-weight"] == "2" def test_should_add_extra_containers(self): docs = render_chart( values={ "createUserJob": { "extraContainers": [ {"name": "test-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/jobs/create-user-job.yaml"], ) assert { "name": "test-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.template.spec.containers[-1]", docs[0]) def test_should_add_extra_volumes(self): docs = render_chart( values={ "createUserJob": { "extraVolumes": [{"name": "myvolume-{{ .Chart.Name }}", "emptyDir": {}}], }, }, show_only=["templates/jobs/create-user-job.yaml"], ) assert {"name": "myvolume-airflow", "emptyDir": {}} == jmespath.search( "spec.template.spec.volumes[-1]", docs[0] ) def test_should_add_extra_volume_mounts(self): docs = render_chart( values={ "createUserJob": { "extraVolumeMounts": [{"name": "foobar-{{ .Chart.Name }}", "mountPath": "foo/bar"}], }, }, show_only=["templates/jobs/create-user-job.yaml"], ) assert {"name": "foobar-airflow", "mountPath": "foo/bar"} == jmespath.search( "spec.template.spec.containers[0].volumeMounts[-1]", docs[0] ) def test_should_add_global_volume_and_global_volume_mount(self): docs = render_chart( values={ "volumes": [{"name": "myvolume", "emptyDir": {}}], "volumeMounts": [{"name": "foobar", "mountPath": "foo/bar"}], }, show_only=["templates/jobs/create-user-job.yaml"], ) assert {"name": "myvolume", "emptyDir": {}} == jmespath.search( "spec.template.spec.volumes[-1]", docs[0] ) assert {"name": "foobar", "mountPath": "foo/bar"} == jmespath.search( "spec.template.spec.containers[0].volumeMounts[-1]", docs[0] ) def test_should_add_extraEnvs(self): docs = render_chart( values={ "createUserJob": { "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}], }, }, show_only=["templates/jobs/create-user-job.yaml"], ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.template.spec.containers[0].env", docs[0] ) def test_should_enable_custom_env(self): docs = render_chart( values={ "env": [ {"name": "foo", "value": "bar"}, ], "extraEnv": "- name: extraFoo\n value: extraBar\n", "createUserJob": {"applyCustomEnv": True}, }, show_only=["templates/jobs/create-user-job.yaml"], ) envs = jmespath.search("spec.template.spec.containers[0].env", docs[0]) assert {"name": "foo", "value": "bar"} in envs assert {"name": "extraFoo", "value": "extraBar"} in envs def test_should_disable_custom_env(self): docs = render_chart( 
values={ "env": [ {"name": "foo", "value": "bar"}, ], "extraEnv": "- name: extraFoo\n value: extraBar\n", "createUserJob": {"applyCustomEnv": False}, }, show_only=["templates/jobs/create-user-job.yaml"], ) envs = jmespath.search("spec.template.spec.containers[0].env", docs[0]) assert {"name": "foo", "value": "bar"} not in envs assert {"name": "extraFoo", "value": "extraBar"} not in envs def test_job_ttl_after_finished(self): docs = render_chart( values={"createUserJob": {"ttlSecondsAfterFinished": 1}}, show_only=["templates/jobs/create-user-job.yaml"], ) ttl = jmespath.search("spec.ttlSecondsAfterFinished", docs[0]) assert ttl == 1 def test_job_ttl_after_finished_zero(self): docs = render_chart( values={"createUserJob": {"ttlSecondsAfterFinished": 0}}, show_only=["templates/jobs/create-user-job.yaml"], ) ttl = jmespath.search("spec.ttlSecondsAfterFinished", docs[0]) assert ttl == 0 def test_job_ttl_after_finished_nil(self): docs = render_chart( values={"createUserJob": {"ttlSecondsAfterFinished": None}}, show_only=["templates/jobs/create-user-job.yaml"], ) spec = jmespath.search("spec", docs[0]) assert "ttlSecondsAfterFinished" not in spec @pytest.mark.parametrize( "airflow_version, expected_arg", [ ("1.10.14", "airflow create_user"), ("2.0.2", "airflow users create"), ], ) def test_default_command_and_args_airflow_version(self, airflow_version, expected_arg): docs = render_chart( values={ "airflowVersion": airflow_version, }, show_only=["templates/jobs/create-user-job.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None assert [ "bash", "-c", f'exec \\\n{expected_arg} "$@"', "--", "-r", "Admin", "-u", "admin", "-e", "[email protected]", "-f", "admin", "-l", "user", "-p", "admin", ] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) @pytest.mark.parametrize("command", [None, ["custom", "command"]]) @pytest.mark.parametrize("args", [None, ["custom", "args"]]) def test_command_and_args_overrides(self, command, args): docs = render_chart( values={"createUserJob": {"command": command, "args": args}}, show_only=["templates/jobs/create-user-job.yaml"], ) assert command == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert args == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_command_and_args_overrides_are_templated(self): docs = render_chart( values={ "createUserJob": {"command": ["{{ .Release.Name }}"], "args": ["{{ .Release.Service }}"]} }, show_only=["templates/jobs/create-user-job.yaml"], ) assert ["release-name"] == jmespath.search("spec.template.spec.containers[0].command", docs[0]) assert ["Helm"] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_default_user_overrides(self): docs = render_chart( values={ "webserver": { "defaultUser": { "role": "SomeRole", "username": "jdoe", "email": "[email protected]", "firstName": "John", "lastName": "Doe", "password": "whereisjane?", } } }, show_only=["templates/jobs/create-user-job.yaml"], ) assert jmespath.search("spec.template.spec.containers[0].command", docs[0]) is None assert [ "bash", "-c", 'exec \\\nairflow users create "$@"', "--", "-r", "SomeRole", "-u", "jdoe", "-e", "[email protected]", "-f", "John", "-l", "Doe", "-p", "whereisjane?", ] == jmespath.search("spec.template.spec.containers[0].args", docs[0]) def test_no_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": None}, show_only=["templates/jobs/create-user-job.yaml"] ) volume_mounts = 
jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts) def test_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": "# Well hello!"}, show_only=["templates/jobs/create-user-job.yaml"], ) assert { "name": "config", "mountPath": "/opt/airflow/config/airflow_local_settings.py", "subPath": "airflow_local_settings.py", "readOnly": True, } in jmespath.search("spec.template.spec.containers[0].volumeMounts", docs[0]) class TestCreateUserJobServiceAccount: """Tests create user job service account.""" def test_should_add_component_specific_labels(self): docs = render_chart( values={ "createUserJob": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/jobs/create-user-job-serviceaccount.yaml"], ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value"
14,927
36.413534
109
py
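The create-user-job tests above lean on one pattern: render a template, then probe the resulting Kubernetes object with a JMESPath expression and assert on the value. Below is a minimal, self-contained sketch of that probing step; the `manifest` dict is a hypothetical hand-built stand-in for what `render_chart(...)` would return, so only the `jmespath.search` calls reflect the real pattern.

import jmespath

# Hypothetical stand-in for a rendered create-user Job manifest.
manifest = {
    "kind": "Job",
    "spec": {
        "ttlSecondsAfterFinished": 1,
        "template": {
            "spec": {
                "containers": [
                    {"name": "create-user", "env": [{"name": "foo", "value": "bar"}]}
                ]
            }
        },
    },
}

# Same assertion style as the chart tests: path expressions instead of manual indexing.
assert jmespath.search("spec.ttlSecondsAfterFinished", manifest) == 1
envs = jmespath.search("spec.template.spec.containers[0].env", manifest)
assert {"name": "foo", "value": "bar"} in envs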
airflow
airflow-main/helm_tests/airflow_aux/test_chart_quality.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import json from pathlib import Path import yaml from jsonschema import validate CHART_DIR = Path(__file__).parents[2] / "chart" class TestChartQuality: """Tests chart quality.""" def test_values_validate_schema(self): values = yaml.safe_load((CHART_DIR / "values.yaml").read_text()) schema = json.loads((CHART_DIR / "values.schema.json").read_text()) # Add extra restrictions just for the tests to make sure # we don't forget to update the schema if we add a new property schema["additionalProperties"] = False schema["minProperties"] = len(schema["properties"].keys()) # shouldn't raise validate(instance=values, schema=schema)
1,530
35.452381
75
py
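The chart-quality test above tightens the published values schema before validating values.yaml, so that any new top-level key without a schema entry fails the test. The sketch below replays that tightening on a tiny inline schema and values dict invented for illustration; only the `jsonschema.validate` usage mirrors the real test.

from jsonschema import ValidationError, validate

schema = {
    "type": "object",
    "properties": {"replicas": {"type": "integer"}},
}
values = {"replicas": 2, "newKey": "oops"}

# Loose schema: the extra key passes silently.
validate(instance=values, schema=schema)

# Tightened schema, as in the test: extra or missing properties now raise.
schema["additionalProperties"] = False
schema["minProperties"] = len(schema["properties"])
try:
    validate(instance=values, schema=schema)
except ValidationError as err:
    print("schema drift caught:", err.message)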
airflow
airflow-main/helm_tests/airflow_aux/test_basic_helm_chart.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import base64 import warnings from subprocess import CalledProcessError from typing import Any from unittest import mock import jmespath import pytest from tests.charts.helm_template_generator import render_chart OBJECT_COUNT_IN_BASIC_DEPLOYMENT = 35 class TestBaseChartTest: """Tests basic helm chart tests.""" def _get_values_with_version(self, values, version): if version != "default": values["airflowVersion"] = version return values def _get_object_count(self, version): if version == "2.3.2" or version == "default": return OBJECT_COUNT_IN_BASIC_DEPLOYMENT + 1 return OBJECT_COUNT_IN_BASIC_DEPLOYMENT @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_basic_deployments(self, version): expected_object_count_in_basic_deployment = self._get_object_count(version) k8s_objects = render_chart( "test-basic", self._get_values_with_version( values={ "chart": { "metadata": "AA", }, "labels": {"test-label": "TEST-VALUE"}, "fullnameOverride": "test-basic", }, version=version, ), ) list_of_kind_names_tuples = { (k8s_object["kind"], k8s_object["metadata"]["name"]) for k8s_object in k8s_objects } expected = { ("ServiceAccount", "test-basic-create-user-job"), ("ServiceAccount", "test-basic-migrate-database-job"), ("ServiceAccount", "test-basic-redis"), ("ServiceAccount", "test-basic-scheduler"), ("ServiceAccount", "test-basic-statsd"), ("ServiceAccount", "test-basic-triggerer"), ("ServiceAccount", "test-basic-webserver"), ("ServiceAccount", "test-basic-worker"), ("Secret", "test-basic-airflow-metadata"), ("Secret", "test-basic-broker-url"), ("Secret", "test-basic-fernet-key"), ("Secret", "test-basic-webserver-secret-key"), ("Secret", "test-basic-postgresql"), ("Secret", "test-basic-redis-password"), ("ConfigMap", "test-basic-airflow-config"), ("ConfigMap", "test-basic-statsd"), ("Role", "test-basic-pod-launcher-role"), ("Role", "test-basic-pod-log-reader-role"), ("RoleBinding", "test-basic-pod-launcher-rolebinding"), ("RoleBinding", "test-basic-pod-log-reader-rolebinding"), ("Service", "test-basic-postgresql-hl"), ("Service", "test-basic-postgresql"), ("Service", "test-basic-redis"), ("Service", "test-basic-statsd"), ("Service", "test-basic-webserver"), ("Service", "test-basic-worker"), ("Deployment", "test-basic-scheduler"), ("Deployment", "test-basic-statsd"), (self.default_trigger_obj(version), "test-basic-triggerer"), ("Deployment", "test-basic-webserver"), ("StatefulSet", "test-basic-postgresql"), ("StatefulSet", "test-basic-redis"), ("StatefulSet", "test-basic-worker"), ("Job", "test-basic-create-user"), ("Job", "test-basic-run-airflow-migrations"), } if version == "2.3.2": expected.add(("Secret", "test-basic-airflow-result-backend")) if version == "default": 
expected.add(("Service", "test-basic-triggerer")) assert list_of_kind_names_tuples == expected assert expected_object_count_in_basic_deployment == len(k8s_objects) for k8s_object in k8s_objects: labels = jmespath.search("metadata.labels", k8s_object) or {} if "helm.sh/chart" in labels: chart_name = labels.get("helm.sh/chart") else: chart_name = labels.get("chart") if chart_name and "postgresql" in chart_name: continue k8s_name = k8s_object["kind"] + ":" + k8s_object["metadata"]["name"] assert "TEST-VALUE" == labels.get( "test-label" ), f"Missing label test-label on {k8s_name}. Current labels: {labels}" @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_basic_deployment_with_standalone_dag_processor(self, version): # Dag Processor creates two extra objects compared to the basic deployment object_count_in_basic_deployment = self._get_object_count(version) expected_object_count_with_standalone_scheduler = object_count_in_basic_deployment + 2 k8s_objects = render_chart( "test-basic", self._get_values_with_version( values={ "chart": { "metadata": "AA", }, "labels": {"test-label": "TEST-VALUE"}, "fullnameOverride": "test-basic", "dagProcessor": {"enabled": True}, }, version=version, ), ) list_of_kind_names_tuples = { (k8s_object["kind"], k8s_object["metadata"]["name"]) for k8s_object in k8s_objects } expected = { ("ServiceAccount", "test-basic-create-user-job"), ("ServiceAccount", "test-basic-migrate-database-job"), ("ServiceAccount", "test-basic-redis"), ("ServiceAccount", "test-basic-scheduler"), ("ServiceAccount", "test-basic-statsd"), ("ServiceAccount", "test-basic-triggerer"), ("ServiceAccount", "test-basic-dag-processor"), ("ServiceAccount", "test-basic-webserver"), ("ServiceAccount", "test-basic-worker"), ("Secret", "test-basic-airflow-metadata"), ("Secret", "test-basic-broker-url"), ("Secret", "test-basic-fernet-key"), ("Secret", "test-basic-webserver-secret-key"), ("Secret", "test-basic-postgresql"), ("Secret", "test-basic-redis-password"), ("ConfigMap", "test-basic-airflow-config"), ("ConfigMap", "test-basic-statsd"), ("Role", "test-basic-pod-launcher-role"), ("Role", "test-basic-pod-log-reader-role"), ("RoleBinding", "test-basic-pod-launcher-rolebinding"), ("RoleBinding", "test-basic-pod-log-reader-rolebinding"), ("Service", "test-basic-postgresql-hl"), ("Service", "test-basic-postgresql"), ("Service", "test-basic-redis"), ("Service", "test-basic-statsd"), ("Service", "test-basic-webserver"), ("Service", "test-basic-worker"), ("Deployment", "test-basic-scheduler"), ("Deployment", "test-basic-statsd"), (self.default_trigger_obj(version), "test-basic-triggerer"), ("Deployment", "test-basic-dag-processor"), ("Deployment", "test-basic-webserver"), ("StatefulSet", "test-basic-postgresql"), ("StatefulSet", "test-basic-redis"), ("StatefulSet", "test-basic-worker"), ("Job", "test-basic-create-user"), ("Job", "test-basic-run-airflow-migrations"), } if version == "2.3.2": expected.add(("Secret", "test-basic-airflow-result-backend")) if version == "default": expected.add(("Service", "test-basic-triggerer")) assert list_of_kind_names_tuples == expected assert expected_object_count_with_standalone_scheduler == len(k8s_objects) for k8s_object in k8s_objects: labels = jmespath.search("metadata.labels", k8s_object) or {} if "helm.sh/chart" in labels: chart_name = labels.get("helm.sh/chart") else: chart_name = labels.get("chart") if chart_name and "postgresql" in chart_name: continue k8s_name = k8s_object["kind"] + ":" + k8s_object["metadata"]["name"] assert "TEST-VALUE" == 
labels.get( "test-label" ), f"Missing label test-label on {k8s_name}. Current labels: {labels}" @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_basic_deployment_without_default_users(self, version): expected_object_count_in_basic_deployment = self._get_object_count(version) k8s_objects = render_chart( "test-basic", values=self._get_values_with_version( values={"webserver": {"defaultUser": {"enabled": False}}}, version=version ), ) list_of_kind_names_tuples = [ (k8s_object["kind"], k8s_object["metadata"]["name"]) for k8s_object in k8s_objects ] assert ("Job", "test-basic-create-user") not in list_of_kind_names_tuples assert expected_object_count_in_basic_deployment - 2 == len(k8s_objects) @pytest.mark.parametrize("version", ["2.3.2", "2.4.0", "default"]) def test_basic_deployment_without_statsd(self, version): expected_object_count_in_basic_deployment = self._get_object_count(version) k8s_objects = render_chart( "test-basic", values=self._get_values_with_version(values={"statsd": {"enabled": False}}, version=version), ) list_of_kind_names_tuples = [ (k8s_object["kind"], k8s_object["metadata"]["name"]) for k8s_object in k8s_objects ] assert ("ServiceAccount", "test-basic-statsd") not in list_of_kind_names_tuples assert ("ConfigMap", "test-basic-statsd") not in list_of_kind_names_tuples assert ("Service", "test-basic-statsd") not in list_of_kind_names_tuples assert ("Deployment", "test-basic-statsd") not in list_of_kind_names_tuples assert expected_object_count_in_basic_deployment - 4 == len(k8s_objects) def test_network_policies_are_valid(self): k8s_objects = render_chart( "test-basic", { "networkPolicies": {"enabled": True}, "executor": "CeleryExecutor", "flower": {"enabled": True}, "pgbouncer": {"enabled": True}, }, ) kind_names_tuples = { (k8s_object["kind"], k8s_object["metadata"]["name"]) for k8s_object in k8s_objects } expected_kind_names = [ ("NetworkPolicy", "test-basic-redis-policy"), ("NetworkPolicy", "test-basic-flower-policy"), ("NetworkPolicy", "test-basic-pgbouncer-policy"), ("NetworkPolicy", "test-basic-scheduler-policy"), ("NetworkPolicy", "test-basic-statsd-policy"), ("NetworkPolicy", "test-basic-webserver-policy"), ("NetworkPolicy", "test-basic-worker-policy"), ] for kind_name in expected_kind_names: assert kind_name in kind_names_tuples def test_labels_are_valid(self): """Test labels are correctly applied on all objects created by this chart.""" release_name = "test-basic" k8s_objects = render_chart( name=release_name, values={ "labels": {"label1": "value1", "label2": "value2"}, "executor": "CeleryExecutor", "data": { "resultBackendConnection": { "user": "someuser", "pass": "somepass", "host": "somehost", "protocol": "postgresql", "port": 7777, "db": "somedb", "sslmode": "allow", } }, "pgbouncer": {"enabled": True}, "redis": {"enabled": True}, "ingress": {"enabled": True}, "networkPolicies": {"enabled": True}, "cleanup": {"enabled": True}, "flower": {"enabled": True}, "dagProcessor": {"enabled": True}, "logs": {"persistence": {"enabled": True}}, "dags": {"persistence": {"enabled": True}}, "postgresql": {"enabled": False}, # We won't check the objects created by the postgres chart }, ) kind_k8s_obj_labels_tuples = { (k8s_object["metadata"]["name"], k8s_object["kind"]): k8s_object["metadata"]["labels"] for k8s_object in k8s_objects } kind_names_tuples = [ (f"{release_name}-airflow-cleanup", "ServiceAccount", None), (f"{release_name}-airflow-config", "ConfigMap", "config"), (f"{release_name}-airflow-create-user-job", "ServiceAccount", 
"create-user-job"), (f"{release_name}-airflow-flower", "ServiceAccount", "flower"), (f"{release_name}-airflow-metadata", "Secret", None), (f"{release_name}-airflow-migrate-database-job", "ServiceAccount", "run-airflow-migrations"), (f"{release_name}-airflow-pgbouncer", "ServiceAccount", "pgbouncer"), (f"{release_name}-airflow-result-backend", "Secret", None), (f"{release_name}-airflow-redis", "ServiceAccount", "redis"), (f"{release_name}-airflow-scheduler", "ServiceAccount", "scheduler"), (f"{release_name}-airflow-statsd", "ServiceAccount", "statsd"), (f"{release_name}-airflow-webserver", "ServiceAccount", "webserver"), (f"{release_name}-airflow-worker", "ServiceAccount", "worker"), (f"{release_name}-airflow-triggerer", "ServiceAccount", "triggerer"), (f"{release_name}-airflow-dag-processor", "ServiceAccount", "dag-processor"), (f"{release_name}-broker-url", "Secret", "redis"), (f"{release_name}-cleanup", "CronJob", "airflow-cleanup-pods"), (f"{release_name}-cleanup-role", "Role", None), (f"{release_name}-cleanup-rolebinding", "RoleBinding", None), (f"{release_name}-create-user", "Job", "create-user-job"), (f"{release_name}-fernet-key", "Secret", None), (f"{release_name}-flower", "Deployment", "flower"), (f"{release_name}-flower", "Service", "flower"), (f"{release_name}-flower-policy", "NetworkPolicy", "airflow-flower-policy"), (f"{release_name}-flower-ingress", "Ingress", "flower-ingress"), (f"{release_name}-pgbouncer", "Deployment", "pgbouncer"), (f"{release_name}-pgbouncer", "Service", "pgbouncer"), (f"{release_name}-pgbouncer-config", "Secret", "pgbouncer"), (f"{release_name}-pgbouncer-policy", "NetworkPolicy", "airflow-pgbouncer-policy"), (f"{release_name}-pgbouncer-stats", "Secret", "pgbouncer"), (f"{release_name}-pod-launcher-role", "Role", None), (f"{release_name}-pod-launcher-rolebinding", "RoleBinding", None), (f"{release_name}-pod-log-reader-role", "Role", None), (f"{release_name}-pod-log-reader-rolebinding", "RoleBinding", None), (f"{release_name}-redis", "Service", "redis"), (f"{release_name}-redis", "StatefulSet", "redis"), (f"{release_name}-redis-policy", "NetworkPolicy", "redis-policy"), (f"{release_name}-redis-password", "Secret", "redis"), (f"{release_name}-run-airflow-migrations", "Job", "run-airflow-migrations"), (f"{release_name}-scheduler", "Deployment", "scheduler"), (f"{release_name}-scheduler-policy", "NetworkPolicy", "airflow-scheduler-policy"), (f"{release_name}-statsd", "Deployment", "statsd"), (f"{release_name}-statsd", "Service", "statsd"), (f"{release_name}-statsd-policy", "NetworkPolicy", "statsd-policy"), (f"{release_name}-webserver", "Deployment", "webserver"), (f"{release_name}-webserver-secret-key", "Secret", "webserver"), (f"{release_name}-webserver", "Service", "webserver"), (f"{release_name}-webserver-policy", "NetworkPolicy", "airflow-webserver-policy"), (f"{release_name}-airflow-ingress", "Ingress", "airflow-ingress"), (f"{release_name}-worker", "Service", "worker"), (f"{release_name}-worker", "StatefulSet", "worker"), (f"{release_name}-worker-policy", "NetworkPolicy", "airflow-worker-policy"), (f"{release_name}-triggerer", "StatefulSet", "triggerer"), (f"{release_name}-dag-processor", "Deployment", "dag-processor"), (f"{release_name}-logs", "PersistentVolumeClaim", "logs-pvc"), (f"{release_name}-dags", "PersistentVolumeClaim", "dags-pvc"), ] for k8s_object_name, kind, component in kind_names_tuples: expected_labels = { "label1": "value1", "label2": "value2", "tier": "airflow", "release": release_name, "heritage": "Helm", "chart": mock.ANY, } if 
component: expected_labels["component"] = component if k8s_object_name == f"{release_name}-scheduler": expected_labels["executor"] = "CeleryExecutor" actual_labels = kind_k8s_obj_labels_tuples.pop((k8s_object_name, kind)) assert actual_labels == expected_labels if kind_k8s_obj_labels_tuples: warnings.warn(f"Unchecked objects: {kind_k8s_obj_labels_tuples.keys()}") def test_labels_are_valid_on_job_templates(self): """Test labels are correctly applied on all job templates created by this chart.""" release_name = "test-basic" k8s_objects = render_chart( name=release_name, values={ "labels": {"label1": "value1", "label2": "value2"}, "executor": "CeleryExecutor", "dagProcessor": {"enabled": True}, "pgbouncer": {"enabled": True}, "redis": {"enabled": True}, "networkPolicies": {"enabled": True}, "cleanup": {"enabled": True}, "flower": {"enabled": True}, "postgresql": {"enabled": False}, # We won't check the objects created by the postgres chart }, ) dict_of_labels_in_job_templates = { k8s_object["metadata"]["name"]: k8s_object["spec"]["template"]["metadata"]["labels"] for k8s_object in k8s_objects if k8s_object["kind"] == "Job" } kind_names_tuples = [ (f"{release_name}-create-user", "create-user-job"), (f"{release_name}-run-airflow-migrations", "run-airflow-migrations"), ] for k8s_object_name, component in kind_names_tuples: expected_labels = { "label1": "value1", "label2": "value2", "tier": "airflow", "release": release_name, "component": component, } assert dict_of_labels_in_job_templates.get(k8s_object_name) == expected_labels def test_annotations_on_airflow_pods_in_deployment(self): """ Test Annotations are correctly applied on all pods created Scheduler, Webserver & Worker deployments. """ release_name = "test-basic" k8s_objects = render_chart( name=release_name, values={ "airflowPodAnnotations": {"test-annotation/safe-to-evict": "true"}, "flower": {"enabled": True}, "dagProcessor": {"enabled": True}, }, show_only=[ "templates/scheduler/scheduler-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/dag-processor/dag-processor-deployment.yaml", "templates/flower/flower-deployment.yaml", "templates/jobs/create-user-job.yaml", "templates/jobs/migrate-database-job.yaml", ], ) # pod_template_file is tested separately as it has extra setup steps assert 8 == len(k8s_objects) for k8s_object in k8s_objects: annotations = k8s_object["spec"]["template"]["metadata"]["annotations"] assert "test-annotation/safe-to-evict" in annotations assert "true" in annotations["test-annotation/safe-to-evict"] def test_chart_is_consistent_with_official_airflow_image(self): def get_k8s_objs_with_image(obj: list[Any] | dict[str, Any]) -> list[dict[str, Any]]: """ Recursive helper to retrieve all the k8s objects that have an "image" key inside k8s obj or list of k8s obj. 
""" out = [] if isinstance(obj, list): for item in obj: out += get_k8s_objs_with_image(item) if isinstance(obj, dict): if "image" in obj: out += [obj] # include sub objs, just in case for val in obj.values(): out += get_k8s_objs_with_image(val) return out image_repo = "test-airflow-repo/airflow" k8s_objects = render_chart("test-basic", {"defaultAirflowRepository": image_repo}) objs_with_image = get_k8s_objs_with_image(k8s_objects) for obj in objs_with_image: image: str = obj["image"] if image.startswith(image_repo): # Make sure that a command is not specified assert "command" not in obj def test_unsupported_executor(self): with pytest.raises(CalledProcessError) as ex_ctx: render_chart( "test-basic", { "executor": "SequentialExecutor", }, ) assert ( 'executor must be one of the following: "LocalExecutor", ' '"LocalKubernetesExecutor", "CeleryExecutor", ' '"KubernetesExecutor", "CeleryKubernetesExecutor"' in ex_ctx.value.stderr.decode() ) @pytest.mark.parametrize( "image", ["airflow", "pod_template", "flower", "statsd", "redis", "pgbouncer", "pgbouncerExporter", "gitSync"], ) def test_invalid_pull_policy(self, image): with pytest.raises(CalledProcessError) as ex_ctx: render_chart( "test-basic", { "images": {image: {"pullPolicy": "InvalidPolicy"}}, }, ) assert ( 'pullPolicy must be one of the following: "Always", "Never", "IfNotPresent"' in ex_ctx.value.stderr.decode() ) def test_invalid_dags_access_mode(self): with pytest.raises(CalledProcessError) as ex_ctx: render_chart( "test-basic", { "dags": {"persistence": {"accessMode": "InvalidMode"}}, }, ) assert ( 'accessMode must be one of the following: "ReadWriteOnce", "ReadOnlyMany", "ReadWriteMany"' in ex_ctx.value.stderr.decode() ) @pytest.mark.parametrize("namespace", ["abc", "123", "123abc", "123-abc"]) def test_namespace_names(self, namespace): """Test various namespace names to make sure they render correctly in templates.""" render_chart(namespace=namespace) def test_postgres_connection_url_no_override(self): # no nameoverride provided doc = render_chart( "my-release", show_only=["templates/secrets/metadata-connection-secret.yaml"], )[0] assert ( "postgresql://postgres:[email protected]:5432/postgres?sslmode=disable" == base64.b64decode(doc["data"]["connection"]).decode("utf-8") ) def test_postgres_connection_url_name_override(self): # nameoverride provided doc = render_chart( "my-release", show_only=["templates/secrets/metadata-connection-secret.yaml"], values={"postgresql": {"nameOverride": "overrideName"}}, )[0] assert ( "postgresql://postgres:postgres@overrideName:5432/postgres?sslmode=disable" == base64.b64decode(doc["data"]["connection"]).decode("utf-8") ) def test_priority_classes(self): pc = [ {"name": "class1", "preemptionPolicy": "PreemptLowerPriority", "value": 1000}, {"name": "class2", "preemptionPolicy": "Never", "value": 10000}, ] objs = render_chart( "my-release", show_only=["templates/priorityclasses/priority-classes.yaml"], values={"priorityClasses": pc}, ) assert len(objs) == 2 for i in range(len(objs)): assert objs[i]["kind"] == "PriorityClass" assert objs[i]["apiVersion"] == "scheduling.k8s.io/v1" assert objs[i]["metadata"]["name"] == ("my-release" + "-" + pc[i]["name"]) assert objs[i]["preemptionPolicy"] == pc[i]["preemptionPolicy"] assert objs[i]["value"] == pc[i]["value"] def test_priority_classes_default_preemption(self): obj = render_chart( "my-release", show_only=["templates/priorityclasses/priority-classes.yaml"], values={ "priorityClasses": [ {"name": "class1", "value": 10000}, ] }, )[0] assert 
obj["preemptionPolicy"] == "PreemptLowerPriority" @staticmethod def default_trigger_obj(version): if version == "default": return "StatefulSet" return "Deployment"
26,888
45.280551
110
py
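A recurring device in the basic-deployment tests above is collapsing every rendered object into a (kind, name) tuple and comparing sets, which gives order-independent equality and a readable diff on failure. A small sketch of that reduction follows; `k8s_objects` here is a hypothetical two-element render result, not output from the real chart.

k8s_objects = [
    {"kind": "ServiceAccount", "metadata": {"name": "test-basic-scheduler", "labels": {}}},
    {"kind": "Deployment", "metadata": {"name": "test-basic-scheduler", "labels": {}}},
]

actual = {(obj["kind"], obj["metadata"]["name"]) for obj in k8s_objects}
expected = {
    ("ServiceAccount", "test-basic-scheduler"),
    ("Deployment", "test-basic-scheduler"),
}

# Set differences pinpoint exactly which objects appeared unexpectedly or went missing.
assert actual == expected, f"unexpected: {actual - expected}, missing: {expected - actual}"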
airflow
airflow-main/helm_tests/airflow_aux/test_airflow_common.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart class TestAirflowCommon: """ This class holds tests that apply to more than 1 Airflow component so we don't have to repeat tests everywhere. The one general exception will be the KubernetesExecutor PodTemplateFile, as it requires extra test setup. """ @pytest.mark.parametrize( "dag_values, expected_mount", [ ( {"gitSync": {"enabled": True}}, { "mountPath": "/opt/airflow/dags", "name": "dags", "readOnly": True, }, ), ( {"persistence": {"enabled": True}}, { "mountPath": "/opt/airflow/dags", "name": "dags", "readOnly": False, }, ), ( { "gitSync": {"enabled": True}, "persistence": {"enabled": True}, }, { "mountPath": "/opt/airflow/dags", "name": "dags", "readOnly": True, }, ), ( {"persistence": {"enabled": True, "subPath": "test/dags"}}, { "subPath": "test/dags", "mountPath": "/opt/airflow/dags", "name": "dags", "readOnly": False, }, ), ], ) def test_dags_mount(self, dag_values, expected_mount): docs = render_chart( values={ "dags": dag_values, "airflowVersion": "1.10.15", }, # airflowVersion is present so webserver gets the mount show_only=[ "templates/scheduler/scheduler-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/webserver/webserver-deployment.yaml", ], ) assert 3 == len(docs) for doc in docs: assert expected_mount in jmespath.search("spec.template.spec.containers[0].volumeMounts", doc) def test_webserver_config_configmap_name_volume_mounts(self): configmap_name = "my-configmap" docs = render_chart( values={ "webserver": { "webserverConfig": "CSRF_ENABLED = True # {{ .Release.Name }}", "webserverConfigConfigMapName": configmap_name, }, "workers": {"kerberosSidecar": {"enabled": True}}, }, show_only=[ "templates/scheduler/scheduler-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/workers/worker-deployment.yaml", ], ) for index in range(len(docs)): print(docs[index]) assert "webserver-config" in [ c["name"] for r in jmespath.search( "spec.template.spec.initContainers[?name=='wait-for-airflow-migrations'].volumeMounts", docs[index], ) for c in r ] for container in jmespath.search("spec.template.spec.containers", docs[index]): assert "webserver-config" in [c["name"] for c in jmespath.search("volumeMounts", container)] assert "webserver-config" in [ c["name"] for c in jmespath.search("spec.template.spec.volumes", docs[index]) ] assert configmap_name == jmespath.search( "spec.template.spec.volumes[?name=='webserver-config'].configMap.name | [0]", docs[index] ) def test_annotations(self): """ Test Annotations are correctly applied on all pods created Scheduler, Webserver & Worker deployments. 
""" release_name = "test-basic" k8s_objects = render_chart( name=release_name, values={ "airflowPodAnnotations": {"test-annotation/safe-to-evict": "true"}, "cleanup": {"enabled": True}, "flower": {"enabled": True}, "dagProcessor": {"enabled": True}, }, show_only=[ "templates/scheduler/scheduler-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/flower/flower-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/dag-processor/dag-processor-deployment.yaml", "templates/cleanup/cleanup-cronjob.yaml", ], ) assert 7 == len(k8s_objects) for k8s_object in k8s_objects: if k8s_object["kind"] == "CronJob": annotations = k8s_object["spec"]["jobTemplate"]["spec"]["template"]["metadata"]["annotations"] else: annotations = k8s_object["spec"]["template"]["metadata"]["annotations"] assert "test-annotation/safe-to-evict" in annotations assert "true" in annotations["test-annotation/safe-to-evict"] def test_global_affinity_tolerations_topology_spread_constraints_and_node_selector(self): """Test affinity, tolerations, etc are correctly applied on all pods created.""" k8s_objects = render_chart( values={ "cleanup": {"enabled": True}, "flower": {"enabled": True}, "pgbouncer": {"enabled": True}, "dagProcessor": {"enabled": True}, "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "static-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [ { "maxSkew": 1, "topologyKey": "foo", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } ], "nodeSelector": {"type": "user-node"}, }, show_only=[ "templates/cleanup/cleanup-cronjob.yaml", "templates/flower/flower-deployment.yaml", "templates/jobs/create-user-job.yaml", "templates/jobs/migrate-database-job.yaml", "templates/pgbouncer/pgbouncer-deployment.yaml", "templates/redis/redis-statefulset.yaml", "templates/scheduler/scheduler-deployment.yaml", "templates/statsd/statsd-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/dag-processor/dag-processor-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/workers/worker-deployment.yaml", ], ) assert 12 == len(k8s_objects) for k8s_object in k8s_objects: if k8s_object["kind"] == "CronJob": podSpec = jmespath.search("spec.jobTemplate.spec.template.spec", k8s_object) else: podSpec = jmespath.search("spec.template.spec", k8s_object) assert "foo" == jmespath.search( "affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." 
"key", podSpec, ) assert "user-node" == jmespath.search("nodeSelector.type", podSpec) assert "static-pods" == jmespath.search("tolerations[0].key", podSpec) assert "foo" == jmespath.search("topologySpreadConstraints[0].topologyKey", podSpec) @pytest.mark.parametrize( "expected_image,tag,digest", [ ("apache/airflow:user-tag", "user-tag", None), ("apache/airflow@user-digest", None, "user-digest"), ("apache/airflow@user-digest", "user-tag", "user-digest"), ], ) def test_should_use_correct_image(self, expected_image, tag, digest): docs = render_chart( values={ "images": { "airflow": { "repository": "apache/airflow", "tag": tag, "digest": digest, }, }, }, show_only=[ "templates/scheduler/scheduler-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/dag-processor/dag-processor-deployment.yaml", ], ) for doc in docs: assert expected_image == jmespath.search("spec.template.spec.initContainers[0].image", doc) @pytest.mark.parametrize( "expected_image,tag,digest", [ ("apache/airflow:user-tag", "user-tag", None), ("apache/airflow@user-digest", None, "user-digest"), ("apache/airflow@user-digest", "user-tag", "user-digest"), ], ) def test_should_use_correct_default_image(self, expected_image, tag, digest): docs = render_chart( values={ "defaultAirflowRepository": "apache/airflow", "defaultAirflowTag": tag, "defaultAirflowDigest": digest, "images": {"useDefaultImageForMigration": True}, }, show_only=[ "templates/scheduler/scheduler-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/dag-processor/dag-processor-deployment.yaml", ], ) for doc in docs: assert expected_image == jmespath.search("spec.template.spec.initContainers[0].image", doc) def test_should_set_correct_helm_hooks_weight(self): docs = render_chart( show_only=["templates/secrets/fernetkey-secret.yaml"], ) annotations = jmespath.search("metadata.annotations", docs[0]) assert annotations["helm.sh/hook-weight"] == "0" def test_should_disable_some_variables(self): docs = render_chart( values={ "enableBuiltInSecretEnvVars": { "AIRFLOW__CORE__SQL_ALCHEMY_CONN": False, "AIRFLOW__DATABASE__SQL_ALCHEMY_CONN": False, "AIRFLOW__WEBSERVER__SECRET_KEY": False, "AIRFLOW__ELASTICSEARCH__HOST": False, }, }, show_only=[ "templates/scheduler/scheduler-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/dag-processor/dag-processor-deployment.yaml", ], ) expected_vars = [ "AIRFLOW__CORE__FERNET_KEY", "AIRFLOW_CONN_AIRFLOW_DB", "AIRFLOW__CELERY__BROKER_URL", ] expected_vars_in_worker = ["DUMB_INIT_SETSID"] + expected_vars for doc in docs: component = doc["metadata"]["labels"]["component"] variables = expected_vars_in_worker if component == "worker" else expected_vars assert variables == jmespath.search( "spec.template.spec.containers[0].env[*].name", doc ), f"Wrong vars in {component}" def test_have_all_variables(self): docs = render_chart( values={}, show_only=[ "templates/scheduler/scheduler-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/dag-processor/dag-processor-deployment.yaml", ], ) expected_vars = [ "AIRFLOW__CORE__FERNET_KEY", "AIRFLOW__CORE__SQL_ALCHEMY_CONN", 
"AIRFLOW__DATABASE__SQL_ALCHEMY_CONN", "AIRFLOW_CONN_AIRFLOW_DB", "AIRFLOW__WEBSERVER__SECRET_KEY", "AIRFLOW__CELERY__BROKER_URL", ] expected_vars_in_worker = ["DUMB_INIT_SETSID"] + expected_vars for doc in docs: component = doc["metadata"]["labels"]["component"] variables = expected_vars_in_worker if component == "worker" else expected_vars assert variables == jmespath.search( "spec.template.spec.containers[0].env[*].name", doc ), f"Wrong vars in {component}" def test_have_all_config_mounts_on_init_containers(self): docs = render_chart( values={ "dagProcessor": {"enabled": True}, }, show_only=[ "templates/scheduler/scheduler-deployment.yaml", "templates/workers/worker-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/dag-processor/dag-processor-deployment.yaml", ], ) assert 5 == len(docs) expected_mount = { "subPath": "airflow.cfg", "name": "config", "readOnly": True, "mountPath": "/opt/airflow/airflow.cfg", } for doc in docs: assert expected_mount in jmespath.search("spec.template.spec.initContainers[0].volumeMounts", doc) def test_priority_class_name(self): docs = render_chart( values={ "flower": {"enabled": True, "priorityClassName": "low-priority-flower"}, "pgbouncer": {"enabled": True, "priorityClassName": "low-priority-pgbouncer"}, "scheduler": {"priorityClassName": "low-priority-scheduler"}, "statsd": {"priorityClassName": "low-priority-statsd"}, "triggerer": {"priorityClassName": "low-priority-triggerer"}, "dagProcessor": {"priorityClassName": "low-priority-dag-processor"}, "webserver": {"priorityClassName": "low-priority-webserver"}, "workers": {"priorityClassName": "low-priority-worker"}, }, show_only=[ "templates/flower/flower-deployment.yaml", "templates/pgbouncer/pgbouncer-deployment.yaml", "templates/scheduler/scheduler-deployment.yaml", "templates/statsd/statsd-deployment.yaml", "templates/triggerer/triggerer-deployment.yaml", "templates/dag-processor/dag-processor-deployment.yaml", "templates/webserver/webserver-deployment.yaml", "templates/workers/worker-deployment.yaml", ], ) assert 7 == len(docs) for doc in docs: component = doc["metadata"]["labels"]["component"] priority = doc["spec"]["template"]["spec"]["priorityClassName"] assert priority == f"low-priority-{component}"
17,134
40.690998
110
py
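The common tests above rely heavily on pytest parametrization to run one assertion body across several value combinations and templates. The stand-alone sketch below shows the same parametrize-plus-membership-check shape on a toy function; `build_mounts` is a hypothetical helper written only for this example and is not part of the chart test utilities.

from __future__ import annotations

import pytest

def build_mounts(git_sync: bool, persistence: bool) -> list[dict]:
    # Hypothetical reduction of the chart behaviour: gitSync wins and makes the dags
    # mount read-only, while plain persistence leaves it writable.
    read_only = True if git_sync else not persistence
    return [{"mountPath": "/opt/airflow/dags", "name": "dags", "readOnly": read_only}]

@pytest.mark.parametrize(
    "git_sync, persistence, expected_read_only",
    [
        (True, False, True),
        (False, True, False),
        (True, True, True),
    ],
)
def test_dags_mount_read_only(git_sync, persistence, expected_read_only):
    expected = {"mountPath": "/opt/airflow/dags", "name": "dags", "readOnly": expected_read_only}
    assert expected in build_mounts(git_sync, persistence)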
airflow
airflow-main/helm_tests/airflow_aux/test_pod_launcher_role.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import jmespath import pytest from tests.charts.helm_template_generator import render_chart class TestPodLauncher: """Tests pod launcher.""" @pytest.mark.parametrize( "executor, rbac, allow, expected_accounts", [ ("CeleryKubernetesExecutor", True, True, ["scheduler", "worker"]), ("KubernetesExecutor", True, True, ["scheduler", "worker"]), ("CeleryExecutor", True, True, ["worker"]), ("LocalExecutor", True, True, ["scheduler"]), ("LocalExecutor", False, False, []), ], ) def test_pod_launcher_role(self, executor, rbac, allow, expected_accounts): docs = render_chart( values={ "rbac": {"create": rbac}, "allowPodLaunching": allow, "executor": executor, }, show_only=["templates/rbac/pod-launcher-rolebinding.yaml"], ) if expected_accounts: for idx, suffix in enumerate(expected_accounts): assert f"release-name-airflow-{suffix}" == jmespath.search(f"subjects[{idx}].name", docs[0]) else: assert [] == docs
1,989
37.269231
108
py
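The pod-launcher test above indexes into the RoleBinding's subjects list with a JMESPath expression built per expected service account. The sketch below replays that indexing on a hypothetical, hand-built RoleBinding document rather than a rendered one.

import jmespath

rolebinding = {
    "kind": "RoleBinding",
    "subjects": [
        {"kind": "ServiceAccount", "name": "release-name-airflow-scheduler"},
        {"kind": "ServiceAccount", "name": "release-name-airflow-worker"},
    ],
}

# Build the path per index, exactly like the loop in test_pod_launcher_role.
for idx, suffix in enumerate(["scheduler", "worker"]):
    assert jmespath.search(f"subjects[{idx}].name", rolebinding) == f"release-name-airflow-{suffix}"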
airflow
airflow-main/helm_tests/airflow_aux/test_pod_template_file.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from __future__ import annotations import re from pathlib import Path from shutil import copyfile, copytree from tempfile import TemporaryDirectory import jmespath import pytest from tests.charts.helm_template_generator import render_chart @pytest.fixture(scope="class", autouse=True) def isolate_chart(request): chart_dir = Path(__file__).parents[2] / "chart" with TemporaryDirectory(prefix=request.cls.__name__) as tmp_dir: temp_chart_dir = Path(tmp_dir) / "chart" copytree(chart_dir, temp_chart_dir) copyfile( temp_chart_dir / "files/pod-template-file.kubernetes-helm-yaml", temp_chart_dir / "templates/pod-template-file.yaml", ) request.cls.temp_chart_dir = str(temp_chart_dir) yield class TestPodTemplateFile: """Tests pod template file.""" def test_should_work(self): docs = render_chart( values={}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert re.search("Pod", docs[0]["kind"]) assert jmespath.search("spec.containers[0].image", docs[0]) is not None assert "base" == jmespath.search("spec.containers[0].name", docs[0]) def test_should_add_an_init_container_if_git_sync_is_true(self): docs = render_chart( values={ "images": { "gitSync": { "repository": "test-registry/test-repo", "tag": "test-tag", "pullPolicy": "Always", } }, "dags": { "gitSync": { "enabled": True, "containerName": "git-sync-test", "wait": 66, "maxFailures": 70, "subPath": "path1/path2", "rev": "HEAD", "depth": 1, "repo": "https://github.com/apache/airflow.git", "branch": "test-branch", "sshKeySecret": None, "credentialsSecret": None, "knownHosts": None, } }, }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert re.search("Pod", docs[0]["kind"]) assert { "name": "git-sync-test-init", "securityContext": {"runAsUser": 65533}, "image": "test-registry/test-repo:test-tag", "imagePullPolicy": "Always", "env": [ {"name": "GIT_SYNC_REV", "value": "HEAD"}, {"name": "GIT_SYNC_BRANCH", "value": "test-branch"}, {"name": "GIT_SYNC_REPO", "value": "https://github.com/apache/airflow.git"}, {"name": "GIT_SYNC_DEPTH", "value": "1"}, {"name": "GIT_SYNC_ROOT", "value": "/git"}, {"name": "GIT_SYNC_DEST", "value": "repo"}, {"name": "GIT_SYNC_ADD_USER", "value": "true"}, {"name": "GIT_SYNC_WAIT", "value": "66"}, {"name": "GIT_SYNC_MAX_SYNC_FAILURES", "value": "70"}, {"name": "GIT_SYNC_ONE_TIME", "value": "true"}, ], "volumeMounts": [{"mountPath": "/git", "name": "dags"}], "resources": {}, } == jmespath.search("spec.initContainers[0]", docs[0]) def test_should_not_add_init_container_if_dag_persistence_is_true(self): docs = render_chart( values={ "dags": { "persistence": {"enabled": True}, "gitSync": {"enabled": True}, } }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert 
jmespath.search("spec.initContainers", docs[0]) is None @pytest.mark.parametrize( "dag_values, expected_read_only", [ ({"gitSync": {"enabled": True}}, True), ({"persistence": {"enabled": True}}, False), ( { "gitSync": {"enabled": True}, "persistence": {"enabled": True}, }, True, ), ], ) def test_dags_mount(self, dag_values, expected_read_only): docs = render_chart( values={"dags": dag_values}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert { "mountPath": "/opt/airflow/dags", "name": "dags", "readOnly": expected_read_only, } in jmespath.search("spec.containers[0].volumeMounts", docs[0]) def test_should_add_global_volume_and_global_volume_mount(self): expected_volume = {"name": "test-volume", "emptyDir": {}} expected_volume_mount = {"name": "test-volume", "mountPath": "/opt/test"} docs = render_chart( values={ "volumes": [expected_volume], "volumeMounts": [expected_volume_mount], }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert expected_volume in jmespath.search("spec.volumes", docs[0]) assert expected_volume_mount in jmespath.search("spec.containers[0].volumeMounts", docs[0]) def test_validate_if_ssh_params_are_added(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "containerName": "git-sync-test", "sshKeySecret": "ssh-secret", "knownHosts": None, "branch": "test-branch", } } }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert {"name": "GIT_SSH_KEY_FILE", "value": "/etc/git-secret/ssh"} in jmespath.search( "spec.initContainers[0].env", docs[0] ) assert {"name": "GIT_SYNC_SSH", "value": "true"} in jmespath.search( "spec.initContainers[0].env", docs[0] ) assert {"name": "GIT_KNOWN_HOSTS", "value": "false"} in jmespath.search( "spec.initContainers[0].env", docs[0] ) assert { "name": "git-sync-ssh-key", "mountPath": "/etc/git-secret/ssh", "subPath": "gitSshKey", "readOnly": True, } in jmespath.search("spec.initContainers[0].volumeMounts", docs[0]) assert { "name": "git-sync-ssh-key", "secret": {"secretName": "ssh-secret", "defaultMode": 288}, } in jmespath.search("spec.volumes", docs[0]) def test_validate_if_ssh_known_hosts_are_added(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "containerName": "git-sync-test", "sshKeySecret": "ssh-secret", "knownHosts": "github.com ssh-rsa AAAABdummy", "branch": "test-branch", } } }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert {"name": "GIT_KNOWN_HOSTS", "value": "true"} in jmespath.search( "spec.initContainers[0].env", docs[0] ) assert { "name": "GIT_SSH_KNOWN_HOSTS_FILE", "value": "/etc/git-secret/known_hosts", } in jmespath.search("spec.initContainers[0].env", docs[0]) assert { "name": "config", "mountPath": "/etc/git-secret/known_hosts", "subPath": "known_hosts", "readOnly": True, } in jmespath.search("spec.initContainers[0].volumeMounts", docs[0]) def test_should_set_username_and_pass_env_variables(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "credentialsSecret": "user-pass-secret", "sshKeySecret": None, } } }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert { "name": "GIT_SYNC_USERNAME", "valueFrom": {"secretKeyRef": {"name": "user-pass-secret", "key": "GIT_SYNC_USERNAME"}}, } in jmespath.search("spec.initContainers[0].env", docs[0]) assert { "name": "GIT_SYNC_PASSWORD", "valueFrom": {"secretKeyRef": {"name": "user-pass-secret", "key": "GIT_SYNC_PASSWORD"}}, } 
in jmespath.search("spec.initContainers[0].env", docs[0]) def test_should_set_the_dags_volume_claim_correctly_when_using_an_existing_claim(self): docs = render_chart( values={"dags": {"persistence": {"enabled": True, "existingClaim": "test-claim"}}}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert {"name": "dags", "persistentVolumeClaim": {"claimName": "test-claim"}} in jmespath.search( "spec.volumes", docs[0] ) def test_should_use_empty_dir_for_gitsync_without_persistence(self): docs = render_chart( values={"dags": {"gitSync": {"enabled": True}}}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert {"name": "dags", "emptyDir": {}} in jmespath.search("spec.volumes", docs[0]) @pytest.mark.parametrize( "log_persistence_values, expected", [ ({"enabled": False}, {"emptyDir": {}}), ({"enabled": True}, {"persistentVolumeClaim": {"claimName": "release-name-logs"}}), ( {"enabled": True, "existingClaim": "test-claim"}, {"persistentVolumeClaim": {"claimName": "test-claim"}}, ), ], ) def test_logs_persistence_changes_volume(self, log_persistence_values, expected): docs = render_chart( values={"logs": {"persistence": log_persistence_values}}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert {"name": "logs", **expected} in jmespath.search("spec.volumes", docs[0]) def test_should_set_a_custom_image_in_pod_template(self): docs = render_chart( values={ "images": { "pod_template": {"repository": "dummy_image", "tag": "latest", "pullPolicy": "Always"} } }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert re.search("Pod", docs[0]["kind"]) assert "dummy_image:latest" == jmespath.search("spec.containers[0].image", docs[0]) assert "Always" == jmespath.search("spec.containers[0].imagePullPolicy", docs[0]) assert "base" == jmespath.search("spec.containers[0].name", docs[0]) def test_mount_airflow_cfg(self): docs = render_chart( values={}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert re.search("Pod", docs[0]["kind"]) assert {"configMap": {"name": "release-name-airflow-config"}, "name": "config"} in jmespath.search( "spec.volumes", docs[0] ) assert { "name": "config", "mountPath": "/opt/airflow/airflow.cfg", "subPath": "airflow.cfg", "readOnly": True, } in jmespath.search("spec.containers[0].volumeMounts", docs[0]) def test_should_use_global_affinity_tolerations_and_node_selector(self): docs = render_chart( values={ "executor": "KubernetesExecutor", "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "nodeSelector": {"diskType": "ssd"}, }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert re.search("Pod", docs[0]["kind"]) assert "foo" == jmespath.search( "spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." 
"key", docs[0], ) assert "ssd" == jmespath.search( "spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.tolerations[0].key", docs[0], ) def test_should_create_valid_affinity_tolerations_topology_spread_constraints_and_node_selector(self): docs = render_chart( values={ "executor": "KubernetesExecutor", "workers": { "affinity": { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } }, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [ { "maxSkew": 1, "topologyKey": "foo", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } ], "nodeSelector": {"diskType": "ssd"}, }, }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert "Pod" == jmespath.search("kind", docs[0]) assert "foo" == jmespath.search( "spec.affinity.nodeAffinity." "requiredDuringSchedulingIgnoredDuringExecution." "nodeSelectorTerms[0]." "matchExpressions[0]." "key", docs[0], ) assert "ssd" == jmespath.search( "spec.nodeSelector.diskType", docs[0], ) assert "dynamic-pods" == jmespath.search( "spec.tolerations[0].key", docs[0], ) assert "foo" == jmespath.search( "spec.topologySpreadConstraints[0].topologyKey", docs[0], ) def test_affinity_tolerations_topology_spread_constraints_and_node_selector_precedence(self): """When given both global and worker affinity etc, worker affinity etc is used.""" expected_affinity = { "nodeAffinity": { "requiredDuringSchedulingIgnoredDuringExecution": { "nodeSelectorTerms": [ { "matchExpressions": [ {"key": "foo", "operator": "In", "values": ["true"]}, ] } ] } } } expected_topology_spread_constraints = { "maxSkew": 1, "topologyKey": "foo", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } docs = render_chart( values={ "workers": { "affinity": expected_affinity, "tolerations": [ {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [expected_topology_spread_constraints], "nodeSelector": {"type": "ssd"}, }, "affinity": { "nodeAffinity": { "preferredDuringSchedulingIgnoredDuringExecution": [ { "weight": 1, "preference": { "matchExpressions": [ {"key": "not-me", "operator": "In", "values": ["true"]}, ] }, } ] } }, "tolerations": [ {"key": "not-me", "operator": "Equal", "value": "true", "effect": "NoSchedule"} ], "topologySpreadConstraints": [ { "maxSkew": 1, "topologyKey": "not-me", "whenUnsatisfiable": "ScheduleAnyway", "labelSelector": {"matchLabels": {"tier": "airflow"}}, } ], "nodeSelector": {"type": "not-me"}, }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert expected_affinity == jmespath.search("spec.affinity", docs[0]) assert "ssd" == jmespath.search( "spec.nodeSelector.type", docs[0], ) tolerations = jmespath.search("spec.tolerations", docs[0]) assert 1 == len(tolerations) assert "dynamic-pods" == tolerations[0]["key"] assert expected_topology_spread_constraints == jmespath.search( "spec.topologySpreadConstraints[0]", docs[0] ) def test_should_not_create_default_affinity(self): docs = render_chart(show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir) assert {} == jmespath.search("spec.affinity", docs[0]) def test_should_add_fsgroup_to_the_pod_template(self): docs = render_chart( values={"gid": 5000}, 
show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert jmespath.search("spec.securityContext.fsGroup", docs[0]) == 5000 def test_should_create_valid_volume_mount_and_volume(self): docs = render_chart( values={ "workers": { "extraVolumes": [{"name": "test-volume-{{ .Chart.Name }}", "emptyDir": {}}], "extraVolumeMounts": [ {"name": "test-volume-{{ .Chart.Name }}", "mountPath": "/opt/test"} ], } }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert "test-volume-airflow" in jmespath.search( "spec.volumes[*].name", docs[0], ) assert "test-volume-airflow" in jmespath.search( "spec.containers[0].volumeMounts[*].name", docs[0], ) def test_should_add_env_for_gitsync(self): docs = render_chart( values={ "dags": { "gitSync": { "enabled": True, "env": [{"name": "FOO", "value": "bar"}], } }, }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert {"name": "FOO", "value": "bar"} in jmespath.search("spec.initContainers[0].env", docs[0]) def test_no_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": None}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) volume_mounts = jmespath.search("spec.containers[0].volumeMounts", docs[0]) assert "airflow_local_settings.py" not in str(volume_mounts) def test_airflow_local_settings(self): docs = render_chart( values={"airflowLocalSettings": "# Well hello!"}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert { "name": "config", "mountPath": "/opt/airflow/config/airflow_local_settings.py", "subPath": "airflow_local_settings.py", "readOnly": True, } in jmespath.search("spec.containers[0].volumeMounts", docs[0]) def test_airflow_pod_annotations(self): docs = render_chart( values={"airflowPodAnnotations": {"my_annotation": "annotated!"}}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) annotations = jmespath.search("metadata.annotations", docs[0]) assert "my_annotation" in annotations assert "annotated!" in annotations["my_annotation"] def test_workers_pod_annotations(self): docs = render_chart( values={"workers": {"podAnnotations": {"my_annotation": "annotated!"}}}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) annotations = jmespath.search("metadata.annotations", docs[0]) assert "my_annotation" in annotations assert "annotated!" 
in annotations["my_annotation"] def test_airflow_and_workers_pod_annotations(self): # should give preference to workers.podAnnotations docs = render_chart( values={ "airflowPodAnnotations": {"my_annotation": "airflowPodAnnotations"}, "workers": {"podAnnotations": {"my_annotation": "workerPodAnnotations"}}, }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) annotations = jmespath.search("metadata.annotations", docs[0]) assert "my_annotation" in annotations assert "workerPodAnnotations" in annotations["my_annotation"] def test_should_add_extra_init_containers(self): docs = render_chart( values={ "workers": { "extraInitContainers": [ {"name": "test-init-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert { "name": "test-init-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.initContainers[-1]", docs[0]) def test_should_add_extra_containers(self): docs = render_chart( values={ "workers": { "extraContainers": [ {"name": "test-container", "image": "test-registry/test-repo:test-tag"} ], }, }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert { "name": "test-container", "image": "test-registry/test-repo:test-tag", } == jmespath.search("spec.containers[-1]", docs[0]) def test_should_add_pod_labels(self): docs = render_chart( values={"labels": {"label1": "value1", "label2": "value2"}}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert { "label1": "value1", "label2": "value2", "release": "release-name", "component": "worker", "tier": "airflow", } == jmespath.search("metadata.labels", docs[0]) def test_should_add_extraEnvs(self): docs = render_chart( values={"workers": {"env": [{"name": "TEST_ENV_1", "value": "test_env_1"}]}}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search( "spec.containers[0].env", docs[0] ) def test_should_add_component_specific_labels(self): docs = render_chart( values={ "executor": "KubernetesExecutor", "workers": { "labels": {"test_label": "test_label_value"}, }, }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert "test_label" in jmespath.search("metadata.labels", docs[0]) assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value" def test_should_add_resources(self): docs = render_chart( values={ "workers": { "resources": { "requests": {"memory": "2Gi", "cpu": "1"}, "limits": {"memory": "3Gi", "cpu": "2"}, } } }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert { "limits": { "cpu": "2", "memory": "3Gi", }, "requests": { "cpu": "1", "memory": "2Gi", }, } == jmespath.search("spec.containers[0].resources", docs[0]) def test_empty_resources(self): docs = render_chart( values={}, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert {} == jmespath.search("spec.containers[0].resources", docs[0]) def test_workers_host_aliases(self): docs = render_chart( values={ "workers": { "hostAliases": [{"ip": "127.0.0.2", "hostnames": ["test.hostname"]}], }, }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert "127.0.0.2" == jmespath.search("spec.hostAliases[0].ip", docs[0]) assert "test.hostname" == jmespath.search("spec.hostAliases[0].hostnames[0]", docs[0]) def 
test_workers_priority_class_name(self): docs = render_chart( values={ "workers": { "priorityClassName": "test-priority", }, }, show_only=["templates/pod-template-file.yaml"], chart_dir=self.temp_chart_dir, ) assert "test-priority" == jmespath.search("spec.priorityClassName", docs[0])
28,951
37.448871
109
py
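The pod-template tests can only render files/pod-template-file.kubernetes-helm-yaml after the class fixture copies the chart into a temporary directory and promotes that file into templates/. A minimal sketch of that setup step, using only the standard library, is below; the source chart path is a placeholder and no Helm rendering is attempted here.

from pathlib import Path
from shutil import copyfile, copytree
from tempfile import TemporaryDirectory

chart_dir = Path("chart")  # placeholder: wherever the chart sources actually live

with TemporaryDirectory(prefix="PodTemplateTest") as tmp_dir:
    temp_chart_dir = Path(tmp_dir) / "chart"
    # Copy the whole chart so the original stays untouched, then expose the pod
    # template as a regular template that render_chart can pick up via show_only.
    copytree(chart_dir, temp_chart_dir)
    copyfile(
        temp_chart_dir / "files/pod-template-file.kubernetes-helm-yaml",
        temp_chart_dir / "templates/pod-template-file.yaml",
    )
    # str(temp_chart_dir) can now be passed as chart_dir= to render_chart.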
airflow
airflow-main/helm_tests/airflow_aux/__init__.py
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License.
785
45.235294
62
py
airflow
airflow-main/helm_tests/airflow_aux/test_cleanup_pods.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath
import pytest

from tests.charts.helm_template_generator import render_chart


class TestCleanupDeployment:
    """Tests cleanup pods deployments."""

    def test_should_have_a_schedule_with_defaults(self):
        doc = render_chart(
            values={
                "cleanup": {"enabled": True},
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )[0]
        assert doc["spec"]["schedule"] == "*/15 * * * *"

    cron_tests = [
        ("release-name", "*/5 * * * *", "*/5 * * * *"),
        ("something-else", "@hourly", "@hourly"),
        (
            "custom-name",
            '{{- add 3 (regexFind ".$" (adler32sum .Release.Name)) -}}-59/15 * * * *',
            "7-59/15 * * * *",
        ),
        (
            "airflow-rules",
            '{{- add 3 (regexFind ".$" (adler32sum .Release.Name)) -}}-59/15 * * * *',
            "10-59/15 * * * *",
        ),
    ]

    @pytest.mark.parametrize(
        "release_name,schedule_value,schedule_result",
        cron_tests,
        ids=[x[0] for x in cron_tests],
    )
    def test_should_work_with_custom_schedule_string(self, release_name, schedule_value, schedule_result):
        doc = render_chart(
            name=release_name,
            values={
                "cleanup": {
                    "enabled": True,
                    "schedule": schedule_value,
                },
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )[0]
        assert doc["spec"]["schedule"] == schedule_result


class TestCleanupPods:
    """Tests cleanup of pods."""

    def test_should_create_cronjob_for_enabled_cleanup(self):
        docs = render_chart(
            values={
                "cleanup": {"enabled": True},
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )

        assert "airflow-cleanup-pods" == jmespath.search(
            "spec.jobTemplate.spec.template.spec.containers[0].name", docs[0]
        )
        assert jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].image", docs[0]).startswith(
            "apache/airflow"
        )
        assert {"name": "config", "configMap": {"name": "release-name-airflow-config"}} in jmespath.search(
            "spec.jobTemplate.spec.template.spec.volumes", docs[0]
        )
        assert {
            "name": "config",
            "mountPath": "/opt/airflow/airflow.cfg",
            "subPath": "airflow.cfg",
            "readOnly": True,
        } in jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].volumeMounts", docs[0])

    def test_should_pass_validation_with_v1beta1_api(self):
        render_chart(
            values={"cleanup": {"enabled": True}},
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
            kubernetes_version="1.16.0",
        )  # checks that no validation exception is raised

    def test_should_change_image_when_set_airflow_image(self):
        docs = render_chart(
            values={
                "cleanup": {"enabled": True},
                "images": {"airflow": {"repository": "airflow", "tag": "test"}},
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )

        assert "airflow:test" == jmespath.search(
            "spec.jobTemplate.spec.template.spec.containers[0].image", docs[0]
        )

    def test_should_create_valid_affinity_tolerations_and_node_selector(self):
        docs = render_chart(
            values={
                "cleanup": {
                    "enabled": True,
                    "affinity": {
                        "nodeAffinity": {
                            "requiredDuringSchedulingIgnoredDuringExecution": {
                                "nodeSelectorTerms": [
                                    {
                                        "matchExpressions": [
                                            {"key": "foo", "operator": "In", "values": ["true"]},
                                        ]
                                    }
                                ]
                            }
                        }
                    },
                    "tolerations": [
                        {"key": "dynamic-pods", "operator": "Equal", "value": "true", "effect": "NoSchedule"}
                    ],
                    "nodeSelector": {"diskType": "ssd"},
                }
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )

        assert "CronJob" == jmespath.search("kind", docs[0])
        assert "foo" == jmespath.search(
            "spec.jobTemplate.spec.template.spec.affinity.nodeAffinity."
            "requiredDuringSchedulingIgnoredDuringExecution."
            "nodeSelectorTerms[0]."
            "matchExpressions[0]."
            "key",
            docs[0],
        )
        assert "ssd" == jmespath.search(
            "spec.jobTemplate.spec.template.spec.nodeSelector.diskType",
            docs[0],
        )
        assert "dynamic-pods" == jmespath.search(
            "spec.jobTemplate.spec.template.spec.tolerations[0].key",
            docs[0],
        )

    def test_default_command_and_args(self):
        docs = render_chart(
            values={"cleanup": {"enabled": True}}, show_only=["templates/cleanup/cleanup-cronjob.yaml"]
        )

        assert jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].command", docs[0]) is None
        assert ["bash", "-c", "exec airflow kubernetes cleanup-pods --namespace=default"] == jmespath.search(
            "spec.jobTemplate.spec.template.spec.containers[0].args", docs[0]
        )

    def test_should_add_extraEnvs(self):
        docs = render_chart(
            values={
                "cleanup": {
                    "enabled": True,
                    "env": [{"name": "TEST_ENV_1", "value": "test_env_1"}],
                },
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )

        assert {"name": "TEST_ENV_1", "value": "test_env_1"} in jmespath.search(
            "spec.jobTemplate.spec.template.spec.containers[0].env", docs[0]
        )

    @pytest.mark.parametrize("command", [None, ["custom", "command"]])
    @pytest.mark.parametrize("args", [None, ["custom", "args"]])
    def test_command_and_args_overrides(self, command, args):
        docs = render_chart(
            values={"cleanup": {"enabled": True, "command": command, "args": args}},
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )

        assert command == jmespath.search(
            "spec.jobTemplate.spec.template.spec.containers[0].command", docs[0]
        )
        assert args == jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].args", docs[0])

    def test_command_and_args_overrides_are_templated(self):
        docs = render_chart(
            values={
                "cleanup": {
                    "enabled": True,
                    "command": ["{{ .Release.Name }}"],
                    "args": ["{{ .Release.Service }}"],
                }
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )

        assert ["release-name"] == jmespath.search(
            "spec.jobTemplate.spec.template.spec.containers[0].command", docs[0]
        )
        assert ["Helm"] == jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].args", docs[0])

    def test_should_set_labels_to_jobs_from_cronjob(self):
        docs = render_chart(
            values={
                "cleanup": {"enabled": True},
                "labels": {"project": "airflow"},
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )

        assert {
            "tier": "airflow",
            "component": "airflow-cleanup-pods",
            "release": "release-name",
            "project": "airflow",
        } == jmespath.search("spec.jobTemplate.spec.template.metadata.labels", docs[0])

    def test_should_add_component_specific_labels(self):
        docs = render_chart(
            values={
                "cleanup": {
                    "enabled": True,
                    "labels": {"test_label": "test_label_value"},
                },
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )

        assert "test_label" in jmespath.search("spec.jobTemplate.spec.template.metadata.labels", docs[0])
        assert (
            jmespath.search("spec.jobTemplate.spec.template.metadata.labels", docs[0])["test_label"]
            == "test_label_value"
        )

    def test_should_add_component_specific_annotations(self):
        docs = render_chart(
            values={
                "cleanup": {
                    "enabled": True,
                    "jobAnnotations": {"test_cronjob_annotation": "test_cronjob_annotation_value"},
                    "podAnnotations": {"test_pod_annotation": "test_pod_annotation_value"},
                },
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )

        assert "test_cronjob_annotation" in jmespath.search("metadata.annotations", docs[0])
        assert (
            "test_cronjob_annotation_value"
            == jmespath.search("metadata.annotations", docs[0])["test_cronjob_annotation"]
        )
        assert "test_pod_annotation" in jmespath.search(
            "spec.jobTemplate.spec.template.metadata.annotations", docs[0]
        )
        assert (
            "test_pod_annotation_value"
            == jmespath.search("spec.jobTemplate.spec.template.metadata.annotations", docs[0])[
                "test_pod_annotation"
            ]
        )

    def test_cleanup_resources_are_configurable(self):
        resources = {
            "requests": {
                "cpu": "128m",
                "memory": "256Mi",
            },
            "limits": {
                "cpu": "256m",
                "memory": "512Mi",
            },
        }
        docs = render_chart(
            values={
                "cleanup": {
                    "enabled": True,
                    "resources": resources,
                },
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )

        assert resources == jmespath.search(
            "spec.jobTemplate.spec.template.spec.containers[0].resources", docs[0]
        )

    def test_should_set_job_history_limits(self):
        docs = render_chart(
            values={
                "cleanup": {
                    "enabled": True,
                    "failedJobsHistoryLimit": 2,
                    "successfulJobsHistoryLimit": 4,
                },
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )

        assert 2 == jmespath.search("spec.failedJobsHistoryLimit", docs[0])
        assert 4 == jmespath.search("spec.successfulJobsHistoryLimit", docs[0])

    def test_no_airflow_local_settings(self):
        docs = render_chart(
            values={
                "cleanup": {"enabled": True},
                "airflowLocalSettings": None,
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )
        volume_mounts = jmespath.search(
            "spec.jobTemplate.spec.template.spec.containers[0].volumeMounts", docs[0]
        )
        assert "airflow_local_settings.py" not in str(volume_mounts)

    def test_airflow_local_settings(self):
        docs = render_chart(
            values={
                "cleanup": {"enabled": True},
                "airflowLocalSettings": "# Well hello!",
            },
            show_only=["templates/cleanup/cleanup-cronjob.yaml"],
        )
        assert {
            "name": "config",
            "mountPath": "/opt/airflow/config/airflow_local_settings.py",
            "subPath": "airflow_local_settings.py",
            "readOnly": True,
        } in jmespath.search("spec.jobTemplate.spec.template.spec.containers[0].volumeMounts", docs[0])


class TestCleanupServiceAccount:
    """Tests cleanup of service accounts."""

    def test_should_add_component_specific_labels(self):
        docs = render_chart(
            values={
                "cleanup": {
                    "enabled": True,
                    "labels": {"test_label": "test_label_value"},
                },
            },
            show_only=["templates/cleanup/cleanup-serviceaccount.yaml"],
        )

        assert "test_label" in jmespath.search("metadata.labels", docs[0])
        assert jmespath.search("metadata.labels", docs[0])["test_label"] == "test_label_value"
13,502
36.096154
110
py
airflow
airflow-main/helm_tests/airflow_aux/test_configmap.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import jmespath
import pytest

from tests.charts.helm_template_generator import render_chart


class TestConfigmap:
    """Tests configmaps."""

    def test_single_annotation(self):
        docs = render_chart(
            values={
                "airflowConfigAnnotations": {"key": "value"},
            },
            show_only=["templates/configmaps/configmap.yaml"],
        )

        annotations = jmespath.search("metadata.annotations", docs[0])
        assert "value" == annotations.get("key")

    def test_multiple_annotations(self):
        docs = render_chart(
            values={
                "airflowConfigAnnotations": {"key": "value", "key-two": "value-two"},
            },
            show_only=["templates/configmaps/configmap.yaml"],
        )

        annotations = jmespath.search("metadata.annotations", docs[0])
        assert "value" == annotations.get("key")
        assert "value-two" == annotations.get("key-two")

    @pytest.mark.parametrize(
        "af_version, secret_key, secret_key_name, expected",
        [
            ("2.2.0", None, None, True),
            ("2.2.0", "foo", None, False),
            ("2.2.0", None, "foo", False),
            ("2.1.3", None, None, False),
            ("2.1.3", "foo", None, False),
        ],
    )
    def test_default_airflow_local_settings(self, af_version, secret_key, secret_key_name, expected):
        docs = render_chart(
            values={
                "airflowVersion": af_version,
                "webserverSecretKey": secret_key,
                "webserverSecretKeySecretName": secret_key_name,
            },
            show_only=["templates/configmaps/configmap.yaml"],
        )
        if expected:
            assert (
                "Usage of a dynamic webserver secret key detected"
                in jmespath.search('data."airflow_local_settings.py"', docs[0]).strip()
            )
        else:
            assert "" == jmespath.search('data."airflow_local_settings.py"', docs[0]).strip()

    def test_airflow_local_settings(self):
        docs = render_chart(
            values={"airflowLocalSettings": "# Well hello {{ .Release.Name }}!"},
            show_only=["templates/configmaps/configmap.yaml"],
        )
        assert (
            "# Well hello release-name!"
            == jmespath.search('data."airflow_local_settings.py"', docs[0]).strip()
        )

    def test_kerberos_config_available_with_celery_executor(self):
        docs = render_chart(
            values={
                "executor": "CeleryExecutor",
                "kerberos": {"enabled": True, "config": "krb5\ncontent"},
            },
            show_only=["templates/configmaps/configmap.yaml"],
        )

        assert jmespath.search('data."krb5.conf"', docs[0]) == "krb5\ncontent"

    def test_pod_template_is_templated(self):
        docs = render_chart(
            values={
                "executor": "KubernetesExecutor",
                "podTemplate": """
apiVersion: v1
kind: Pod
metadata:
  name: example-name
  labels:
    mylabel: {{ .Release.Name }}
""",
            },
            show_only=["templates/configmaps/configmap.yaml"],
        )

        pod_template_file = jmespath.search('data."pod_template_file.yaml"', docs[0])
        assert "mylabel: release-name" in pod_template_file
4,128
33.991525
101
py
airflow
airflow-main/helm_tests/airflow_aux/test_extra_env_env_from.py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import annotations

import textwrap
from typing import Any

import jmespath
import pytest
import yaml

from tests.charts.helm_template_generator import prepare_k8s_lookup_dict, render_chart

RELEASE_NAME = "test-extra-env-env-from"

# Test Params: k8s object key and paths with expected env / envFrom
PARAMS = [
    (
        ("Job", f"{RELEASE_NAME}-create-user"),
        ("spec.template.spec.containers[0]",),
    ),
    (
        ("Job", f"{RELEASE_NAME}-run-airflow-migrations"),
        ("spec.template.spec.containers[0]",),
    ),
    (
        ("Deployment", f"{RELEASE_NAME}-scheduler"),
        (
            "spec.template.spec.initContainers[0]",
            "spec.template.spec.containers[0]",
        ),
    ),
    (
        ("StatefulSet", f"{RELEASE_NAME}-worker"),
        (
            "spec.template.spec.initContainers[0]",
            "spec.template.spec.containers[0]",
        ),
    ),
    (
        ("Deployment", f"{RELEASE_NAME}-webserver"),
        (
            "spec.template.spec.initContainers[0]",
            "spec.template.spec.containers[0]",
        ),
    ),
    (
        ("StatefulSet", f"{RELEASE_NAME}-triggerer"),
        (
            "spec.template.spec.initContainers[0]",
            "spec.template.spec.containers[0]",
        ),
    ),
    (
        ("Deployment", f"{RELEASE_NAME}-flower"),
        ("spec.template.spec.containers[0]",),
    ),
]


class TestExtraEnvEnvFrom:
    """Tests extra env from."""

    k8s_objects: list[dict[str, Any]]
    k8s_objects_by_key: dict[tuple[str, str], dict[str, Any]]

    @classmethod
    def setup_class(cls) -> None:
        values_str = textwrap.dedent(
            """
            airflowVersion: "2.6.0"
            flower:
              enabled: true
            extraEnvFrom: |
              - secretRef:
                  name: '{{ .Release.Name }}-airflow-connections'
              - configMapRef:
                  name: '{{ .Release.Name }}-airflow-variables'
            extraEnv: |
              - name: PLATFORM
                value: FR
              - name: TEST
                valueFrom:
                  secretKeyRef:
                    name: '{{ .Release.Name }}-some-secret'
                    key: connection
            """
        )
        values = yaml.safe_load(values_str)
        cls.k8s_objects = render_chart(RELEASE_NAME, values=values)
        cls.k8s_objects_by_key = prepare_k8s_lookup_dict(cls.k8s_objects)

    @pytest.mark.parametrize("k8s_obj_key, env_paths", PARAMS)
    def test_extra_env(self, k8s_obj_key, env_paths):
        expected_env_as_str = textwrap.dedent(
            f"""
            - name: PLATFORM
              value: FR
            - name: TEST
              valueFrom:
                secretKeyRef:
                  key: connection
                  name: {RELEASE_NAME}-some-secret
            """
        ).lstrip()
        k8s_object = self.k8s_objects_by_key[k8s_obj_key]
        for path in env_paths:
            env = jmespath.search(f"{path}.env", k8s_object)
            assert expected_env_as_str in yaml.dump(env)

    @pytest.mark.parametrize("k8s_obj_key, env_from_paths", PARAMS)
    def test_extra_env_from(self, k8s_obj_key, env_from_paths):
        expected_env_from_as_str = textwrap.dedent(
            f"""
            - secretRef:
                name: {RELEASE_NAME}-airflow-connections
            - configMapRef:
                name: {RELEASE_NAME}-airflow-variables
            """
        ).lstrip()

        k8s_object = self.k8s_objects_by_key[k8s_obj_key]
        for path in env_from_paths:
            env_from = jmespath.search(f"{path}.envFrom", k8s_object)
            assert expected_env_from_as_str in yaml.dump(env_from)
4,517
31.271429
86
py