python_code | repo_name | file_path
---|---|---|
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nvflare.fuel.utils.config import ConfigFormat
class TestConfigFormat:
def test_config_exts(self):
exts2fmt_map = ConfigFormat.config_ext_formats()
assert exts2fmt_map.get(".json") == ConfigFormat.JSON
assert exts2fmt_map.get(".conf") == ConfigFormat.PYHOCON
assert exts2fmt_map.get(".yml") == ConfigFormat.OMEGACONF
assert exts2fmt_map.get(".json.default") == ConfigFormat.JSON
assert exts2fmt_map.get(".conf.default") == ConfigFormat.PYHOCON
assert exts2fmt_map.get(".yml.default") == ConfigFormat.OMEGACONF
def test_config_exts2(self):
exts2fmt_map = ConfigFormat.config_ext_formats()
assert "|".join(exts2fmt_map.keys()) == ".json|.conf|.yml|.json.default|.conf.default|.yml.default"
def test_config_exts3(self):
exts = ConfigFormat.extensions()
assert "|".join(exts) == ".json|.conf|.yml|.json.default|.conf.default|.yml.default"
def test_config_exts4(self):
exts = ConfigFormat.extensions(target_fmt=ConfigFormat.JSON)
assert "|".join(exts) == ".json|.json.default"
exts = ConfigFormat.extensions(target_fmt=ConfigFormat.OMEGACONF)
assert "|".join(exts) == ".yml|.yml.default"
exts = ConfigFormat.extensions(target_fmt=ConfigFormat.PYHOCON)
assert "|".join(exts) == ".conf|.conf.default"
| NVFlare-main | tests/unit_test/fuel/utils/config_format_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from nvflare.fuel.utils.config import ConfigFormat
from nvflare.fuel.utils.json_config_loader import JsonConfigLoader
class TestJsonConfig:
def return_dict(self, file_name):
if file_name == "test.json":
return {
"a": {
"a1": 1,
"a2": 2,
},
"b": 1,
"c": "hi",
"d": [1, 2],
}
else: # default
return {
"a": {
"a1": 2,
"a2": 4,
},
"b": 2,
"c": "hello",
"d": [2, 4],
}
def test_json_loader(self):
loader = JsonConfigLoader()
assert loader.get_format() == ConfigFormat.JSON
loader._from_file = self.return_dict
dicts = {
"a": {
"a1": 200,
},
"c": "hello",
"d": [200, 400, 500],
"e1": "Yes",
"e2": "True",
"e3": "NO",
}
config = loader.load_config("test.json")
assert config is not None
conf = config.get_native_conf()
assert conf["a"]["a1"] == 1
assert conf.get("b") == 1
assert conf.get("c") == "hi"
assert conf.get("d") == [1, 2]
assert conf.get("e4", None) is None
assert config.get_format() == ConfigFormat.JSON
assert conf.get("d") == [1, 2]
assert conf.get("a") == {"a1": 1, "a2": 2}
config = loader.load_config_from_dict(dicts)
assert config.get_format() == ConfigFormat.JSON
conf = config.get_native_conf()
assert config is not None
assert conf == dicts
assert config.get_format() == ConfigFormat.JSON
config = loader.load_config_from_str(json.dumps(dicts))
assert config is not None
assert config.get_native_conf() == dicts
assert config.get_format() == ConfigFormat.JSON
    def test_load_json_config_from_dict(self):
loader = JsonConfigLoader()
assert loader.get_format() == ConfigFormat.JSON
dicts = {
"a": {
"a1": 200,
},
"c": "hello",
"d": [200, 400, 500],
"e1": "Yes",
"e2": "True",
"e3": "NO",
}
config = loader.load_config_from_dict(dicts)
assert config.get_format() == ConfigFormat.JSON
conf = config.get_native_conf()
assert config is not None
assert conf == dicts
assert config.get_format() == ConfigFormat.JSON
def test_load_json_config_from_str(self):
loader = JsonConfigLoader()
assert loader.get_format() == ConfigFormat.JSON
dicts = {
"a": {
"a1": 200,
},
"c": "hello",
"d": [200, 400, 500],
"e1": "Yes",
"e2": "True",
"e3": "NO",
}
config = loader.load_config_from_str(json.dumps(dicts))
assert config is not None
assert config.get_native_conf() == dicts
assert config.get_format() == ConfigFormat.JSON
| NVFlare-main | tests/unit_test/fuel/utils/json_config_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nvflare.fuel.utils.class_utils import ModuleScanner
from nvflare.fuel.utils.component_builder import ComponentBuilder
class MockComponentBuilder(ComponentBuilder):
def __init__(self):
self.module_scanner = ModuleScanner(["nvflare"], ["api", "app_commons", "app_opt", "fuel", "private", "utils"])
def get_module_scanner(self):
return self.module_scanner
| NVFlare-main | tests/unit_test/fuel/utils/mock_component_builder.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import re
from nvflare.fuel.utils.json_scanner import JsonObjectProcessor, JsonScanner, Node
test_json = """
{
"learning_rate": 1e-4,
"lr_search" : [1e-4, 2e-3],
"train": {
"model": {
"name": "SegAhnet",
"args": {
"num_classes": 2,
"if_use_psp": false,
"pretrain_weight_name": "{PRETRAIN_WEIGHTS_FILE}",
"plane": "z",
"final_activation": "softmax",
"n_spatial_dim": 3
},
"search": [
{
"type": "float",
"args": ["num_classes"],
"targets": [1,3],
"domain": "net"
},
{
"type": "float",
"args": ["n_spatial_dim"],
"targets": [2,5],
"domain": "net"
},
{
"type": "enum",
"args": ["n_spatial_dim", "num_classes"],
"targets": [[2,3],[3,4],[5,1]],
"domain": "net"
},
{
"type": "enum",
"args": ["n_spatial_dim"],
"targets": [[2],[3],[6],[12]],
"domain": "net"
}
]
},
"pre_transforms": [
{
"name": "LoadNifti",
"args": {
"fields": [
"image",
"label"
]
}
},
{
"name": "ConvertToChannelsFirst",
"args": {
"fields": [
"image",
"label"
]
}
},
{
"name": "ScaleIntensityRange",
"args": {
"fields": "image",
"a_min": -57,
"a_max": 164,
"b_min": 0.0,
"b_max": 1.0,
"clip": true
}
},
{
"name": "FastCropByPosNegRatio",
"args": {
"size": [
96,
96,
96
],
"fields": "image",
"label_field": "label",
"pos": 1,
"neg": 1,
"batch_size": 3
},
"search": [
{
"domain": "transform",
"type": "enum",
"args": ["size"],
"targets": [[[32, 32, 32]], [[64, 64, 64]], [[128, 128, 128]]]
},
{
"domain": "transform",
"type": "enum",
"args": ["batch_size"],
"targets": [[3], [4], [8], [10]]
}
]
},
{
"name": "RandomAxisFlip",
"args": {
"fields": [
"image",
"label"
],
"probability": 0.0
},
"search": [
{
"domain": "transform",
"type": "float",
"args": ["probability#p"],
"targets": [0.0, 1.0]
},
{
"domain": "transform",
"args": "DISABLED"
}
]
},
{
"name": "RandomRotate3D",
"args": {
"fields": [
"image",
"label"
],
"probability": 0.0
}
},
{
"name": "ScaleIntensityOscillation",
"args": {
"fields": "image",
"magnitude": 0.10
}
},
{
"name": "LoadNifti",
"args": {
"fields": [
"image",
"label"
]
}
},
{
"name": "LoadNifti",
"args": {
"fields": [
"image",
"label"
]
}
},
{
"name": "LoadNifti",
"args": {
"fields": [
"image",
"label"
]
}
},
{
"name": "LoadNifti",
"args": {
"fields": [
"image",
"label"
]
}
},
{
"name": "RandomAxisFlip",
"args": {
"fields": [
"image",
"label"
],
"probability": 0.0
},
"search": [
{
"domain": "transform",
"type": "float",
"args": ["probability#p"],
"targets": [0.0, 1.0]
},
{
"domain": "transform",
"args": "DISABLED"
}
]
}
]
} }
"""
TRAIN_CONFIG = json.loads(test_json)
def _post_process_element(node: Node):
path = node.path()
print("EXIT Level: {}; Key: {}; Pos: {}; Path: {}".format(node.level, node.key, node.position, path))
class _TestJsonProcessor(JsonObjectProcessor):
def process_element(self, node: Node):
pats = [
r".\.pre_transforms\.#[0-9]+$",
r"^train\.model\.name$",
r".\.search\.#[0-9]+$",
r".\.pre_transforms\.#[0-9]+\.args$",
]
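        # Node paths join keys with "." and use "#N" for the N-th element of a
        # JSON array, e.g. "train.pre_transforms.#0.args" for the args of the
        # first pre-transform.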
path = node.path()
print("ENTER Level: {}; Key: {}; Pos: {}; Path: {}".format(node.level, node.key, node.position, path))
for p in pats:
x = re.search(p, path)
if x:
print("\t {} matches {}".format(path, p))
node.exit_cb = _post_process_element
class TestJsonScanner:
def test_scan(self):
scanner = JsonScanner(TRAIN_CONFIG, "test")
scanner.scan(_TestJsonProcessor())
| NVFlare-main | tests/unit_test/fuel/utils/json_scanner_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from platform import python_version
import pytest
from nvflare.app_common.np.np_model_locator import NPModelLocator
from tests.unit_test.fuel.utils.mock_component_builder import MockComponentBuilder
class MyComponent:
def __init__(self, model):
self.mode = model
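# NOTE: MyComponent (and MyComponentWithDictArgs below) store the "model"
# argument in an attribute named "mode"; this is kept as-is because the tests
# in this file access it as `b.mode`.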
class MyComponentFailure:
def __init__(self):
raise RuntimeError("initialization failed")
class MyComponentWithDictArgs:
def __init__(self, model: dict = None):
self.mode = model
class MyComponentWithPathArgs:
def __init__(self, path: str = None):
self.path = path
def is_python_greater_than_309():
    # Compare (major, minor) numerically so versions beyond 3.10 (e.g. 3.11)
    # are handled as well, instead of raising "unknown version".
    major, minor = (int(v) for v in python_version().split(".")[:2])
    return (major, minor) > (3, 9)
class TestComponentBuilder:
def test_empty_dict(self):
builder = MockComponentBuilder()
b = builder.build_component({})
assert b is None
def test_component(self):
config = {"id": "id", "path": "nvflare.app_common.np.np_model_locator.NPModelLocator", "args": {}}
builder = MockComponentBuilder()
assert isinstance(config, dict)
b = builder.build_component(config)
assert isinstance(b, NPModelLocator)
def test_component_failure(self):
config = {"id": "id", "path": "nvflare.app_common.np.np_model_locator.NPModelLocator", "args": {"xyz": 1}}
builder = MockComponentBuilder()
        # the failure message changed in Python 3.10
if is_python_greater_than_309():
msg = "Class nvflare.app_common.np.np_model_locator.NPModelLocator has parameters error: TypeError: NPModelLocator.__init__() got an unexpected keyword argument 'xyz'."
else:
msg = "Class nvflare.app_common.np.np_model_locator.NPModelLocator has parameters error: TypeError: __init__() got an unexpected keyword argument 'xyz'."
assert isinstance(config, dict)
b = None
with pytest.raises(ValueError, match=re.escape(msg)):
b = builder.build_component(config)
def test_component_init_failure(self):
config = {
"id": "id",
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponentFailure",
"args": {},
}
builder = MockComponentBuilder()
assert isinstance(config, dict)
with pytest.raises(RuntimeError, match="initialization failed"):
builder.build_component(config)
def test_embedded_component(self):
config = {
"id": "id",
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponent",
"args": {"model": {"path": "nvflare.app_common.np.np_model_locator.NPModelLocator", "args": {}}},
}
builder = MockComponentBuilder()
assert isinstance(config, dict)
b = builder.build_component(config)
assert isinstance(b, MyComponent)
def test_embedded_component_with_dict_args(self):
config = {
"id": "id",
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponent",
"args": {
"model": {
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponentWithDictArgs",
"args": {"model": {"a": "b"}},
}
},
}
builder = MockComponentBuilder()
assert isinstance(config, dict)
b = builder.build_component(config)
assert isinstance(b, MyComponent)
assert isinstance(b.mode, MyComponentWithDictArgs)
assert b.mode.mode == {"a": "b"}
def test_embedded_component_failure(self):
config = {
"id": "id",
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponent",
"args": {"model": {"path": "nvflare.app_common.np.np_model_locator.NPModelLocator", "args": {"abc": 1}}},
}
        # the failure message changed in Python 3.10
if is_python_greater_than_309():
msg = "failed to instantiate class: ValueError: Class nvflare.app_common.np.np_model_locator.NPModelLocator has parameters error: TypeError: NPModelLocator.__init__() got an unexpected keyword argument 'abc'."
else:
msg = "failed to instantiate class: ValueError: Class nvflare.app_common.np.np_model_locator.NPModelLocator has parameters error: TypeError: __init__() got an unexpected keyword argument 'abc'."
builder = MockComponentBuilder()
assert isinstance(config, dict)
with pytest.raises(
ValueError,
match=re.escape(msg),
):
b = builder.build_component(config)
def test_component_wo_args(self):
config = {"id": "id", "path": "tests.unit_test.fuel.utils.component_builder_test.MyComponentWithDictArgs"}
builder = MockComponentBuilder()
assert isinstance(config, dict)
b = builder.build_component(config)
assert isinstance(b, MyComponentWithDictArgs)
def test_embedded_component_wo_args(self):
config = {
"id": "id",
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponent",
"args": {
"model": {
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponentWithDictArgs",
}
},
}
builder = MockComponentBuilder()
assert isinstance(config, dict)
b = builder.build_component(config)
assert isinstance(b, MyComponent)
assert isinstance(b.mode, MyComponentWithDictArgs)
def test_embedded_component_with_path_args(self):
config = {
"id": "id",
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponent",
"args": {
"model": {
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponentWithPathArgs",
"args": {"path": "/tmp/nvflare"},
}
},
}
builder = MockComponentBuilder()
assert isinstance(config, dict)
b = builder.build_component(config)
assert isinstance(b, MyComponent)
assert isinstance(b.mode, MyComponentWithPathArgs)
def test_nested_component_component_type(self):
config = {
"id": "id",
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponent",
"args": {
"model": {
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponentWithDictArgs",
"config_type": "component",
}
},
}
builder = MockComponentBuilder()
assert isinstance(config, dict)
b = builder.build_component(config)
assert isinstance(b, MyComponent)
assert isinstance(b.mode, MyComponentWithDictArgs)
def test_nested_dict_component_type(self):
config = {
"id": "id",
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponent",
"args": {
"model": {
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponentWithDictArgs",
"config_type": "dict",
}
},
}
builder = MockComponentBuilder()
assert isinstance(config, dict)
b = builder.build_component(config)
assert isinstance(b, MyComponent)
assert b.mode == {
"path": "tests.unit_test.fuel.utils.component_builder_test.MyComponentWithDictArgs",
"config_type": "dict",
}
| NVFlare-main | tests/unit_test/fuel/utils/component_builder_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.fuel.utils.deprecated import deprecated
class TestDeprecated:
def test_deprecated_func_one_arg(self):
@deprecated
def test_f(a, b):
print(f"hello {a} and {b}")
with pytest.warns(DeprecationWarning, match=r"Call to deprecated function test_f."):
test_f(5, 6)
def test_deprecated_func_with_string(self):
@deprecated("please use new_test_f")
def test_f(a, b):
print(f"hello {a} and {b}")
with pytest.warns(DeprecationWarning, match=r"Call to deprecated function test_f \(please use new_test_f\)."):
test_f(5, 6)
def test_deprecated_class_one_arg(self):
@deprecated
class TestClass:
def __init__(self):
print("I am a test class")
with pytest.warns(DeprecationWarning, match=r"Call to deprecated class TestClass."):
_ = TestClass()
def test_deprecated_class_with_string(self):
@deprecated("please use NewTestClass")
class TestClass:
def __init__(self):
print("I am a test class")
with pytest.warns(DeprecationWarning, match=r"Call to deprecated class TestClass \(please use NewTestClass\)."):
_ = TestClass()
| NVFlare-main | tests/unit_test/fuel/utils/deprecated_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from pyhocon import ConfigFactory as CF
from nvflare.fuel.utils.config import ConfigFormat
from nvflare.fuel_opt.utils.pyhocon_loader import PyhoconConfig, PyhoconLoader
class TestPyHoconConfig:
def return_conf(self, file_name):
if file_name == "test.conf":
x = """config {
a {
a1 = 1
a2 = 2
}
b = 1
c = hi
d = [1,2]
} """
else:
x = """config {
a = {
a1 = 2
a2 = 4
}
b = 2
c = hello
d = [2,4]
} """
return CF.parse_string(x)
def test_config_loader(self):
loader = PyhoconLoader()
assert loader.get_format() == ConfigFormat.PYHOCON
loader._from_file = self.return_conf
dicts = {
"config": {
"a": {
"a1": 200,
},
"c": "hello",
"d": [200, 400, 500],
"e1": "Yes",
"e2": "True",
"e3": "NO",
}
}
config = loader.load_config("test.conf")
assert config.get_format() == ConfigFormat.PYHOCON
conf = config.get_native_conf()
print("conf=", conf)
conf = conf.get_config("config")
assert config is not None
assert conf.get_config("a").get_int("a1") == 1
assert conf.get_int("a.a1") == 1
assert conf.get_int("b") == 1
assert conf.get_string("c") == "hi"
assert conf.get_list("d") == [1, 2]
assert conf.get_string("e4", None) is None
with pytest.raises(Exception):
assert conf.get_string("d") == [1, 2]
with pytest.raises(Exception):
assert conf.get_string("d") == 1
assert PyhoconConfig(CF.from_dict(conf.get("a"))).to_dict() == {"a1": 1, "a2": 2}
with pytest.raises(Exception):
assert conf.get_int("a") == 1
def test_load_config_from_dict(self):
loader = PyhoconLoader()
assert loader.get_format() == ConfigFormat.PYHOCON
dicts = {
"config": {
"a": {
"a1": 200,
},
"c": "hello",
"d": [200, 400, 500],
"e1": "Yes",
"e2": "True",
"e3": "NO",
}
}
config = loader.load_config_from_dict(dicts)
assert config.get_format() == ConfigFormat.PYHOCON
assert config is not None
assert config.to_dict() == dicts
assert config.get_format() == ConfigFormat.PYHOCON
def test_load_config_from_str(self):
loader = PyhoconLoader()
assert loader.get_format() == ConfigFormat.PYHOCON
dicts = {
"config": {
"a": {
"a1": 200,
},
"c": "hello",
"d": [200, 400, 500],
"e1": "Yes",
"e2": "True",
"e3": "NO",
}
}
config = loader.load_config_from_dict(dicts)
config = loader.load_config_from_str(config.to_str())
assert config is not None
assert config.to_dict() == dicts
assert config.get_format() == ConfigFormat.PYHOCON
| NVFlare-main | tests/unit_test/fuel/utils/pyhocon_config_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/fuel/utils/fobs/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import OrderedDict
from enum import Enum, IntEnum
from nvflare.fuel.utils import fobs
from nvflare.fuel.utils.fobs.decomposer import DictDecomposer
class DataClass:
def __init__(self, a, b):
self.a = a
self.b = b
def __eq__(self, other):
if not isinstance(other, DataClass):
return False
return self.a == other.a and self.b == other.b
class EnumClass(str, Enum):
A = "foo"
B = "bar"
class IntEnumClass(IntEnum):
X = 123
Y = 456
class DictClass(dict):
pass
class TestDecomposers:
def test_generic_dict_class(self):
fobs.register(DictDecomposer(DictClass))
data = DictClass()
data["A"] = 123
data["B"] = "xyz"
self._check_decomposer(data, False)
def test_generic_data_class(self):
fobs.register_data_classes(DataClass)
data = DataClass("test", 456)
self._check_decomposer(data, False)
def test_generic_str_enum_type(self):
# Decomposers for enum classes are auto-registered by default
test_enum = EnumClass.A
self._check_decomposer(test_enum)
def test_generic_int_enum_type(self):
# Decomposers for enum classes are auto-registered by default
test_enum = IntEnumClass.X
self._check_decomposer(test_enum)
def test_ordered_dict(self):
test_list = [(3, "First"), (1, "Middle"), (2, "Last")]
test_data = OrderedDict(test_list)
buffer = fobs.dumps(test_data)
fobs.reset()
new_data = fobs.loads(buffer)
new_list = list(new_data.items())
assert test_list == new_list
@staticmethod
def _check_decomposer(data, clear_decomposers=True):
buffer = fobs.dumps(data)
if clear_decomposers:
fobs.reset()
new_data = fobs.loads(buffer)
assert type(data) == type(new_data), f"Original type {type(data)} doesn't match new data type {type(new_data)}"
assert data == new_data, f"Original data {data} doesn't match new data {new_data}"
| NVFlare-main | tests/unit_test/fuel/utils/fobs/decomposer_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nvflare.apis.shareable import Shareable
from nvflare.apis.utils.decomposers import flare_decomposers
from nvflare.fuel.utils import fobs
from nvflare.fuel.utils.fobs.datum import DatumManager
BLOB_SIZE = 1024 * 1024 # 1M
class TestDatum:
test_data = Shareable()
test_data["data"] = {
"key1": "Test",
"blob1": bytes(BLOB_SIZE),
"member": {"key2": 123, "blob2": bytearray(BLOB_SIZE)},
}
def test_datum(self):
flare_decomposers.register()
manager = DatumManager(BLOB_SIZE)
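        # Both 1M blobs meet the manager's size threshold, so they are
        # externalized as datums during serialization, which is why the
        # resulting buffer stays smaller than BLOB_SIZE.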
buf = fobs.dumps(TestDatum.test_data, manager)
assert len(buf) < BLOB_SIZE
datums = manager.get_datums()
assert len(datums) == 2
data = fobs.loads(buf, manager)
assert isinstance(data["data"]["blob1"], bytes)
| NVFlare-main | tests/unit_test/fuel/utils/fobs/datum_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import queue
from datetime import datetime
from typing import Any
import pytest
from nvflare.fuel.utils import fobs
from nvflare.fuel.utils.fobs import Decomposer
from nvflare.fuel.utils.fobs.datum import DatumManager
class TestFobs:
NUMBER = 123456
FLOAT = 123.456
NOW = datetime.now()
test_data = {
"str": "Test string",
"number": NUMBER,
"float": FLOAT,
"list": [7, 8, 9],
"set": {4, 5, 6},
"tuple": ("abc", "xyz"),
"time": NOW,
}
def test_builtin(self):
buf = fobs.dumps(TestFobs.test_data)
data = fobs.loads(buf)
assert data["number"] == TestFobs.NUMBER
def test_aliases(self):
buf = fobs.dumps(TestFobs.test_data)
data = fobs.loads(buf)
assert data["number"] == TestFobs.NUMBER
def test_unsupported_classes(self):
with pytest.raises(TypeError):
# Queue is just a random built-in class not supported by FOBS
unsupported_class = queue.Queue()
fobs.dumps(unsupported_class)
def test_decomposers(self):
test_class = ExampleClass(TestFobs.NUMBER)
fobs.register(ExampleClassDecomposer)
buf = fobs.dumps(test_class)
new_class = fobs.loads(buf)
assert new_class.number == TestFobs.NUMBER
class ExampleClass:
def __init__(self, number):
self.number = number
class ExampleClassDecomposer(Decomposer):
def supported_type(self):
return ExampleClass
def decompose(self, target: ExampleClass, manager: DatumManager = None) -> Any:
return target.number
def recompose(self, data: Any, manager: DatumManager = None) -> ExampleClass:
return ExampleClass(data)
| NVFlare-main | tests/unit_test/fuel/utils/fobs/fobs_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_opt/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_opt/psi/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_opt/psi/dh_psi/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
# imports temporarily disabled (optional dependency):
# from nvflare.app_opt.psi.dh_psi.dh_psi_client import PSIClient
# from nvflare.app_opt.psi.dh_psi.dh_psi_server import PSIServer
#
class TestPSIAlgo:
# Comment out the PSI tests for now.
@pytest.mark.parametrize(
"test_input, expected",
[
(
{
"server_items": [
"user_id-100",
"user_id-106",
"user_id-112",
"user_id-118",
"user_id-124",
"user_id-130",
"user_id-136",
"user_id-142",
"user_id-148",
"user_id-154",
"user_id-160",
"user_id-166",
"user_id-172",
"user_id-178",
"user_id-184",
"user_id-190",
"user_id-196",
],
"client_items": [
"user_id-100",
"user_id-104",
"user_id-108",
"user_id-112",
"user_id-116",
"user_id-120",
"user_id-124",
"user_id-128",
"user_id-132",
"user_id-136",
"user_id-140",
"user_id-144",
"user_id-148",
"user_id-152",
"user_id-156",
"user_id-160",
"user_id-164",
"user_id-168",
"user_id-172",
"user_id-176",
"user_id-180",
"user_id-184",
"user_id-188",
"user_id-192",
"user_id-196",
"user_id-200",
"user_id-204",
"user_id-208",
"user_id-212",
"user_id-216",
"user_id-220",
"user_id-224",
"user_id-228",
"user_id-232",
"user_id-236",
"user_id-240",
"user_id-244",
"user_id-248",
"user_id-252",
"user_id-256",
"user_id-260",
"user_id-264",
"user_id-268",
"user_id-272",
"user_id-276",
"user_id-280",
"user_id-284",
"user_id-288",
"user_id-292",
"user_id-296",
],
},
[
"user_id-100",
"user_id-112",
"user_id-124",
"user_id-136",
"user_id-148",
"user_id-160",
"user_id-172",
"user_id-184",
"user_id-196",
],
),
],
)
def test_psi_algo(self, test_input, expected):
        # We have to comment out these unit tests for now, until we figure
        # out how to enable unit tests for optional requirements.
        # If you want to run the test, just uncomment the following code.
        # The tests are also temporarily disabled because the Jenkins machine is
        # based on Ubuntu 18.04 and is missing GLIBC_2.29:
        # ImportError: /lib/x86_64-linux-gnu/libm.so.6: version `GLIBC_2.29'
        # not found (required by /root/.local/share/virtualenvs/NVFlare-premerge-R2yT5_j2/lib/python3.8/site-packages/private_set_intersection/python/_openmined_psi.so)
#
# server_items = test_input["server_items"]
# client_items = test_input["client_items"]
# client = PSIClient(client_items)
# server = PSIServer(server_items)
# setup_msg = server.setup(len(client_items))
#
# client.receive_setup(setup_msg)
# request_msg = client.get_request(client_items)
# response_msg = server.process_request(request_msg)
# intersections = client.get_intersection(response_msg)
#
# assert 9 == len(intersections)
# assert intersections == expected
pass
| NVFlare-main | tests/unit_test/app_opt/psi/dh_psi/dh_psi_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nvflare.apis.job_def import is_valid_job_id
class TestJobDef:
def test_is_valid_job_id(self):
assert not is_valid_job_id("site-1")
assert is_valid_job_id("c2564481-536a-4548-8dfa-cf183a3652a1")
assert is_valid_job_id("c2564481536a45488dfacf183a3652a1")
assert not is_valid_job_id("c2564481536a45488dfacf183a3652a1ddd")
assert not is_valid_job_id("c2564481-536a-4548-fdff-df183a3652a1")
| NVFlare-main | tests/unit_test/apis/job_def_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.apis.dxo import DXO, DataKind, get_leaf_dxos
TEST_INIT_1 = [DataKind.WEIGHTS, {"data": 1.0}]
TEST_INIT_2 = [DataKind.WEIGHT_DIFF, {"data": 1.0}]
TEST_INIT_3 = [DataKind.METRICS, {"data": 1.0}]
TEST_INIT_4 = [DataKind.STATISTICS, {"data": 1.0}]
TEST_INIT_5 = [
DataKind.COLLECTION,
{"dxo1": DXO(DataKind.WEIGHTS, {"data": 1.0}), "dxo2": DXO(DataKind.WEIGHTS, {"data": 2.0})},
]
TEST_INIT_ERROR_1 = [DataKind.WEIGHTS, 1.0]
dxo1 = DXO(DataKind.WEIGHTS, {"data": 1.0})
dxo2 = DXO(DataKind.WEIGHTS, {"data": 2.0})
dxo11 = DXO(DataKind.WEIGHTS, {"data": 3.0})
dxo22 = DXO(DataKind.WEIGHTS, {"data": 4.0})
dxo3 = DXO(DataKind.COLLECTION, {"dxo11": dxo11, "dxo22": dxo22})
TEST_GET_LEAVES_1 = [DataKind.COLLECTION, {"dxo1": dxo1, "dxo2": dxo2}, {"dxo1": dxo1, "dxo2": dxo2}]
TEST_GET_LEAVES_2 = [
DataKind.COLLECTION,
{"dxo1": dxo1, "dxo2": dxo2, "dxo3": dxo3},
{"dxo1": dxo1, "dxo2": dxo2, "dxo3.dxo11": dxo11, "dxo3.dxo22": dxo22},
]
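# get_leaf_dxos flattens nested COLLECTION DXOs into a dict of leaf DXOs keyed
# by dotted paths under root_name, e.g. "test.dxo3.dxo11" for dxo11 above.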
class TestDXO:
@pytest.mark.parametrize("data_kind, data", [TEST_INIT_1, TEST_INIT_2, TEST_INIT_3, TEST_INIT_4, TEST_INIT_5])
def test_init(self, data_kind, data):
dxo = DXO(data_kind=data_kind, data=data)
        # Why does validate() return an empty string on success? A boolean would be clearer.
assert dxo.validate() == ""
@pytest.mark.parametrize("data_kind, data", [TEST_INIT_ERROR_1])
def test_init_no_dict(self, data_kind, data):
with pytest.raises(ValueError):
dxo = DXO(data_kind=data_kind, data=data)
dxo.validate()
@pytest.mark.parametrize("data_kind, data, expected", [TEST_GET_LEAVES_2])
def test_get_leaf_dxos(self, data_kind, data, expected):
dxo = DXO(data_kind=data_kind, data=data)
assert dxo.validate() == ""
result, errors = get_leaf_dxos(dxo, root_name="test")
assert len(result) == len(expected)
for _exp_key, _exp_dxo in expected.items():
_leaf_key = "test." + _exp_key
assert _leaf_key in result
assert result.get(_leaf_key).data == _exp_dxo.data
assert result.get(_leaf_key).data_kind == _exp_dxo.data_kind
assert not errors
| NVFlare-main | tests/unit_test/apis/dxo_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.apis.analytix import _DATA_TYPE_KEY, AnalyticsData, AnalyticsDataType
from nvflare.apis.dxo import DXO, DataKind
from nvflare.app_common.tracking.tracker_types import LogWriterName, TrackConst
from nvflare.app_common.widgets.streaming import create_analytic_dxo
FROM_DXO_TEST_CASES = [
("hello", 3.0, 1, AnalyticsDataType.SCALAR),
("world", "text", 2, AnalyticsDataType.TEXT),
("dict", {"key": 1.0}, 3, AnalyticsDataType.SCALARS),
]
TO_DXO_TEST_CASES = [
AnalyticsData(key="hello", value=3.0, data_type=AnalyticsDataType.SCALAR),
AnalyticsData(key="world", value="text", step=2, path="/tmp/", data_type=AnalyticsDataType.TEXT),
AnalyticsData(
key="dict",
value={"key": 1.0},
step=3,
sender=LogWriterName.MLFLOW,
kwargs={"experiment_name": "test"},
data_type=AnalyticsDataType.SCALARS,
),
]
FROM_DXO_INVALID_TEST_CASES = [
(dict(), TypeError, f"expect dxo to be an instance of DXO, but got {type(dict())}."),
(
DXO(data_kind=DataKind.WEIGHTS, data={"w": 1.0}),
KeyError,
"'track_key'",
),
]
INVALID_TEST_CASES = [
(
dict(),
1.0,
AnalyticsDataType.SCALAR,
None,
TypeError,
f"expect tag to be an instance of str, but got {type(dict())}.",
),
(
"tag",
1.0,
"scalar",
None,
TypeError,
f"expect data_type to be an instance of AnalyticsDataType, but got {type('')}.",
),
]
class TestAnalytix:
@pytest.mark.parametrize("tag,value,data_type,kwargs,expected_error,expected_msg", INVALID_TEST_CASES)
def test_invalid(self, tag, value, data_type, kwargs, expected_error, expected_msg):
with pytest.raises(expected_error, match=expected_msg):
if not kwargs:
_ = AnalyticsData(key=tag, value=value, data_type=data_type)
else:
_ = AnalyticsData(key=tag, value=value, data_type=data_type, **kwargs)
@pytest.mark.parametrize("tag,value,step, data_type", FROM_DXO_TEST_CASES)
def test_from_dxo(self, tag, value, step, data_type):
dxo = create_analytic_dxo(tag=tag, value=value, data_type=data_type, global_step=step)
assert dxo.get_meta_prop(_DATA_TYPE_KEY) == data_type
result = AnalyticsData.from_dxo(dxo)
assert result.tag == tag
assert result.value == value
assert result.step == step
assert result.sender == LogWriterName.TORCH_TB
@pytest.mark.parametrize("data", TO_DXO_TEST_CASES)
def test_to_dxo(self, data: AnalyticsData):
result = data.to_dxo()
assert result.data_kind == DataKind.ANALYTIC
assert result.data[TrackConst.TRACK_KEY] == data.tag
assert result.data[TrackConst.TRACK_VALUE] == data.value
if data.step:
assert result.data[TrackConst.GLOBAL_STEP_KEY] == data.step
if data.path:
assert result.data[TrackConst.PATH_KEY] == data.path
if data.kwargs:
assert result.data[TrackConst.KWARGS_KEY] == data.kwargs
assert result.get_meta_prop(_DATA_TYPE_KEY) == data.data_type
assert result.get_meta_prop(TrackConst.TRACKER_KEY) == data.sender
@pytest.mark.parametrize("dxo,expected_error,expected_msg", FROM_DXO_INVALID_TEST_CASES)
def test_from_dxo_invalid(self, dxo, expected_error, expected_msg):
with pytest.raises(expected_error, match=expected_msg):
_ = AnalyticsData.from_dxo(dxo)
| NVFlare-main | tests/unit_test/apis/analytix_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nvflare.apis.fl_context import FLContext, FLContextManager
class TestFLContext:
def test_add_item(self):
expected = {"x": 1, "y": 2}
fl_ctx = FLContext()
fl_ctx.set_prop("x", 1, private=False)
fl_ctx.set_prop("y", 2, private=False)
assert fl_ctx.get_prop("y") == 2
assert fl_ctx.get_all_public_props() == expected
def test_add_with_private_item(self):
expected = {"x": 1, "y": 2}
fl_ctx = FLContext()
fl_ctx.set_prop("x", 1, private=False)
fl_ctx.set_prop("y", 2, private=False)
fl_ctx.set_prop("z", 3, private=True)
assert fl_ctx.get_prop("y") == 2
assert fl_ctx.get_all_public_props() == expected
def test_set_items(self):
expected = {"_public_x": 1, "_public_y": 2}
fl_ctx = FLContext()
fl_ctx.set_prop("z", 3, private=False)
# Overwrite the existing public_props.
fl_ctx.set_public_props(expected)
assert fl_ctx.get_all_public_props() == expected
def test_not_allow_duplicate_key(self):
fl_ctx = FLContext()
fl_ctx.set_prop("x", 1, private=False)
fl_ctx.set_prop("y", 2, private=False)
fl_ctx.set_prop("z", 3, private=True)
assert fl_ctx.set_prop("y", 20, private=False)
assert fl_ctx.get_prop("y") == 20
assert not fl_ctx.set_prop("y", 4, private=True)
def test_remove_prop(self):
fl_ctx = FLContext()
assert fl_ctx.set_prop("x", 1, private=False)
assert fl_ctx.set_prop("y", 2, private=False)
fl_ctx.remove_prop("y")
assert fl_ctx.set_prop("y", 20, private=True)
assert fl_ctx.get_prop("y") == 20
def test_sticky_prop(self):
mgr = FLContextManager()
ctx1 = mgr.new_context()
ctx2 = mgr.new_context()
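        # Sticky props are stored in the FLContextManager, so a value set
        # through one context is visible to, and can be updated through, any
        # other context created by the same manager.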
ctx1.set_prop(key="x", value=1, private=True, sticky=True)
assert ctx2.get_prop("x") == 1
ctx2.set_prop(key="x", value=2, private=True, sticky=True)
assert ctx2.get_prop("x") == 2
assert ctx1.get_prop("x") == 2
| NVFlare-main | tests/unit_test/apis/fl_context_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.apis.controller_spec import Task
from nvflare.apis.shareable import Shareable
def create_task(name, data=None, timeout=0, before_task_sent_cb=None, result_received_cb=None, task_done_cb=None):
data = Shareable() if data is None else data
task = Task(
name=name,
data=data,
timeout=timeout,
before_task_sent_cb=before_task_sent_cb,
result_received_cb=result_received_cb,
task_done_cb=task_done_cb,
)
return task
def _get_create_task_cases():
test_cases = [
(
{"timeout": -1},
ValueError,
"timeout must be >= 0, but got -1.",
),
(
{"timeout": 1.1},
TypeError,
"timeout must be an int, but got <class 'float'>.",
),
(
{"before_task_sent_cb": list()},
TypeError,
"before_task_sent must be a callable function.",
),
(
{"result_received_cb": list()},
TypeError,
"result_received must be a callable function.",
),
(
{"task_done_cb": list()},
TypeError,
"task_done must be a callable function.",
),
]
return test_cases
class TestTask:
@pytest.mark.parametrize("kwargs,error,msg", _get_create_task_cases())
def test_create_task_with_invalid_input(self, kwargs, error, msg):
with pytest.raises(error, match=msg):
_ = create_task(name="__test_task", **kwargs)
def test_set_task_prop(self):
task = create_task(name="__test_task")
task.set_prop("hello", "world")
assert task.props["hello"] == "world"
def test_get_task_prop(self):
task = create_task(name="__test_task")
task.props["hello"] = "world"
assert task.get_prop("hello") == "world"
def test_set_task_prop_invalid_key(self):
task = create_task(name="__test_task")
with pytest.raises(ValueError, match="Keys start with __ is reserved. Please use other key."):
task.set_prop("__test", "world")
| NVFlare-main | tests/unit_test/apis/controller_spec_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import tempfile
import unittest
from unittest import mock
from nvflare.apis.fl_context import FLContext
from nvflare.apis.impl.job_def_manager import SimpleJobDefManager
from nvflare.apis.job_def import JobMetaKey
from nvflare.apis.storage import WORKSPACE
from nvflare.app_common.storages.filesystem_storage import FilesystemStorage
from nvflare.fuel.utils.zip_utils import zip_directory_to_bytes
from nvflare.private.fed.server.job_meta_validator import JobMetaValidator
class TestJobManager(unittest.TestCase):
def setUp(self) -> None:
dir_path = os.path.dirname(os.path.realpath(__file__))
self.uri_root = tempfile.mkdtemp()
self.data_folder = os.path.join(dir_path, "../../data/jobs")
self.job_manager = SimpleJobDefManager(uri_root=self.uri_root)
self.fl_ctx = FLContext()
def tearDown(self) -> None:
shutil.rmtree(self.uri_root)
def test_create_job(self):
with mock.patch("nvflare.apis.impl.job_def_manager.SimpleJobDefManager._get_job_store") as mock_store:
mock_store.return_value = FilesystemStorage()
data, meta = self._create_job()
content = self.job_manager.get_content(meta.get(JobMetaKey.JOB_ID), self.fl_ctx)
assert content == data
def _create_job(self):
data = zip_directory_to_bytes(self.data_folder, "valid_job")
folder_name = "valid_job"
job_validator = JobMetaValidator()
valid, error, meta = job_validator.validate(folder_name, data)
meta = self.job_manager.create(meta, data, self.fl_ctx)
return data, meta
def test_save_workspace(self):
with mock.patch("nvflare.apis.impl.job_def_manager.SimpleJobDefManager._get_job_store") as mock_store:
mock_store.return_value = FilesystemStorage()
data, meta = self._create_job()
job_id = meta.get(JobMetaKey.JOB_ID)
self.job_manager.save_workspace(job_id, data, self.fl_ctx)
result = self.job_manager.get_storage_component(job_id, WORKSPACE, self.fl_ctx)
assert result == data
| NVFlare-main | tests/unit_test/apis/impl/job_def_manager_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import re
import threading
import time
import uuid
from itertools import permutations
from unittest.mock import Mock
import pytest
from nvflare.apis.client import Client
from nvflare.apis.controller_spec import ClientTask, SendOrder, Task, TaskCompletionStatus
from nvflare.apis.fl_context import FLContext, FLContextManager
from nvflare.apis.impl.controller import Controller
from nvflare.apis.server_engine_spec import ServerEngineSpec
from nvflare.apis.shareable import ReservedHeaderKey, Shareable
from nvflare.apis.signal import Signal
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
def create_task(name, data=None, timeout=0, before_task_sent_cb=None, result_received_cb=None, task_done_cb=None):
data = Shareable() if data is None else data
task = Task(
name=name,
data=data,
timeout=timeout,
before_task_sent_cb=before_task_sent_cb,
result_received_cb=result_received_cb,
task_done_cb=task_done_cb,
)
return task
def create_client(name, token=None):
token = str(uuid.uuid4()) if token is None else token
return Client(name=name, token=token)
# TODO:
# - provide an easy way for researchers to test their own Controller / their own control loop?
# - how can they write their own test cases, simulating different clients in different scenarios...
class DummyController(Controller):
def __init__(self):
super().__init__(task_check_period=0.1)
def control_flow(self, abort_signal: Signal, fl_ctx: FLContext):
print(f"Entering control loop of {self.__class__.__name__}")
def start_controller(self, fl_ctx: FLContext):
print("Start controller")
def stop_controller(self, fl_ctx: FLContext):
print("Stop controller")
def process_result_of_unknown_task(
self, client: Client, task_name: str, client_task_id: str, result: Shareable, fl_ctx: FLContext
):
raise RuntimeError(f"Unknown task: {task_name} from client {client.name}.")
def launch_task(controller, method, task, fl_ctx, kwargs):
if method == "broadcast":
if "min_responses" in kwargs:
min_responses = kwargs.pop("min_responses")
elif "targets" in kwargs:
min_responses = len(kwargs["targets"])
else:
min_responses = 1
controller.broadcast(task=task, fl_ctx=fl_ctx, min_responses=min_responses, **kwargs)
elif method == "broadcast_and_wait":
if "min_responses" in kwargs:
min_responses = kwargs.pop("min_responses")
elif "targets" in kwargs:
min_responses = len(kwargs["targets"])
else:
min_responses = 1
controller.broadcast_and_wait(task=task, fl_ctx=fl_ctx, min_responses=min_responses, **kwargs)
elif method == "send":
controller.send(task=task, fl_ctx=fl_ctx, **kwargs)
elif method == "send_and_wait":
controller.send_and_wait(task=task, fl_ctx=fl_ctx, **kwargs)
elif method == "relay":
controller.relay(task=task, fl_ctx=fl_ctx, **kwargs)
elif method == "relay_and_wait":
controller.relay_and_wait(task=task, fl_ctx=fl_ctx, **kwargs)
def get_ready(thread, sleep_time=0.1):
thread.start()
time.sleep(sleep_time)
def _setup_system(num_clients=1):
clients_list = [create_client(f"__test_client{i}") for i in range(num_clients)]
mock_server_engine = Mock(spec=ServerEngineSpec)
context_manager = FLContextManager(
engine=mock_server_engine,
identity_name="__mock_server_engine",
job_id="job_1",
public_stickers={},
private_stickers={},
)
mock_server_engine.new_context.return_value = context_manager.new_context()
mock_server_engine.get_clients.return_value = clients_list
controller = DummyController()
fl_ctx = mock_server_engine.new_context()
controller.initialize_run(fl_ctx=fl_ctx)
return controller, mock_server_engine, fl_ctx, clients_list
class TestController:
NO_RELAY = ["broadcast", "broadcast_and_wait", "send", "send_and_wait"]
RELAY = ["relay", "relay_and_wait"]
ALL_APIS = NO_RELAY + RELAY
@staticmethod
def setup_system(num_of_clients=1):
controller, server_engine, fl_ctx, clients_list = _setup_system(num_clients=num_of_clients)
return controller, fl_ctx, clients_list
@staticmethod
def teardown_system(controller, fl_ctx):
controller.finalize_run(fl_ctx=fl_ctx)
class TestTaskManagement(TestController):
@pytest.mark.parametrize("method", TestController.ALL_APIS)
@pytest.mark.parametrize("num_of_tasks", [2, 3, 4])
def test_add_task(self, method, num_of_tasks):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
all_threads = []
all_tasks = []
for i in range(num_of_tasks):
task = create_task(name="__test_task")
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
all_threads.append(launch_thread)
all_tasks.append(task)
assert controller.get_num_standing_tasks() == num_of_tasks
controller.cancel_all_tasks()
for task in all_tasks:
assert task.completion_status == TaskCompletionStatus.CANCELLED
for thread in all_threads:
thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method1", TestController.ALL_APIS)
@pytest.mark.parametrize("method2", TestController.ALL_APIS)
def test_reuse_same_task(self, method1, method2):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
task = create_task(name="__test_task")
targets = [client]
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method1,
"fl_ctx": fl_ctx,
"kwargs": {"targets": targets},
},
)
get_ready(launch_thread)
with pytest.raises(ValueError, match="Task was already used. Please create a new task object."):
launch_task(controller=controller, method=method2, task=task, fl_ctx=fl_ctx, kwargs={"targets": targets})
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
@pytest.mark.parametrize("num_of_start_tasks", [2, 3, 4])
@pytest.mark.parametrize("num_of_cancel_tasks", [1, 2])
def test_check_task_remove_cancelled_tasks(self, method, num_of_start_tasks, num_of_cancel_tasks):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
all_threads = []
all_tasks = []
for i in range(num_of_start_tasks):
task = create_task(name=f"__test_task{i}")
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
all_threads.append(launch_thread)
all_tasks.append(task)
for i in range(num_of_cancel_tasks):
controller.cancel_task(task=all_tasks[i], fl_ctx=fl_ctx)
assert all_tasks[i].completion_status == TaskCompletionStatus.CANCELLED
controller._check_tasks()
assert controller.get_num_standing_tasks() == (num_of_start_tasks - num_of_cancel_tasks)
controller.cancel_all_tasks()
for thread in all_threads:
thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
@pytest.mark.parametrize("num_client_requests", [1, 2, 3, 4])
def test_client_request_after_cancel_task(self, method, num_client_requests):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
task = create_task("__test_task")
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
controller.cancel_task(task)
for i in range(num_client_requests):
_, task_id, data = controller.process_task_request(client, fl_ctx)
            # an empty task_id means the task was not assigned to this client
assert task_id == ""
assert data is None
controller._check_tasks()
assert controller.get_num_standing_tasks() == 0
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_client_submit_result_after_cancel_task(self, method):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
task = create_task("__test_task")
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
time.sleep(1)
print(controller._tasks)
        # here we make up the client result:
result = Shareable()
result["result"] = "result"
with pytest.raises(RuntimeError, match="Unknown task: __test_task from client __test_client0."):
controller.process_submission(
client=client, task_name="__test_task", task_id=client_task_id, fl_ctx=fl_ctx, result=result
)
assert task.last_client_task_map["__test_client0"].result is None
launch_thread.join()
self.teardown_system(controller, fl_ctx)
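# The helpers below build (kwargs, error, msg) test cases: kwargs are passed to
# the controller API under test, which is expected to raise `error` with a
# message matching the regex `msg` (hence the re.escape() calls around messages
# containing parentheses).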
def _get_common_test_cases():
test_cases = [
({"task": list(), "fl_ctx": FLContext()}, TypeError, "task must be an instance of Task."),
(
{"task": create_task("__test"), "fl_ctx": list()},
TypeError,
"fl_ctx must be an instance of FLContext.",
),
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "targets": dict()},
TypeError,
"targets must be a list of Client or string.",
),
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "targets": [1, 2, 3]},
TypeError,
"targets must be a list of Client or string.",
),
]
return test_cases
def _get_broadcast_test_cases():
test_cases = [
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "min_responses": -1},
ValueError,
"min_responses must >= 0.",
),
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "min_responses": 1.1},
TypeError,
"min_responses must be an instance of int.",
),
]
return test_cases
def _get_send_test_cases():
test_cases = [
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "task_assignment_timeout": -1},
ValueError,
"task_assignment_timeout must >= 0.",
),
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "task_assignment_timeout": 1.1},
TypeError,
"task_assignment_timeout must be an instance of int.",
),
(
{
"task": create_task("__test"),
"fl_ctx": FLContext(),
"send_order": SendOrder.SEQUENTIAL,
"targets": [],
},
ValueError,
"Targets must be provided for send",
),
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "send_order": "hello"},
TypeError,
"send_order must be in Enum SendOrder.",
),
(
{
"task": create_task("__test", timeout=2),
"fl_ctx": FLContext(),
"task_assignment_timeout": 3,
},
ValueError,
re.escape("task_assignment_timeout (3) needs to be less than or equal to task.timeout (2)."),
),
]
return test_cases
def _get_relay_test_cases():
test_cases = [
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "task_assignment_timeout": -1},
ValueError,
"task_assignment_timeout must >= 0.",
),
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "task_assignment_timeout": 1.1},
TypeError,
"task_assignment_timeout must be an instance of int.",
),
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "task_result_timeout": -1},
ValueError,
"task_result_timeout must >= 0.",
),
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "task_result_timeout": 1.1},
TypeError,
"task_result_timeout must be an instance of int.",
),
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "send_order": "hello"},
TypeError,
"send_order must be in Enum SendOrder.",
),
(
{
"task": create_task("__test", timeout=2),
"fl_ctx": FLContext(),
"task_assignment_timeout": 3,
},
ValueError,
re.escape("task_assignment_timeout (3) needs to be less than or equal to task.timeout (2)."),
),
(
{
"task": create_task("__test", timeout=2),
"fl_ctx": FLContext(),
"task_result_timeout": 3,
},
ValueError,
re.escape("task_result_timeout (3) needs to be less than or equal to task.timeout (2)."),
),
(
{"task": create_task("__test"), "fl_ctx": FLContext(), "dynamic_targets": False},
ValueError,
"Need to provide targets when dynamic_targets is set to False.",
),
]
return test_cases
def _get_process_submission_test_cases():
return [
(
{
"client": None,
"task_name": "__test_task",
"fl_ctx": FLContext(),
"task_id": "abc",
"result": Shareable(),
},
TypeError,
"client must be an instance of Client.",
),
(
{
"client": create_client("__test"),
"task_name": "__test_task",
"fl_ctx": None,
"task_id": "abc",
"result": Shareable(),
},
TypeError,
"fl_ctx must be an instance of FLContext.",
),
(
{
"client": create_client("__test"),
"task_name": "__test_task",
"fl_ctx": FLContext(),
"task_id": "abc",
"result": "abc",
},
TypeError,
"result must be an instance of Shareable.",
),
]
class TestInvalidInput(TestController):
@pytest.mark.parametrize("method", TestController.ALL_APIS)
@pytest.mark.parametrize("kwargs,error,msg", _get_common_test_cases())
def test_invalid_input(self, method, kwargs, error, msg):
controller, fl_ctx, clients = self.setup_system()
with pytest.raises(error, match=msg):
if method == "broadcast":
controller.broadcast(**kwargs)
elif method == "broadcast_and_wait":
controller.broadcast_and_wait(**kwargs)
elif method == "send":
controller.send(**kwargs)
elif method == "send_and_wait":
controller.send_and_wait(**kwargs)
elif method == "relay":
controller.relay(**kwargs)
elif method == "relay_and_wait":
controller.relay_and_wait(**kwargs)
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", ["broadcast", "broadcast_and_wait"])
@pytest.mark.parametrize("kwargs,error,msg", _get_broadcast_test_cases())
def test_broadcast_invalid_input(self, method, kwargs, error, msg):
controller, fl_ctx, clients = self.setup_system()
with pytest.raises(error, match=msg):
if method == "broadcast":
controller.broadcast(**kwargs)
else:
controller.broadcast_and_wait(**kwargs)
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", ["send", "send_and_wait"])
@pytest.mark.parametrize("kwargs,error,msg", _get_send_test_cases())
def test_send_invalid_input(self, method, kwargs, error, msg):
controller, fl_ctx, clients = self.setup_system()
with pytest.raises(error, match=msg):
if method == "send":
controller.send(**kwargs)
else:
controller.send_and_wait(**kwargs)
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", ["relay", "relay_and_wait"])
@pytest.mark.parametrize("kwargs,error,msg", _get_relay_test_cases())
def test_relay_invalid_input(self, method, kwargs, error, msg):
controller, fl_ctx, clients = self.setup_system()
with pytest.raises(error, match=msg):
if method == "relay":
controller.relay(**kwargs)
else:
controller.relay_and_wait(**kwargs)
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
@pytest.mark.parametrize("kwargs,error,msg", _get_process_submission_test_cases())
def test_process_submission_invalid_input(self, method, kwargs, error, msg):
controller, fl_ctx, clients = self.setup_system()
with pytest.raises(error, match=msg):
controller.process_submission(**kwargs)
self.teardown_system(controller, fl_ctx)
def _get_task_done_callback_test_cases():
task_name = "__test_task"
def task_done_cb(task: Task, **kwargs):
client_names = [x.client.name for x in task.client_tasks]
expected_str = "_".join(client_names)
task.props[task_name] = expected_str
input_data = Shareable()
test_cases = [
(
"broadcast",
10,
task_name,
input_data,
task_done_cb,
"_".join([f"__test_client{i}" for i in range(10)]),
),
(
"broadcast_and_wait",
10,
task_name,
input_data,
task_done_cb,
"_".join([f"__test_client{i}" for i in range(10)]),
),
("send", 1, task_name, input_data, task_done_cb, "__test_client0"),
("send_and_wait", 1, task_name, input_data, task_done_cb, "__test_client0"),
("relay", 1, task_name, input_data, task_done_cb, "__test_client0"),
("relay_and_wait", 1, task_name, input_data, task_done_cb, "__test_client0"),
]
return test_cases
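# Drive one full round trip for every client: each client pulls the named task,
# then submits an empty Shareable as its result.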
def clients_pull_and_submit_result(controller, ctx, clients, task_name):
client_task_ids = []
num_of_clients = len(clients)
for i in range(num_of_clients):
task_name_out, client_task_id, data = controller.process_task_request(clients[i], ctx)
assert task_name_out == task_name
client_task_ids.append(client_task_id)
for client, client_task_id in zip(clients, client_task_ids):
data = Shareable()
controller.process_submission(
client=client, task_name=task_name, task_id=client_task_id, fl_ctx=ctx, result=data
)
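# Tests for the task callbacks: before_task_sent_cb, result_received_cb, and
# task_done_cb, including callbacks that cancel the task or schedule new tasks.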
class TestCallback(TestController):
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_before_task_sent_cb(self, method):
def before_task_sent_cb(client_task: ClientTask, **kwargs):
client_task.task.data["_test_data"] = client_task.client.name
client_name = "__test_client0"
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
task = create_task("__test_task", before_task_sent_cb=before_task_sent_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
task_name_out, _, data = controller.process_task_request(client, fl_ctx)
assert data["_test_data"] == client_name
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_result_received_cb(self, method):
def result_received_cb(client_task: ClientTask, **kwargs):
client_task.result["_test_data"] = client_task.client.name
client_name = "__test_client0"
input_data = Shareable()
input_data["_test_data"] = "_old_data"
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
task = create_task("__test_task", data=input_data, result_received_cb=result_received_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
controller.process_submission(
client=client, task_name="__test_task", task_id=client_task_id, fl_ctx=fl_ctx, result=data
)
assert task.last_client_task_map[client_name].result["_test_data"] == client_name
controller._check_tasks()
assert task.completion_status == TaskCompletionStatus.OK
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("task_complete", ["normal", "timeout", "cancel"])
@pytest.mark.parametrize(
"method,num_clients,task_name,input_data,cb,expected", _get_task_done_callback_test_cases()
)
def test_task_done_cb(self, method, num_clients, task_name, input_data, cb, expected, task_complete):
controller, fl_ctx, clients = self.setup_system(num_clients)
timeout = 0 if task_complete != "timeout" else 1
task = create_task("__test_task", data=input_data, task_done_cb=cb, timeout=timeout)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": clients},
},
)
get_ready(launch_thread)
client_task_ids = len(clients) * [None]
for i, client in enumerate(clients):
task_name_out, client_task_ids[i], _ = controller.process_task_request(client, fl_ctx)
if task_name_out == "":
client_task_ids[i] = None
        # here we make up the client results:
result = Shareable()
result["result"] = "result"
for client, client_task_id in zip(clients, client_task_ids):
if client_task_id is not None:
if task_complete == "normal":
controller.process_submission(
client=client, task_name="__test_task", task_id=client_task_id, fl_ctx=fl_ctx, result=result
)
if task_complete == "timeout":
time.sleep(timeout)
controller._check_tasks()
assert task.completion_status == TaskCompletionStatus.TIMEOUT
elif task_complete == "cancel":
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
controller._check_tasks()
assert task.props[task_name] == expected
assert controller.get_num_standing_tasks() == 0
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_cancel_task_before_send_cb(self, method):
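        # setting the completion status inside before_task_sent_cb cancels the
        # task before it is ever assigned, so the request below returns nothing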
def before_task_sent_cb(client_task: ClientTask, **kwargs):
client_task.task.completion_status = TaskCompletionStatus.CANCELLED
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
task = create_task("__test_task", before_task_sent_cb=before_task_sent_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
assert task_name_out == ""
assert client_task_id == ""
launch_thread.join()
assert task.completion_status == TaskCompletionStatus.CANCELLED
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_cancel_task_result_received_cb(self, method):
        # the callback must accept arguments named client_task and fl_ctx
def result_received_cb(client_task: ClientTask, **kwargs):
client_task.task.completion_status = TaskCompletionStatus.CANCELLED
controller, fl_ctx, clients = self.setup_system()
client1 = clients[0]
client2 = create_client(name="__another_client")
task = create_task("__test_task", result_received_cb=result_received_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client1, client2]},
},
)
get_ready(launch_thread)
task_name_out, client_task_id, data = controller.process_task_request(client1, fl_ctx)
result = Shareable()
result["__result"] = "__test_result"
controller.process_submission(
client=client1, task_name="__test_task", task_id=client_task_id, fl_ctx=fl_ctx, result=result
)
assert task.last_client_task_map["__test_client0"].result == result
task_name_out, client_task_id, data = controller.process_task_request(client2, fl_ctx)
assert task_name_out == ""
assert client_task_id == ""
launch_thread.join()
assert task.completion_status == TaskCompletionStatus.CANCELLED
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
@pytest.mark.parametrize("method2", ["broadcast", "send", "relay"])
def test_schedule_task_before_send_cb(self, method, method2):
        # the callback must accept arguments named client_task and fl_ctx
def before_task_sent_cb(client_task: ClientTask, fl_ctx: FLContext):
inner_controller = ctx.get_prop(key="controller")
new_task = create_task("__new_test_task")
inner_launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": inner_controller,
"task": new_task,
"method": method2,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client_task.client]},
},
)
inner_launch_thread.start()
inner_launch_thread.join()
controller, ctx, clients = self.setup_system()
ctx.set_prop("controller", controller)
client = clients[0]
task = create_task("__test_task", before_task_sent_cb=before_task_sent_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": ctx,
"kwargs": {"targets": [client]},
},
)
launch_thread.start()
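        # poll until the controller schedules the task and assigns it to the client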
task_name_out = ""
while task_name_out == "":
task_name_out, _, _ = controller.process_task_request(client, ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
new_task_name_out = ""
while new_task_name_out == "":
new_task_name_out, _, _ = controller.process_task_request(client, ctx)
time.sleep(0.1)
assert new_task_name_out == "__new_test_task"
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.teardown_system(controller, ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
@pytest.mark.parametrize("method2", ["broadcast", "send", "relay"])
def test_schedule_task_result_received_cb(self, method, method2):
        # the callback must accept arguments named client_task and fl_ctx
def result_received_cb(client_task: ClientTask, fl_ctx: FLContext):
inner_controller = fl_ctx.get_prop(key="controller")
new_task = create_task("__new_test_task")
inner_launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": inner_controller,
"task": new_task,
"method": method2,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client_task.client]},
},
)
get_ready(inner_launch_thread)
inner_launch_thread.join()
controller, ctx, clients = self.setup_system()
ctx.set_prop("controller", controller)
client = clients[0]
task = create_task("__test_task", result_received_cb=result_received_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": ctx,
"kwargs": {"targets": [client]},
},
)
launch_thread.start()
task_name_out = ""
client_task_id = ""
data = None
while task_name_out == "":
task_name_out, client_task_id, data = controller.process_task_request(client, ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
controller.process_submission(
client=client, task_name="__test_task", task_id=client_task_id, fl_ctx=ctx, result=data
)
controller._check_tasks()
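        # the original task has completed; only the __new_test_task scheduled by
        # the callback remains standing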
assert controller.get_num_standing_tasks() == 1
new_task_name_out = ""
while new_task_name_out == "":
new_task_name_out, _, _ = controller.process_task_request(client, ctx)
time.sleep(0.1)
assert new_task_name_out == "__new_test_task"
launch_thread.join()
self.teardown_system(controller, ctx)
def test_broadcast_schedule_task_in_result_received_cb(self):
num_of_clients = 100
controller, ctx, clients = self.setup_system(num_of_clients=num_of_clients)
        # the callback must accept arguments named client_task and fl_ctx
def result_received_cb(client_task: ClientTask, fl_ctx: FLContext):
inner_controller = fl_ctx.get_prop(key="controller")
client = client_task.client
new_task = create_task(f"__new_test_task_{client.name}")
inner_launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": inner_controller,
"task": new_task,
"method": "broadcast",
"fl_ctx": fl_ctx,
"kwargs": {"targets": clients},
},
)
get_ready(inner_launch_thread)
inner_launch_thread.join()
ctx.set_prop("controller", controller)
task = create_task("__test_task", result_received_cb=result_received_cb)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": "broadcast",
"fl_ctx": ctx,
"kwargs": {"targets": clients},
},
)
launch_thread.start()
clients_pull_and_submit_result(controller=controller, ctx=ctx, clients=clients, task_name="__test_task")
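        # every submission above scheduled a new broadcast task through
        # result_received_cb, so one standing task remains per client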
controller._check_tasks()
assert controller.get_num_standing_tasks() == num_of_clients
for i in range(num_of_clients):
clients_pull_and_submit_result(
controller=controller, ctx=ctx, clients=clients, task_name=f"__new_test_task_{clients[i].name}"
)
controller._check_tasks()
assert controller.get_num_standing_tasks() == num_of_clients - (i + 1)
launch_thread.join()
self.teardown_system(controller, ctx)
class TestBasic(TestController):
@pytest.mark.parametrize("task_name,client_name", [["__test_task", "__test_client0"]])
def test_process_submission_invalid_task(self, task_name, client_name):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
with pytest.raises(RuntimeError, match=f"Unknown task: {task_name} from client {client_name}."):
controller.process_submission(
client=client, task_name=task_name, task_id=str(uuid.uuid4()), fl_ctx=FLContext(), result=Shareable()
)
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
@pytest.mark.parametrize("num_client_requests", [1, 2, 3, 4])
def test_process_task_request_client_request_multiple_times(self, method, num_client_requests):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
for i in range(num_client_requests):
task_name_out, _, data = controller.process_task_request(client, fl_ctx)
assert task_name_out == "__test_task"
assert data == input_data
assert task.last_client_task_map["__test_client0"].task_send_count == num_client_requests
controller.cancel_task(task)
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_process_submission(self, method):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
task = create_task("__test_task")
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
        # here we make up the client results:
result = Shareable()
result["result"] = "result"
controller.process_submission(
client=client, task_name="__test_task", task_id=client_task_id, fl_ctx=fl_ctx, result=result
)
assert task.last_client_task_map["__test_client0"].result == result
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
@pytest.mark.parametrize("timeout", [1, 2])
def test_task_timeout(self, method, timeout):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
task = create_task(name="__test_task", data=Shareable(), timeout=timeout)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
time.sleep(timeout + 1)
assert controller.get_num_standing_tasks() == 0
assert task.completion_status == TaskCompletionStatus.TIMEOUT
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_cancel_task(self, method):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
task = create_task(name="__test_task")
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
controller.cancel_task(task=task)
controller._check_tasks()
assert controller.get_num_standing_tasks() == 0
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", TestController.ALL_APIS)
def test_cancel_all_tasks(self, method):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
task = create_task("__test_task")
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread)
task1 = create_task("__test_task1")
launch_thread1 = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task1,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [client]},
},
)
get_ready(launch_thread1)
assert controller.get_num_standing_tasks() == 2
controller.cancel_all_tasks()
controller._check_tasks()
assert controller.get_num_standing_tasks() == 0
assert task.completion_status == TaskCompletionStatus.CANCELLED
assert task1.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("method", ["broadcast", "broadcast_and_wait"])
class TestBroadcastBehavior(TestController):
@pytest.mark.parametrize("num_of_clients", [1, 2, 3, 4])
def test_client_receive_only_one_task(self, method, num_of_clients):
controller, fl_ctx, clients = self.setup_system(num_of_clients=num_of_clients)
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": None, "min_responses": num_of_clients},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
for client in clients:
task_name_out = ""
client_task_id = ""
data = None
while task_name_out == "":
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
assert data == input_data
assert task.last_client_task_map[client.name].task_send_count == 1
assert controller.get_num_standing_tasks() == 1
_, next_client_task_id, _ = controller.process_task_request(client, fl_ctx)
assert next_client_task_id == client_task_id
assert task.last_client_task_map[client.name].task_send_count == 2
result = Shareable()
result["result"] = "result"
controller.process_submission(
client=client,
task_name="__test_task",
task_id=client_task_id,
fl_ctx=fl_ctx,
result=result,
)
assert task.last_client_task_map[client.name].result == result
controller._check_tasks()
assert task.completion_status == TaskCompletionStatus.OK
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("num_of_clients", [1, 2, 3, 4])
def test_only_client_in_target_will_get_task(self, method, num_of_clients):
controller, fl_ctx, clients = self.setup_system(num_of_clients=num_of_clients)
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [clients[0]], "min_responses": 0},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
task_name_out = ""
data = None
while task_name_out == "":
task_name_out, client_task_id, data = controller.process_task_request(clients[0], fl_ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
assert data == input_data
assert task.last_client_task_map[clients[0].name].task_send_count == 1
assert controller.get_num_standing_tasks() == 1
for client in clients[1:]:
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
assert task_name_out == ""
assert client_task_id == ""
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("min_responses", [1, 2, 3, 4])
def test_task_only_exit_when_min_responses_received(self, method, min_responses):
controller, fl_ctx, clients = self.setup_system(num_of_clients=min_responses)
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": None, "min_responses": min_responses},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
client_task_ids = []
for client in clients:
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
client_task_ids.append(client_task_id)
assert task_name_out == "__test_task"
for client, client_task_id in zip(clients, client_task_ids):
result = Shareable()
controller._check_tasks()
assert controller.get_num_standing_tasks() == 1
controller.process_submission(
client=client, task_name="__test_task", task_id=client_task_id, result=result, fl_ctx=fl_ctx
)
controller._check_tasks()
assert controller.get_num_standing_tasks() == 0
assert task.completion_status == TaskCompletionStatus.OK
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("min_responses", [1, 2, 3, 4])
@pytest.mark.parametrize("wait_time_after_min_received", [1, 2])
def test_task_exit_quickly_when_all_responses_received(self, method, min_responses, wait_time_after_min_received):
controller, fl_ctx, clients = self.setup_system(num_of_clients=min_responses)
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {
"targets": None,
"min_responses": min_responses,
"wait_time_after_min_received": wait_time_after_min_received,
},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
client_task_ids = []
for client in clients:
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
client_task_ids.append(client_task_id)
assert task_name_out == "__test_task"
for client, client_task_id in zip(clients, client_task_ids):
result = Shareable()
controller.process_submission(
client=client, task_name="__test_task", task_id=client_task_id, result=result, fl_ctx=fl_ctx
)
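        # all responses are in, so the task should complete immediately instead
        # of waiting out wait_time_after_min_received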
controller._check_tasks()
assert controller.get_num_standing_tasks() == 0
assert task.completion_status == TaskCompletionStatus.OK
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("num_clients", [1, 2, 3, 4])
def test_min_resp_is_zero_task_only_exit_when_all_client_task_done(self, method, num_clients):
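        # with min_responses=0, the task should stay standing until every
        # targeted client has submitted a result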
controller, fl_ctx, clients = self.setup_system(num_clients)
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {
"targets": None,
"min_responses": 0,
},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
client_task_ids = []
for client in clients:
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
client_task_ids.append(client_task_id)
assert task_name_out == "__test_task"
controller._check_tasks()
assert controller.get_num_standing_tasks() == 1
for client, client_task_id in zip(clients, client_task_ids):
controller._check_tasks()
assert controller.get_num_standing_tasks() == 1
result = Shareable()
controller.process_submission(
client=client, task_name="__test_task", task_id=client_task_id, result=result, fl_ctx=fl_ctx
)
controller._check_tasks()
assert controller.get_num_standing_tasks() == 0
assert task.completion_status == TaskCompletionStatus.OK
launch_thread.join()
self.teardown_system(controller, fl_ctx)
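# Relay test-case builders. The tuples encode timing-sensitive scenarios: which
# client receives the task depends on the send order, the
# task_assignment_timeout, and how long the test waits before the first request.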
def _process_task_request_test_cases():
"""Returns a list of
targets, request_client, dynamic_targets, task_assignment_timeout, time_before_first_request,
expected_to_get_task, expected_targets
"""
clients = [create_client(f"__test_client{i}") for i in range(3)]
client_names = [c.name for c in clients]
dynamic_targets_cases = [
(clients[1:], clients[0], True, 2, 0, False, [clients[1].name, clients[2].name, clients[0].name]),
(clients[1:], clients[1], True, 2, 0, True, client_names[1:]),
(clients[1:], clients[2], True, 2, 0, False, client_names[1:]),
([clients[0]], clients[1], True, 2, 0, False, [clients[0].name, clients[1].name]),
([clients[0]], clients[1], True, 1, 2, False, [clients[0].name]),
([clients[0], clients[0]], clients[0], True, 1, 0, True, [clients[0].name, clients[0].name]),
(None, clients[0], True, 1, 0, True, [clients[0].name]),
]
static_targets_cases = [
(clients[1:], clients[0], False, 2, 0, False, client_names[1:]),
(clients[1:], clients[1], False, 2, 0, True, client_names[1:]),
(clients[1:], clients[2], False, 2, 0, False, client_names[1:]),
(clients[1:], clients[0], False, 1, 1.5, False, client_names[1:]),
(clients[1:], clients[1], False, 1, 1.5, True, client_names[1:]),
(clients[1:], clients[2], False, 1, 1.5, True, client_names[1:]),
]
return dynamic_targets_cases + static_targets_cases
def _get_sequential_sequence_test_cases():
"""Returns a list of list of clients"""
clients = [create_client(f"__test_client{i}") for i in range(3)]
normal_cases = [list(x) for x in permutations(clients)]
duplicate_clients = [[clients[0], clients[0]], [clients[0], clients[1], clients[0], clients[2]]]
return normal_cases + duplicate_clients
def _get_order_with_task_assignment_timeout_test_cases():
"""Returns a list of
send_order, targets, task_assignment_timeout,
time_before_first_request, request_orders, expected_clients_to_get_task
Each item in request_orders is a request_order.
    In reality, the request orders would be random, because clients do not synchronize with one another.
"""
num_clients = 3
clients = [create_client(name=f"__test_client{i}") for i in range(num_clients)]
    # a few fixed client orderings reused by the cases below
clients_120 = [clients[1], clients[2], clients[0]]
clients_201 = [clients[2], clients[0], clients[1]]
clients_210 = [clients[2], clients[1], clients[0]]
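    # Reading one case as an example: with SEQUENTIAL order, a 2s
    # task_assignment_timeout, and a 3s wait before the first request, asking in
    # the order clients_120 means clients[0]'s assignment window has already
    # lapsed, so the task goes to clients[1], then clients[2], then nobody (None).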
return [
[
SendOrder.SEQUENTIAL,
clients,
2,
1,
[clients, clients, clients],
clients,
],
[
SendOrder.SEQUENTIAL,
clients,
2,
1,
[clients_120, clients_120, clients_120],
clients,
],
[
SendOrder.SEQUENTIAL,
clients,
2,
1,
[clients_201, clients_201, clients_201],
clients,
],
[
SendOrder.SEQUENTIAL,
clients,
2,
3,
[clients, clients, clients],
clients,
],
[
SendOrder.SEQUENTIAL,
clients,
2,
3,
[clients_120, clients_120, clients_120],
[clients[1], clients[2], None],
],
[
SendOrder.SEQUENTIAL,
clients,
2,
3,
[clients_201, clients_201, clients_201],
clients,
],
[
SendOrder.SEQUENTIAL,
clients,
2,
3,
[clients_210, clients_210, clients_210],
[clients[1], clients[2], None],
],
[
SendOrder.SEQUENTIAL,
clients,
2,
3,
[clients_120, clients, clients_120],
[clients[1], clients[2], None],
],
[
SendOrder.SEQUENTIAL,
clients,
2,
5,
[clients, clients, clients],
clients,
],
[
SendOrder.SEQUENTIAL,
clients,
2,
5,
[clients_120, clients_120, clients_120],
[clients[1], clients[2], None],
],
[SendOrder.SEQUENTIAL, clients, 2, 5, [clients_201, clients_201, clients_201], [clients[2], None, None]],
[SendOrder.SEQUENTIAL, clients, 2, 5, [clients_201, clients, clients_120], [clients[2], None, None]],
[
SendOrder.SEQUENTIAL,
[clients[0], clients[1], clients[2], clients[1], clients[0], clients[0]],
2,
5,
[clients, clients, clients, clients, clients, clients],
[clients[0], clients[1], clients[2], clients[1], clients[0], clients[0]],
],
[
SendOrder.SEQUENTIAL,
[clients[0], clients[1], clients[2], clients[1], clients[0], clients[0]],
2,
5,
[clients_201, clients_201, clients_201, clients_201, clients_201, clients_201],
[clients[2], clients[1], clients[0], clients[0], None, None],
],
[
SendOrder.ANY,
clients,
2,
1,
[clients, clients, clients],
clients,
],
[
SendOrder.ANY,
clients,
2,
1,
[clients_120, clients_120, clients_120],
clients_120,
],
[
SendOrder.ANY,
clients,
2,
1,
[clients_201, clients_201, clients_201],
clients_201,
],
[
SendOrder.ANY,
clients,
2,
3,
[clients, clients, clients],
clients,
],
[
SendOrder.ANY,
clients,
2,
3,
[clients_120, clients_120, clients_120],
clients_120,
],
[
SendOrder.ANY,
clients,
2,
3,
[clients_201, clients_201, clients_201],
clients_201,
],
[
SendOrder.ANY,
clients,
2,
3,
[clients_210, clients_210, clients_210],
clients_210,
],
[
SendOrder.ANY,
clients,
2,
3,
[clients_120, clients, clients_120],
[clients[1], clients[0], clients[2]],
],
[
SendOrder.ANY,
clients,
2,
5,
[clients, clients, clients],
clients,
],
[
SendOrder.ANY,
clients,
2,
5,
[clients_120, clients_120, clients_120],
clients_120,
],
[
SendOrder.ANY,
clients,
2,
5,
[clients_201, clients_201, clients_201],
clients_201,
],
[
SendOrder.ANY,
clients,
2,
5,
[clients_201, clients, clients_120],
clients_201,
],
[
SendOrder.ANY,
[clients[0], clients[1], clients[2], clients[1], clients[0], clients[0]],
2,
5,
[clients, clients, clients, clients, clients, clients],
[clients[0], clients[0], clients[0], clients[1], clients[1], clients[2]],
],
[
SendOrder.ANY,
[clients[0], clients[1], clients[2], clients[1], clients[0], clients[0]],
2,
5,
[clients_201, clients_201, clients_201, clients_201, clients_201, clients_201],
[clients[2], clients[0], clients[0], clients[0], clients[1], clients[1]],
],
[
SendOrder.ANY,
[clients[0], clients[1], clients[2], clients[1], clients[0], clients[0]],
2,
5,
[clients_210, clients_210, clients_210, clients_210, clients_201, clients_201, clients],
[clients[2], clients[1], clients[1], clients[0], clients[0], clients[0], None],
],
]
@pytest.mark.parametrize("method", ["relay", "relay_and_wait"])
class TestRelayBehavior(TestController):
@pytest.mark.parametrize("send_order", [SendOrder.ANY, SendOrder.SEQUENTIAL])
def test_only_client_in_target_will_get_task(self, method, send_order):
controller, fl_ctx, clients = self.setup_system(4)
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": [clients[0]], "send_order": send_order},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
task_name_out = ""
data = None
while task_name_out == "":
task_name_out, client_task_id, data = controller.process_task_request(clients[0], fl_ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
assert data == input_data
assert task.last_client_task_map[clients[0].name].task_send_count == 1
assert controller.get_num_standing_tasks() == 1
for client in clients[1:]:
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
assert task_name_out == ""
assert client_task_id == ""
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.teardown_system(controller, fl_ctx)
def test_task_assignment_timeout_sequential_order_only_client_in_target_will_get_task(self, method):
task_assignment_timeout = 3
task_result_timeout = 3
controller, fl_ctx, clients = self.setup_system(4)
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {
"targets": [clients[0]],
"send_order": SendOrder.SEQUENTIAL,
"task_assignment_timeout": task_assignment_timeout,
"task_result_timeout": task_result_timeout,
"dynamic_targets": False,
},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
time.sleep(task_assignment_timeout + 1)
for client in clients[1:]:
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
assert task_name_out == ""
assert client_task_id == ""
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize(
"targets,request_client,dynamic_targets,task_assignment_timeout,time_before_first_request,"
"expected_to_get_task,expected_targets",
_process_task_request_test_cases(),
)
def test_process_task_request(
self,
method,
targets,
request_client,
dynamic_targets,
task_assignment_timeout,
time_before_first_request,
expected_to_get_task,
expected_targets,
):
controller, fl_ctx, clients = self.setup_system()
task = create_task("__test_task")
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {
"targets": targets,
"dynamic_targets": dynamic_targets,
"task_assignment_timeout": task_assignment_timeout,
},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
time.sleep(time_before_first_request)
task_name, task_id, data = controller.process_task_request(client=request_client, fl_ctx=fl_ctx)
        client_got_a_task = task_name == "__test_task"
        assert client_got_a_task == expected_to_get_task
assert task.targets == expected_targets
controller.cancel_task(task)
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("targets", _get_sequential_sequence_test_cases())
def test_sequential_sequence(self, method, targets):
controller, fl_ctx, clients = self.setup_system()
input_data = Shareable()
input_data["result"] = "start_"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": targets, "send_order": SendOrder.SEQUENTIAL},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
expected_client_index = 0
while controller.get_num_standing_tasks() != 0:
client_tasks_and_results = {}
for c in targets:
task_name, task_id, data = controller.process_task_request(client=c, fl_ctx=fl_ctx)
if task_name != "":
client_result = Shareable()
client_result["result"] = f"{c.name}"
if task_id not in client_tasks_and_results:
client_tasks_and_results[task_id] = (c, task_name, client_result)
assert c == targets[expected_client_index]
for task_id in client_tasks_and_results.keys():
c, task_name, client_result = client_tasks_and_results[task_id]
task.data["result"] += client_result["result"]
controller.process_submission(
client=c, task_name=task_name, task_id=task_id, result=client_result, fl_ctx=fl_ctx
)
assert task.last_client_task_map[c.name].result == client_result
expected_client_index += 1
launch_thread.join()
assert task.data["result"] == "start_" + "".join([c.name for c in targets])
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize(
"send_order,targets,task_assignment_timeout,time_before_first_request,request_orders,"
"expected_clients_to_get_task",
_get_order_with_task_assignment_timeout_test_cases(),
)
def test_process_request_and_submission_with_task_assignment_timeout(
self,
method,
send_order,
targets,
request_orders,
task_assignment_timeout,
time_before_first_request,
expected_clients_to_get_task,
):
controller, fl_ctx, clients = self.setup_system()
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {
"targets": targets,
"send_order": send_order,
"task_assignment_timeout": task_assignment_timeout,
},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
time.sleep(time_before_first_request)
for request_order, expected_client_to_get_task in zip(request_orders, expected_clients_to_get_task):
task_name_out = ""
client_task_id = ""
# processing task request
for client in request_order:
if expected_client_to_get_task and client.name == expected_client_to_get_task.name:
data = None
task_name_out = ""
while task_name_out == "":
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
assert data == input_data
assert task.last_client_task_map[client.name].task_send_count == 1
else:
_task_name_out, _client_task_id, _ = controller.process_task_request(client, fl_ctx)
assert _task_name_out == ""
assert _client_task_id == ""
# client side running some logic to generate result
if expected_client_to_get_task:
controller._check_tasks()
assert controller.get_num_standing_tasks() == 1
result = Shareable()
controller.process_submission(
client=expected_client_to_get_task,
task_name=task_name_out,
task_id=client_task_id,
fl_ctx=fl_ctx,
result=result,
)
launch_thread.join()
controller._check_tasks()
assert controller.get_num_standing_tasks() == 0
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("send_order", [SendOrder.ANY, SendOrder.SEQUENTIAL])
def test_process_submission_after_first_client_task_result_timeout(self, method, send_order):
task_assignment_timeout = 1
task_result_timeout = 2
controller, fl_ctx, clients = self.setup_system(2)
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {
"targets": clients,
"send_order": send_order,
"task_assignment_timeout": task_assignment_timeout,
"task_result_timeout": task_result_timeout,
},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
# first client get a task
data = None
task_name_out = ""
old_client_task_id = ""
while task_name_out == "":
task_name_out, old_client_task_id, data = controller.process_task_request(clients[0], fl_ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
assert data == input_data
assert task.last_client_task_map[clients[0].name].task_send_count == 1
time.sleep(task_result_timeout + 1)
        # the same client asking again should get the same task
task_name_out, client_task_id, data = controller.process_task_request(clients[0], fl_ctx)
assert client_task_id == old_client_task_id
assert task.last_client_task_map[clients[0].name].task_send_count == 2
time.sleep(task_result_timeout + 1)
        # the second client should now get the task since task_result_timeout has passed
task_name_out, client_task_id_1, data = controller.process_task_request(clients[1], fl_ctx)
assert task_name_out == "__test_task"
assert data == input_data
assert task.last_client_task_map[clients[1].name].task_send_count == 1
        # then the first client's late result comes back
result = Shareable()
controller.process_submission(
client=clients[0],
task_name=task_name_out,
task_id=client_task_id,
fl_ctx=fl_ctx,
result=result,
)
        # the late reply must be flagged with the REPLY_IS_LATE header
assert result.get_header(ReservedHeaderKey.REPLY_IS_LATE)
controller._check_tasks()
assert controller.get_num_standing_tasks() == 1
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("send_order", [SendOrder.ANY, SendOrder.SEQUENTIAL])
def test_process_submission_all_client_task_result_timeout(self, method, send_order):
task_assignment_timeout = 1
task_result_timeout = 2
controller, fl_ctx, clients = self.setup_system(2)
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {
"targets": clients,
"send_order": send_order,
"task_assignment_timeout": task_assignment_timeout,
"task_result_timeout": task_result_timeout,
},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
        # each client gets a client task, which then times out
for client in clients:
data = None
task_name_out = ""
while task_name_out == "":
task_name_out, old_client_task_id, data = controller.process_task_request(client, fl_ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
assert data == input_data
assert task.last_client_task_map[client.name].task_send_count == 1
time.sleep(task_result_timeout + 1)
if send_order == SendOrder.SEQUENTIAL:
assert task.completion_status == TaskCompletionStatus.TIMEOUT
assert controller.get_num_standing_tasks() == 0
elif send_order == SendOrder.ANY:
assert controller.get_num_standing_tasks() == 1
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.teardown_system(controller, fl_ctx)
def _assert_other_clients_get_no_task(controller, fl_ctx, client_idx: int, clients):
"""Assert clients get no task."""
assert client_idx < len(clients)
for i, client in enumerate(clients):
if i == client_idx:
continue
_task_name_out, _client_task_id, data = controller.process_task_request(client, fl_ctx)
assert _task_name_out == ""
assert _client_task_id == ""
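# Test-case builders for the send/send_and_wait behavior tests below.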
def _get_process_task_request_test_cases():
"""Returns a lit of
targets, send_order, request_client_idx
"""
num_clients = 3
clients = [create_client(name=f"__test_client{i}") for i in range(num_clients)]
return [
[clients, SendOrder.ANY, 0],
[clients, SendOrder.ANY, 1],
[clients, SendOrder.ANY, 2],
[clients, SendOrder.SEQUENTIAL, 0],
]
def _get_process_task_request_with_task_assignment_timeout_test_cases():
"""Returns a list of
targets, send_order, task_assignment_timeout, time_before_first_request, request_order, expected_client_to_get_task
"""
num_clients = 3
clients = [create_client(name=f"__test_client{i}") for i in range(num_clients)]
clients_120 = [clients[1], clients[2], clients[0]]
clients_201 = [clients[2], clients[0], clients[1]]
return [
(clients, SendOrder.SEQUENTIAL, 2, 1, clients, clients[0].name),
(clients, SendOrder.SEQUENTIAL, 2, 1, clients_120, clients[0].name),
(clients, SendOrder.SEQUENTIAL, 2, 1, clients_201, clients[0].name),
(clients, SendOrder.SEQUENTIAL, 2, 3, clients, clients[0].name),
(clients, SendOrder.SEQUENTIAL, 2, 3, clients_120, clients[1].name),
(clients, SendOrder.SEQUENTIAL, 2, 3, clients_201, clients[0].name),
(clients, SendOrder.SEQUENTIAL, 2, 5, clients, clients[0].name),
(clients, SendOrder.SEQUENTIAL, 2, 5, clients_120, clients[1].name),
(clients, SendOrder.SEQUENTIAL, 2, 5, clients_201, clients[2].name),
(clients, SendOrder.SEQUENTIAL, 2, 3, [clients[2], clients[1], clients[0]], clients[1].name),
(clients, SendOrder.ANY, 2, 1, clients, clients[0].name),
(clients, SendOrder.ANY, 2, 1, clients_120, clients[1].name),
(clients, SendOrder.ANY, 2, 1, clients_201, clients[2].name),
]
@pytest.mark.parametrize("method", ["send", "send_and_wait"])
class TestSendBehavior(TestController):
@pytest.mark.parametrize("send_order", [SendOrder.ANY, SendOrder.SEQUENTIAL])
def test_process_task_request_client_not_in_target_get_nothing(self, method, send_order):
controller, fl_ctx, clients = self.setup_system()
client = clients[0]
targets = [create_client("__target_client")]
task = create_task("__test_task")
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": targets, "send_order": send_order},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
        # this client is not in the targets, so it should get nothing
_task_name_out, _client_task_id, data = controller.process_task_request(client, fl_ctx)
assert _task_name_out == ""
assert _client_task_id == ""
controller.cancel_task(task)
controller._check_tasks()
assert controller.get_num_standing_tasks() == 0
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("targets,send_order,client_idx", _get_process_task_request_test_cases())
def test_process_task_request_expected_client_get_task_and_unexpected_clients_get_nothing(
self, method, targets, send_order, client_idx
):
controller, fl_ctx, clients = self.setup_system()
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": targets, "send_order": SendOrder.ANY},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
        # the expected client pulls the task
task_name_out = ""
data = None
while task_name_out == "":
task_name_out, client_task_id, data = controller.process_task_request(targets[client_idx], fl_ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
assert data == input_data
assert task.last_client_task_map[targets[client_idx].name].task_send_count == 1
# other clients
_assert_other_clients_get_no_task(controller=controller, fl_ctx=fl_ctx, client_idx=client_idx, clients=targets)
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
controller._check_tasks()
assert controller.get_num_standing_tasks() == 0
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize(
"targets,send_order,task_assignment_timeout,"
"time_before_first_request,request_order,expected_client_to_get_task",
_get_process_task_request_with_task_assignment_timeout_test_cases(),
)
def test_process_task_request_with_task_assignment_timeout_expected_client_get_task(
self,
method,
targets,
send_order,
task_assignment_timeout,
time_before_first_request,
request_order,
expected_client_to_get_task,
):
controller, fl_ctx, clients = self.setup_system()
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {
"targets": targets,
"send_order": send_order,
"task_assignment_timeout": task_assignment_timeout,
},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
time.sleep(time_before_first_request)
for client in request_order:
data = None
if client.name == expected_client_to_get_task:
task_name_out = ""
while task_name_out == "":
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
assert data == input_data
assert task.last_client_task_map[client.name].task_send_count == 1
else:
task_name_out, client_task_id, data = controller.process_task_request(client, fl_ctx)
assert task_name_out == ""
assert client_task_id == ""
controller.cancel_task(task)
assert task.completion_status == TaskCompletionStatus.CANCELLED
launch_thread.join()
self.teardown_system(controller, fl_ctx)
@pytest.mark.parametrize("num_of_clients", [1, 2, 3])
def test_send_only_one_task_and_exit_when_client_task_done(self, method, num_of_clients):
controller, fl_ctx, clients = self.setup_system()
input_data = Shareable()
input_data["hello"] = "world"
task = create_task("__test_task", data=input_data)
launch_thread = threading.Thread(
target=launch_task,
kwargs={
"controller": controller,
"task": task,
"method": method,
"fl_ctx": fl_ctx,
"kwargs": {"targets": clients, "send_order": SendOrder.SEQUENTIAL},
},
)
get_ready(launch_thread)
assert controller.get_num_standing_tasks() == 1
# first client
task_name_out = ""
client_task_id = ""
data = None
while task_name_out == "":
task_name_out, client_task_id, data = controller.process_task_request(clients[0], fl_ctx)
time.sleep(0.1)
assert task_name_out == "__test_task"
assert data == input_data
assert task.last_client_task_map[clients[0].name].task_send_count == 1
        # once a client gets the task, the other clients should not get it
_assert_other_clients_get_no_task(controller=controller, fl_ctx=fl_ctx, client_idx=0, clients=clients)
controller._check_tasks()
assert controller.get_num_standing_tasks() == 1
controller.process_submission(
client=clients[0], task_name="__test_task", task_id=client_task_id, fl_ctx=fl_ctx, result=data
)
controller._check_tasks()
assert controller.get_num_standing_tasks() == 0
assert task.completion_status == TaskCompletionStatus.OK
launch_thread.join()
self.teardown_system(controller, fl_ctx)
| NVFlare-main | tests/unit_test/apis/impl/controller_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/apis/impl/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.apis.utils.format_check import name_check
class TestNameCheck:
@pytest.mark.parametrize("name, err_value", [["bad***", True], ["bad?!", True], ["bad{}", True], ["good", False]])
def test_org(self, name, err_value):
err, reason = name_check(name, "org")
assert err == err_value
@pytest.mark.parametrize(
"name, err_value",
[
["localhost", False],
["mylocalmachine", False],
["bad_name", True],
[" badname", True],
["bad_name.com", True],
["good-name.com", False],
],
)
def test_server(self, name, err_value):
err, reason = name_check(name, "server")
assert err == err_value
@pytest.mark.parametrize("name, err_value", [["*.-", True], ["good-name", False], ["good_name", False]])
def test_client(self, name, err_value):
err, reason = name_check(name, "client")
assert err == err_value
@pytest.mark.parametrize(
"name, err_value", [["bad_email*", True], ["bad_email", True], ["bad_email@", True], ["bad_email@123", True]]
)
def test_admin(self, name, err_value):
err, reason = name_check(name, "admin")
assert err == err_value
err, reason = name_check(name, "email")
assert err == err_value
| NVFlare-main | tests/unit_test/apis/utils/format_check_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/apis/utils/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import tempfile
from pathlib import Path
import pytest
from nvflare.apis.utils.job_utils import convert_legacy_zipped_app_to_job
from nvflare.fuel.utils.zip_utils import unzip_all_from_bytes, zip_directory_to_bytes
def create_fake_app(app_root: Path):
os.makedirs(app_root)
os.mkdir(app_root / "config")
open(app_root / "config" / "config_fed_server.json", "w").close()
open(app_root / "config" / "config_fed_client.json", "w").close()
os.mkdir(app_root / "custom")
open(app_root / "custom" / "c1.py", "w").close()
open(app_root / "custom" / "c2.py", "w").close()
def create_fake_job(temp_dir, job_name, app_name):
root_dir = Path(temp_dir) / job_name
os.makedirs(root_dir)
create_fake_app(root_dir / app_name)
with open(root_dir / "meta.json", "w") as f:
f.write("{}")
@pytest.fixture()
def create_fake_app_dir():
"""
app/
config/
config_fed_server.json
config_fed_client.json
custom/
c1.py
c2.py
    expected result:
app/
app/
config/
config_fed_server.json
config_fed_client.json
custom/
c1.py
c2.py
meta.json
"""
temp_dir = tempfile.mkdtemp()
app_name = "app"
root_dir = Path(temp_dir) / app_name
create_fake_app(root_dir)
temp_dir2 = tempfile.mkdtemp()
create_fake_job(temp_dir2, app_name, app_name)
yield temp_dir, app_name, temp_dir2
shutil.rmtree(temp_dir)
shutil.rmtree(temp_dir2)
@pytest.fixture()
def create_fake_job_dir():
"""
fed_avg/
app/
config/
config_fed_server.json
config_fed_client.json
custom/
c1.py
c2.py
meta.json
"""
temp_dir = tempfile.mkdtemp()
job_name = "fed_avg"
app_name = "app"
create_fake_job(temp_dir, job_name, app_name)
yield temp_dir, job_name
shutil.rmtree(temp_dir)
class TestJobUtils:
def test_convert_legacy_zip_job(self, create_fake_job_dir):
tmp_dir, job_name = create_fake_job_dir
zip_data = zip_directory_to_bytes(root_dir=tmp_dir, folder_name=job_name)
new_bytes = convert_legacy_zipped_app_to_job(zip_data)
output_tmp_dir = tempfile.mkdtemp()
unzip_all_from_bytes(new_bytes, output_dir_name=output_tmp_dir)
        # the directory structure stays the same
for i, j in zip(os.walk(tmp_dir), os.walk(output_tmp_dir)):
assert i[1] == j[1]
assert i[2] == j[2]
shutil.rmtree(output_tmp_dir)
def test_convert_legacy_zip_app(self, create_fake_app_dir):
tmp_dir, app_name, tmp_dir_with_job = create_fake_app_dir
zip_data = zip_directory_to_bytes(root_dir=tmp_dir, folder_name=app_name)
new_bytes = convert_legacy_zipped_app_to_job(zip_data)
output_tmp_dir = tempfile.mkdtemp()
unzip_all_from_bytes(new_bytes, output_dir_name=output_tmp_dir)
for i, j in zip(os.walk(tmp_dir_with_job), os.walk(output_tmp_dir)):
assert i[1] == j[1]
assert i[2] == j[2]
shutil.rmtree(output_tmp_dir)
| NVFlare-main | tests/unit_test/apis/utils/job_utils_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from argparse import Namespace
from typing import Any
from nvflare.apis.analytix import AnalyticsDataType
from nvflare.apis.client import Client
from nvflare.apis.dxo import DXO, DataKind
from nvflare.apis.fl_constant import ReservedKey, ServerCommandKey
from nvflare.apis.fl_context import FLContext
from nvflare.apis.fl_snapshot import RunSnapshot
from nvflare.apis.shareable import Shareable
from nvflare.apis.signal import Signal
from nvflare.apis.utils.decomposers import flare_decomposers
from nvflare.fuel.utils import fobs
class TestFlareDecomposers:
ID1 = "abc"
ID2 = "xyz"
@classmethod
def setup_class(cls):
flare_decomposers.register()
def test_nested_shareable(self):
shareable = Shareable()
shareable[ReservedKey.TASK_ID] = TestFlareDecomposers.ID1
command_shareable = Shareable()
command_shareable[ReservedKey.TASK_ID] = TestFlareDecomposers.ID2
command_shareable.set_header(ServerCommandKey.SHAREABLE, shareable)
new_command_shareable = self._run_fobs(command_shareable)
assert new_command_shareable[ReservedKey.TASK_ID] == TestFlareDecomposers.ID2
new_shareable = new_command_shareable.get_header(ServerCommandKey.SHAREABLE)
assert new_shareable[ReservedKey.TASK_ID] == TestFlareDecomposers.ID1
def test_fl_context(self):
context = FLContext()
context.set_prop("A", "test")
context.set_prop("B", 123)
new_context = self._run_fobs(context)
assert new_context.get_prop("A") == context.get_prop("A")
assert new_context.get_prop("B") == context.get_prop("B")
def test_dxo(self):
dxo = DXO(DataKind.WEIGHTS, {"A": 123})
dxo.set_meta_prop("B", "test")
new_dxo = self._run_fobs(dxo)
assert new_dxo.data_kind == DataKind.WEIGHTS
assert new_dxo.get_meta_prop("B") == "test"
def test_client(self):
client = Client("Name", "Token")
client.set_prop("A", "test")
new_client = self._run_fobs(client)
assert new_client.name == client.name
assert new_client.token == client.token
assert new_client.get_prop("A") == client.get_prop("A")
def test_run_snapshot(self):
snapshot = RunSnapshot("Job-ID")
snapshot.set_component_snapshot("comp_id", {"A": 123})
new_snapshot = self._run_fobs(snapshot)
assert new_snapshot.job_id == snapshot.job_id
assert new_snapshot.get_component_snapshot("comp_id") == snapshot.get_component_snapshot("comp_id")
def test_signal(self):
signal = Signal()
signal.trigger("test")
new_signal = self._run_fobs(signal)
assert new_signal.value == signal.value
assert new_signal.trigger_time == signal.trigger_time
assert new_signal.triggered == signal.triggered
# The decomposer for the enum is auto-registered
def test_analytics_data_type(self):
adt = AnalyticsDataType.SCALARS
new_adt = self._run_fobs(adt)
assert new_adt == adt
def test_namespace(self):
ns = Namespace(a="foo", b=123)
new_ns = self._run_fobs(ns)
assert new_ns.a == ns.a
assert new_ns.b == ns.b
@staticmethod
def _run_fobs(data: Any) -> Any:
buf = fobs.dumps(data)
return fobs.loads(buf)
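# Note: fobs.dumps/loads is the full serialize-deserialize round trip; each object above
# resolves through a Decomposer registered by flare_decomposers.register(), while enums
# such as AnalyticsDataType are auto-registered by fobs itself.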
| NVFlare-main | tests/unit_test/apis/utils/decomposers/flare_decomposers_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/apis/utils/decomposers/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pathlib import Path
from unittest.mock import patch
import pytest
from pyhocon import ConfigFactory as CF
from nvflare.utils.cli_utils import (
append_if_not_in_list,
create_startup_kit_config,
get_hidden_nvflare_config_path,
get_hidden_nvflare_dir,
)
class TestCLIUtils:
def test_get_hidden_nvflare_dir(self):
hidden_dir = get_hidden_nvflare_dir()
assert str(hidden_dir) == str(Path.home() / ".nvflare")
def test_get_hidden_nvflare_config_path(self):
assert get_hidden_nvflare_config_path(str(get_hidden_nvflare_dir())) == str(
Path.home() / ".nvflare/config.conf"
)
def test_create_startup_kit_config(self):
with patch("nvflare.utils.cli_utils.check_startup_dir", side_effect=None) as mock:
mock.return_value = ""
with patch("os.path.isdir", side_effect=None) as mock1:
mock1.return_value = True
prev_conf = CF.parse_string(
"""
poc_workspace {
path = "/tmp/nvflare/poc"
}
"""
)
config = create_startup_kit_config(
nvflare_config=prev_conf, startup_kit_dir="/tmp/nvflare/poc/example_project/prod_00"
)
assert "/tmp/nvflare/poc" == config.get("poc_workspace.path")
assert "/tmp/nvflare/poc/example_project/prod_00" == config.get("startup_kit.path")
config = create_startup_kit_config(nvflare_config=prev_conf, startup_kit_dir="")
assert config.get("startup_kit.path", None) is None
@pytest.mark.parametrize(
"inputs, result", [(([], "a"), ["a"]), ((["a"], "a"), ["a"]), ((["a", "b"], "b"), ["a", "b"])]
)
def test_append_if_not_in_list(self, inputs, result):
arr, item = inputs
assert result == append_if_not_in_list(arr, item)
| NVFlare-main | tests/unit_test/utils/cli_utils_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nvflare.utils.decorators import collect_time, measure_time
class MyClass:
def __init__(self):
super().__init__()
@measure_time
def method1(self):
# Some code here
pass
    @collect_time
    def method2(self) -> dict:
        self.method1()
        return {}
def total(self):
for i in range(1000):
self.method2()
print(" total time (ms) took = ", self.method2.time_taken)
print(" total count took = ", self.method2.count)
@collect_time
def method3(self, x: dict):
self.method1()
pass
class TestDecorators:
def test_code_timer_on_fn(self):
@measure_time
def fn1(x: int, *, a: int, b: int, c: str):
pass
a1 = fn1(100, a=1, b=2, c="three")
print(fn1.time_taken)
assert fn1.time_taken > 0
def test_code_timer_on_class_fn(self):
c = MyClass()
c.total()
c.method1()
assert c.method1.time_taken > 0
c.method2()
assert c.method2.time_taken > 0
c.method2(reset=True)
assert c.method2.time_taken == 0
assert c.method2.count == 0
for i in range(100):
c.method3(c.method2())
assert c.method2.time_taken > 0
assert c.method3.time_taken > 0
assert c.method2.count == 100
assert c.method3.count == 100
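# Usage notes, as the assertions above demonstrate: @measure_time records a call's elapsed
# time in fn.time_taken, while @collect_time accumulates fn.time_taken and fn.count across
# calls and zeroes both when the wrapped function is invoked with reset=True.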
| NVFlare-main | tests/unit_test/utils/decorator_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/utils/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
CLIENT1 = {"name": "site-1", "organization": "test.com", "capacity": {"num_gpus": 16, "mem_per_gpu_in_GiB": 64}}
CLIENT2 = {"name": "site-2", "organization": "example.com", "capacity": {"num_gpus": 4, "mem_per_gpu_in_GiB": 32}}
NEW_ORG = "company.com"
class TestClients:
@pytest.fixture(scope="session")
def client_ids(self, auth_header, client):
response1 = client.post("/api/v1/clients", json=CLIENT1, headers=auth_header)
assert response1.status_code == 201
response2 = client.post("/api/v1/clients", json=CLIENT2, headers=auth_header)
assert response2.status_code == 201
return [response1.json["client"]["id"], response2.json["client"]["id"]]
def test_create_clients(self, client_ids):
        # The fixture already exercises the create calls
assert len(client_ids) == 2
def test_get_all_clients(self, client, client_ids, auth_header):
response = client.get("/api/v1/clients", headers=auth_header)
assert response.status_code == 200
assert len(response.json["client_list"]) == len(client_ids)
def test_get_one_client(self, client, client_ids, auth_header):
client_id = client_ids[0]
response = client.get("/api/v1/clients/" + str(client_id), headers=auth_header)
assert response.status_code == 200
assert response.json["client"]["id"] == client_id
assert response.json["client"]["name"] == CLIENT1["name"]
def test_update_client(self, client, client_ids, auth_header):
client_id = client_ids[0]
response = client.patch(
"/api/v1/clients/" + str(client_id), json={"organization": NEW_ORG}, headers=auth_header
)
assert response.status_code == 200
# Retrieve through API again
response = client.get("/api/v1/clients/" + str(client_id), headers=auth_header)
assert response.status_code == 200
assert response.json["client"]["organization"] == NEW_ORG
| NVFlare-main | tests/unit_test/dashboard/clients_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import tempfile
import pytest
from nvflare.dashboard.application import init_app
TEST_USER = "[email protected]"
TEST_PW = "testing1234"
@pytest.fixture(scope="session")
def app():
web_root = tempfile.mkdtemp(prefix="nvflare-")
sqlite_file = os.path.join(web_root, "db.sqlite")
if os.path.exists(sqlite_file):
os.remove(sqlite_file)
os.environ["DATABASE_URL"] = f"sqlite:///{sqlite_file}"
os.environ["NVFL_CREDENTIAL"] = f"{TEST_USER}:{TEST_PW}"
app = init_app()
app.config.update(
{
"TESTING": True,
"ENV": "prod", # To get rid of the performance warning
}
)
yield app
# Cleanup
shutil.rmtree(web_root, ignore_errors=True)
@pytest.fixture(scope="session")
def client(app):
return app.test_client()
@pytest.fixture(scope="session")
def access_token(client):
response = client.post("/api/v1/login", json={"email": TEST_USER, "password": TEST_PW})
assert response.status_code == 200
return response.json["access_token"]
@pytest.fixture(scope="session")
def auth_header(access_token):
return {"Authorization": "Bearer " + access_token}
| NVFlare-main | tests/unit_test/dashboard/conftest.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
USER_NAME = "Test User"
class TestUsers:
@pytest.fixture(scope="session")
def first_user_id(self, auth_header, client):
response = client.get("/api/v1/users", headers=auth_header)
assert response.status_code == 200
user_list = response.json["user_list"]
assert len(user_list) >= 1
return user_list[0]["id"]
def test_get_all_users(self, first_user_id):
        # get_all_users is already exercised by the first_user_id fixture
assert first_user_id
def test_get_one_user(self, auth_header, client, first_user_id):
user = self._get_one_user(auth_header, client, first_user_id)
assert user["id"] == first_user_id
def test_update_user(self, auth_header, client, first_user_id):
user = {"name": USER_NAME}
response = client.patch("/api/v1/users/" + str(first_user_id), json=user, headers=auth_header)
assert response.status_code == 200
new_user = self._get_one_user(auth_header, client, first_user_id)
assert new_user["name"] == USER_NAME
def test_create_and_delete(self, auth_header, client):
test_user = {
"name": USER_NAME,
"email": "[email protected]",
"password": "pw123456",
"organization": "test.com",
"role": "org_admin",
"approval_state": 200,
}
response = client.post("/api/v1/users", json=test_user, headers=auth_header)
assert response.status_code == 201
new_id = response.json["user"]["id"]
response = client.delete("/api/v1/users/" + str(new_id), headers=auth_header)
assert response.status_code == 200
# Make sure user is deleted
response = client.get("/api/v1/users/" + str(new_id), headers=auth_header)
assert response.status_code == 200
        # The API returns an empty dict for a non-existent user
assert len(response.json["user"]) == 0
def _get_one_user(self, auth_header, client, user_id):
response = client.get("/api/v1/users/" + str(user_id), headers=auth_header)
assert response.status_code == 200
return response.json["user"]
| NVFlare-main | tests/unit_test/dashboard/users_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/dashboard/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class TestProject:
def test_login(self, access_token):
# login is already tested if access_token is not empty
assert access_token
def test_get_project(self, client, auth_header):
response = client.get("/api/v1/project", headers=auth_header)
assert response.status_code == 200
assert response.json["project"]
def test_get_orgs(self, client, auth_header):
response = client.get("/api/v1/organizations", headers=auth_header)
assert response.status_code == 200
assert response.json["client_list"]
| NVFlare-main | tests/unit_test/dashboard/project_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/private/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/private/fed/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import shutil
import tempfile
import unittest
from unittest.mock import patch
import pytest
from nvflare.apis.fl_constant import WorkspaceConstants
from nvflare.fuel.hci.server.authz import AuthorizationService
from nvflare.fuel.sec.audit import AuditService
from nvflare.private.fed.app.deployer.simulator_deployer import SimulatorDeployer
from nvflare.private.fed.app.simulator.simulator import define_simulator_parser
from nvflare.private.fed.client.fed_client import FederatedClient
from nvflare.private.fed.simulator.simulator_server import SimulatorServer
from nvflare.security.security import EmptyAuthorizer
@pytest.mark.xdist_group(name="simulator_deploy")
class TestSimulatorDeploy(unittest.TestCase):
def setUp(self) -> None:
self.deployer = SimulatorDeployer()
AuthorizationService.initialize(EmptyAuthorizer())
AuditService.initialize(audit_file_name=WorkspaceConstants.AUDIT_LOG)
def tearDown(self) -> None:
self.deployer.close()
def _create_parser(self):
parser = argparse.ArgumentParser()
define_simulator_parser(parser)
parser.add_argument("--set", metavar="KEY=VALUE", nargs="*")
return parser
def test_create_server(self):
with patch("nvflare.private.fed.app.utils.FedAdminServer") as mock_admin:
workspace = tempfile.mkdtemp()
parser = self._create_parser()
args = parser.parse_args(["job_folder", "-w" + workspace, "-n 2", "-t 1"])
_, server = self.deployer.create_fl_server(args)
assert isinstance(server, SimulatorServer)
server.cell.stop()
shutil.rmtree(workspace)
@patch("nvflare.private.fed.client.fed_client.FederatedClient.register")
# @patch("nvflare.private.fed.app.deployer.simulator_deployer.FederatedClient.start_heartbeat")
# @patch("nvflare.private.fed.app.deployer.simulator_deployer.FedAdminAgent")
def test_create_client(self, mock_register):
workspace = tempfile.mkdtemp()
parser = self._create_parser()
args = parser.parse_args(["job_folder", "-w" + workspace, "-n 2", "-t 1"])
client, _, _, _ = self.deployer.create_fl_client("client0", args)
assert isinstance(client, FederatedClient)
client.cell.stop()
shutil.rmtree(workspace)
| NVFlare-main | tests/unit_test/private/fed/app/deployer/simulator_deployer_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import tempfile
from unittest.mock import patch
import pytest
from nvflare.private.fed.app.simulator.simulator_runner import SimulatorRunner
from nvflare.private.fed.utils.fed_utils import split_gpus
class TestSimulatorRunner:
def setup_method(self) -> None:
self.workspace = tempfile.mkdtemp()
def teardown_method(self) -> None:
shutil.rmtree(self.workspace)
@patch("nvflare.private.fed.app.deployer.simulator_deployer.SimulatorServer.deploy")
@patch("nvflare.private.fed.app.utils.FedAdminServer")
@patch("nvflare.private.fed.client.fed_client.FederatedClient.register")
# @patch("nvflare.private.fed.app.deployer.simulator_deployer.FederatedClient.start_heartbeat")
# @patch("nvflare.private.fed.app.deployer.simulator_deployer.FedAdminAgent")
def test_valid_job_simulate_setup(self, mock_server, mock_admin, mock_register):
workspace = tempfile.mkdtemp()
job_folder = os.path.join(os.path.dirname(__file__), "../../../../data/jobs/valid_job")
runner = SimulatorRunner(job_folder=job_folder, workspace=workspace, threads=1)
assert runner.setup()
expected_clients = ["site-1", "site-2"]
client_names = []
for client in runner.client_names:
client_names.append(client)
assert sorted(client_names) == sorted(expected_clients)
@patch("nvflare.private.fed.app.deployer.simulator_deployer.SimulatorServer.deploy")
@patch("nvflare.private.fed.app.utils.FedAdminServer")
@patch("nvflare.private.fed.client.fed_client.FederatedClient.register")
# @patch("nvflare.private.fed.app.deployer.simulator_deployer.FederatedClient.start_heartbeat")
# @patch("nvflare.private.fed.app.deployer.simulator_deployer.FedAdminAgent")
def test_client_names_setup(self, mock_server, mock_admin, mock_register):
workspace = tempfile.mkdtemp()
job_folder = os.path.join(os.path.dirname(__file__), "../../../../data/jobs/valid_job")
runner = SimulatorRunner(job_folder=job_folder, workspace=workspace, clients="site-1", threads=1)
assert runner.setup()
expected_clients = ["site-1"]
client_names = []
for client in runner.client_names:
client_names.append(client)
assert sorted(client_names) == sorted(expected_clients)
@patch("nvflare.private.fed.app.deployer.simulator_deployer.SimulatorServer.deploy")
@patch("nvflare.private.fed.app.utils.FedAdminServer")
@patch("nvflare.private.fed.client.fed_client.FederatedClient.register")
# @patch("nvflare.private.fed.app.deployer.simulator_deployer.FederatedClient.start_heartbeat")
# @patch("nvflare.private.fed.app.deployer.simulator_deployer.FedAdminAgent")
def test_no_app_for_client(self, mock_server, mock_admin, mock_register):
workspace = tempfile.mkdtemp()
job_folder = os.path.join(os.path.dirname(__file__), "../../../../data/jobs/valid_job")
runner = SimulatorRunner(job_folder=job_folder, workspace=workspace, n_clients=3, threads=1)
assert not runner.setup()
@pytest.mark.parametrize(
"client_names, gpus, expected_split_names",
[
(["1", "2", "3", "4"], ["0", "1"], [["1", "3"], ["2", "4"]]),
(["1", "2", "3", "4", "5"], ["0", "1"], [["1", "3", "5"], ["2", "4"]]),
(["1", "2", "3", "4", "5"], ["0", "1", "2"], [["1", "4"], ["2", "5"], ["3"]]),
],
)
def test_split_names(self, client_names, gpus, expected_split_names):
runner = SimulatorRunner(job_folder="", workspace="")
split_names = runner.split_clients(client_names, gpus)
assert sorted(split_names) == sorted(expected_split_names)
@pytest.mark.parametrize(
"gpus, expected_gpus",
[
("[0,1],[1, 2]", ["0,1", "1,2"]),
("[0,1],[3]", ["0,1", "3"]),
("[0,1],[ 3 ]", ["0,1", "3"]),
("[02,1],[ a ]", ["02,1", "a"]),
("[]", [""]),
("[0,1],3", ["0,1", "3"]),
("[0,1],[1,2,3],3", ["0,1", "1,2,3", "3"]),
("0,1,2", ["0", "1", "2"]),
],
)
def test_split_gpus_success(self, gpus, expected_gpus):
splitted_gpus = split_gpus(gpus)
assert splitted_gpus == expected_gpus
@pytest.mark.parametrize(
"gpus",
[
"[0,1],3]",
"0,1,[2",
"[0,1]extra",
"[1, [2, 3], 4]",
],
)
def test_split_gpus_fail(self, gpus):
with pytest.raises(ValueError):
split_gpus(gpus)
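# Accepted GPU-spec grammar, as exercised above: a comma-separated list whose items are
# bare tokens ("3") or bracketed groups ("[0,1]"); unbalanced brackets, nested groups, and
# trailing text after a group all raise ValueError.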
| NVFlare-main | tests/unit_test/private/fed/app/simulator/simulator_runner_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import MagicMock, patch
import pytest
from nvflare.private.defs import CellMessageHeaderKeys, new_cell_message
from nvflare.private.fed.server.fed_server import FederatedServer
from nvflare.private.fed.server.server_state import ColdState, HotState
class TestFederatedServer:
@pytest.mark.parametrize("server_state, expected", [(HotState(), ["extra_job"]), (ColdState(), [])])
def test_heart_beat_abort_jobs(self, server_state, expected):
with patch("nvflare.private.fed.server.fed_server.ServerEngine") as mock_engine:
server = FederatedServer(
project_name="project_name",
min_num_clients=1,
max_num_clients=100,
cmd_modules=None,
heart_beat_timeout=600,
args=MagicMock(),
secure_train=False,
snapshot_persistor=MagicMock(),
overseer_agent=MagicMock(),
)
server.server_state = server_state
request = new_cell_message(
{
CellMessageHeaderKeys.TOKEN: "token",
CellMessageHeaderKeys.SSID: "ssid",
CellMessageHeaderKeys.CLIENT_NAME: "client_name",
CellMessageHeaderKeys.PROJECT_NAME: "task_name",
CellMessageHeaderKeys.JOB_IDS: ["extra_job"],
}
)
result = server.client_heartbeat(request)
assert result.get_header(CellMessageHeaderKeys.ABORT_JOBS, []) == expected
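# Note: the heartbeat carries the client's known job ids; a server in HotState answers
# with the ids it wants aborted (here "extra_job"), while a ColdState server returns an
# empty abort list.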
| NVFlare-main | tests/unit_test/private/fed/server/fed_server_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/private/fed/server/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from argparse import Namespace
from typing import List
import pytest
from nvflare.private.fed.server.job_cmds import _create_list_job_cmd_parser
TEST_CASES = [
(
["-d", "-u", "12345", "-n", "hello_", "-m", "3"],
Namespace(u=True, d=True, job_id="12345", m=3, n="hello_", r=False),
),
(
["12345", "-d", "-u", "-n", "hello_", "-m", "3"],
Namespace(u=True, d=True, job_id="12345", m=3, n="hello_", r=False),
),
(["-d", "-u", "-n", "hello_", "-m", "3"], Namespace(u=True, d=True, job_id=None, m=3, n="hello_", r=False)),
(["-u", "-n", "hello_", "-m", "5"], Namespace(u=True, d=False, job_id=None, m=5, n="hello_", r=False)),
(["-u"], Namespace(u=True, d=False, job_id=None, m=None, n=None, r=False)),
(["-r"], Namespace(u=False, d=False, job_id=None, m=None, n=None, r=True)),
(["nvflare"], Namespace(u=False, d=False, job_id="nvflare", m=None, n=None, r=False)),
]
class TestListJobCmdParser:
@pytest.mark.parametrize("args, expected_args", TEST_CASES)
def test_parse_args(self, args: List[str], expected_args):
parser = _create_list_job_cmd_parser()
parsed_args = parser.parse_args(args)
assert parsed_args == expected_args
| NVFlare-main | tests/unit_test/private/fed/server/job_cmds_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os
import sys
import zipfile
from zipfile import ZipFile
import pytest
from nvflare.apis.fl_constant import JobConstants
from nvflare.apis.utils.job_utils import convert_legacy_zipped_app_to_job
from nvflare.fuel.utils.zip_utils import get_all_file_paths, normpath_for_zip, split_path
from nvflare.private.fed.server.job_meta_validator import JobMetaValidator
def _zip_directory_with_meta(root_dir: str, folder_name: str, meta: str, writer: io.BytesIO):
dir_name = normpath_for_zip(os.path.join(root_dir, folder_name))
assert os.path.exists(dir_name), 'directory "{}" does not exist'.format(dir_name)
assert os.path.isdir(dir_name), '"{}" is not a valid directory'.format(dir_name)
file_paths = get_all_file_paths(dir_name)
if folder_name:
prefix_len = len(split_path(dir_name)[0]) + 1
else:
prefix_len = len(dir_name) + 1
with ZipFile(writer, "w", compression=zipfile.ZIP_DEFLATED) as z:
# writing each file one by one
for full_path in file_paths:
rel_path = full_path[prefix_len:]
if len(meta) > 0 and rel_path.endswith(JobConstants.META_FILE):
z.writestr(rel_path, meta)
else:
z.write(full_path, arcname=rel_path)
def _zip_job_with_meta(folder_name: str, meta: str) -> bytes:
job_path = os.path.join(os.path.dirname(__file__), "../../../data/jobs")
bio = io.BytesIO()
_zip_directory_with_meta(job_path, folder_name, meta, bio)
zip_data = bio.getvalue()
return convert_legacy_zipped_app_to_job(zip_data)
META_WITH_VALID_DEPLOY_MAP = [
pytest.param({"deploy_map": {"app1": ["@ALL"]}}, id="all"),
pytest.param({"deploy_map": {"app1": ["@ALL"], "app2": []}}, id="all_idle"),
pytest.param({"deploy_map": {"app1": ["server", "site-1", "site-2"], "app2": []}}, id="idle_app"),
pytest.param({"deploy_map": {"app1": ["server", "site-1", "site-2"]}}, id="one_app"),
pytest.param({"deploy_map": {"app1": ["server", "site-1"], "app2": ["site-2"]}}, id="two_app"),
]
META_WITH_INVALID_DEPLOY_MAP = [
pytest.param({"deploy_map": {"app1": ["@ALL", "server"]}}, id="all_other"),
pytest.param({"deploy_map": {"app1": ["@ALL"], "app2": ["@all"]}}, id="dup_all"),
pytest.param({"deploy_map": {"app1": ["server", "site-1", "site-2"], "app2": ["site-2"]}}, id="dup_client"),
pytest.param({"deploy_map": {"app1": ["server", "site-1"], "app2": ["server", "site-2"]}}, id="dup_server"),
pytest.param({"deploy_map": {}}, id="empty_deploy_map"),
pytest.param({"deploy_map": {"app1": []}}, id="no_deployment"),
pytest.param({"deploy_map": {"app1": [], "app2": []}}, id="no_deployment_two_apps"),
]
VALID_JOBS = [
pytest.param("valid_job", id="valid_job"),
pytest.param("valid_job_deployment_all_idle", id="valid_job_deployment_all_idle"),
pytest.param("valid_app_as_job", id="valid_app_wo_meta"),
]
INVALID_JOBS = [
pytest.param("duplicate_clients", id="duplicate_clients"),
pytest.param("duplicate_server", id="duplicate_server"),
pytest.param("invalid_resource_spec_data_type", id="invalid_resource_spec_data_type"),
pytest.param("mandatory_not_met", id="mandatory_not_met"),
pytest.param("missing_app", id="missing_app"),
pytest.param("missing_client_config", id="missing_client_config"),
pytest.param("missing_server_config", id="missing_server_config"),
pytest.param("missing_server_in_deployment", id="missing_server_in_deploy_map"),
pytest.param("no_deployment", id="no_deployment"),
pytest.param("not_enough_clients", id="not_enough_clients"),
]
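# For reference, a minimal meta that these validations accept might look like the sketch
# below (inferred from the valid cases above, not a complete schema):
#   {"name": "sag", "deploy_map": {"app1": ["@ALL"]}}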
class TestJobMetaValidator:
@classmethod
def setup_class(cls):
cls.validator = JobMetaValidator()
@pytest.mark.parametrize("meta", META_WITH_VALID_DEPLOY_MAP)
def test_validate_valid_deploy_map(self, meta):
site_list = JobMetaValidator._validate_deploy_map("unit_test", meta)
assert site_list
@pytest.mark.parametrize("meta", META_WITH_INVALID_DEPLOY_MAP)
def test_validate_invalid_deploy_map(self, meta):
with pytest.raises(ValueError):
JobMetaValidator._validate_deploy_map("unit_test", meta)
@pytest.mark.parametrize("job_name", VALID_JOBS)
def test_validate_valid_jobs(self, job_name):
self._assert_valid(job_name)
@pytest.mark.parametrize("job_name", INVALID_JOBS)
def test_validate_invalid_jobs(self, job_name):
self._assert_invalid(job_name)
@pytest.mark.parametrize(
"min_clients",
[
pytest.param(-1, id="negative value"),
pytest.param(0, id="zero value"),
pytest.param(sys.maxsize + 1, id="sys.maxsize + 1 value"),
],
)
def test_invalid_min_clients_value_range(self, min_clients):
job_name = "min_clients_value_range"
meta = f"""
{{
"name": "sag",
"resource_spec": {{ "site-a": {{"gpu": 1}}, "site-b": {{"gpu": 1}} }},
"deploy_map": {{"min_clients_value_range": ["server","site-a", "site-b"]}},
"min_clients" : {min_clients}
}}
"""
self._assert_invalid(job_name, meta)
def test_deploy_map_config_non_exists_app(self):
job_name = "valid_job"
# valid_job folder contains sag app, not hello-pt app
meta = """
{
"resource_spec": { "site-a": {"gpu": 1}, "site-b": {"gpu": 1}},
"deploy_map": {"hello-pt": ["server","site-a", "site-b"]}
}
"""
self._assert_invalid(job_name, meta)
def test_meta_missing_job_folder_name(self):
job_name = "valid_job"
meta = """
{
"resource_spec": { "site-a": {"gpu": 1}, "site-b": {"gpu": 1}},
"deploy_map": {"sag": ["server","site-a", "site-b"]}
}
"""
self._assert_valid(job_name, meta)
def _assert_valid(self, job_name: str, meta: str = ""):
data = _zip_job_with_meta(job_name, meta)
valid, error, meta = self.validator.validate(job_name, data)
assert valid
assert error == ""
def _assert_invalid(self, job_name: str, meta: str = ""):
data = _zip_job_with_meta(job_name, meta)
valid, error, meta = self.validator.validate(job_name, data)
assert not valid
assert error
| NVFlare-main | tests/unit_test/private/fed/server/job_meta_validator_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import random
import pandas as pd
from nvflare.app_common.abstract.statistics_spec import Bin, DataType, Histogram, HistogramType
from nvflare.app_common.statistics.numpy_utils import dtype_to_data_type
from nvflare.app_common.utils.json_utils import ObjectEncoder
class TestStatsDef:
def test_dtype_to_data_type(self):
train_data = [
["tom", 10, 15.5],
["nick", 15, 10.2],
["juli", 14],
["tom2", 10, 13.0],
["nick1", 25],
["juli1", 24, 10.5],
]
train = pd.DataFrame(train_data, columns=["Name", "Age", "Edu"])
assert DataType.STRING == dtype_to_data_type(train["Name"].dtype)
assert DataType.INT == dtype_to_data_type(train["Age"].dtype)
assert DataType.FLOAT == dtype_to_data_type(train["Edu"].dtype)
def test_feature_histogram_to_json(self):
even = [1, 3, 5, 7, 9]
odd = [2, 4, 6, 8, 10]
buckets = zip(even, odd)
bins = [Bin(low_value=b[0], high_value=b[1], sample_count=random.randint(10, 100)) for b in buckets]
hist = Histogram(HistogramType.STANDARD, bins)
statistics = {"histogram": {"site-1": {"train": {"feat": hist}}}}
        x = json.dumps(statistics, cls=ObjectEncoder)
        # str.__eq__ against a dict returns NotImplemented, which is truthy, so asserting
        # on it could never fail; the sample counts are random anyway, so validate the
        # serialized structure instead of exact values.
        result = json.loads(x)
        assert "feat" in result["histogram"]["site-1"]["train"]
| NVFlare-main | tests/unit_test/app_common/statistics/stats_def_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/statistics/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import random
from typing import List
from nvflare.app_common.abstract.statistics_spec import (
Bin,
BinRange,
DataType,
Feature,
Histogram,
HistogramType,
StatisticConfig,
)
from nvflare.app_common.app_constant import StatisticsConstants
from nvflare.app_common.statistics.statisitcs_objects_decomposer import (
BinDecomposer,
BinRangeDecomposer,
FeatureDecomposer,
HistogramDecomposer,
StatisticConfigDecomposer,
)
from nvflare.fuel.utils import fobs
class TestStatisticConfigDecomposer:
@classmethod
def setup_class(cls):
pass
@classmethod
def teardown_class(cls):
pass
def test_statistic_configs_serde(self):
fobs.register(StatisticConfigDecomposer)
data = fobs.dumps(StatisticConfig("foo", {}))
obj: StatisticConfig = fobs.loads(data)
assert isinstance(obj, StatisticConfig)
assert obj.config == {}
assert obj.name == "foo"
def test_statistic_configs_serde2(self):
config = """
{
"count": {},
"mean": {},
"sum": {},
"stddev": {},
"histogram": {
"*": {"bins": 20},
"Age": { "bins": 10, "range": [0,120] }
}
}
"""
config_dict = json.loads(config)
statistic_configs = []
for k in config_dict:
statistic_configs.append([k, config_dict[k]])
data = fobs.dumps(statistic_configs)
obj = fobs.loads(data)
assert isinstance(obj, List)
for o in obj:
assert isinstance(o, list)
print(o)
assert o[0] in config_dict.keys()
def test_statistic_configs_serde3(self):
fobs.register(StatisticConfigDecomposer)
config = """
{
"count": {},
"mean": {},
"sum": {},
"stddev": {},
"histogram": {
"*": {"bins": 20},
"Age": { "bins": 10, "range": [0,120] }
}
}
"""
config_dict = json.loads(config)
from nvflare.app_common.workflows.statistics_controller import StatisticsController
ordered_statistics = StatisticsConstants.ordered_statistics[StatisticsConstants.STATS_1st_STATISTICS]
target_configs: List[StatisticConfig] = StatisticsController._get_target_statistics(
config_dict, ordered_statistics
)
o = fobs.dumps(target_configs)
target_configs1 = fobs.loads(o)
assert target_configs == target_configs1
def test_datatype_serde(self):
dt = DataType.FLOAT
o = fobs.dumps(dt)
dt1 = fobs.loads(o)
assert dt == dt1
def test_histogram_type_serde(self):
f = HistogramType.STANDARD
o = fobs.dumps(f)
f1 = fobs.loads(o)
assert f1 == f
def test_feature_serde(self):
fobs.register(FeatureDecomposer)
f = Feature(feature_name="feature1", data_type=DataType.INT)
o = fobs.dumps(f)
f1 = fobs.loads(o)
assert f1 == f
def test_bin_serde(self):
fobs.register(BinDecomposer)
f = Bin(low_value=0, high_value=255, sample_count=100)
o = fobs.dumps(f)
f1 = fobs.loads(o)
assert f1 == f
def test_bin_range_serde(self):
fobs.register(BinRangeDecomposer)
f = BinRange(min_value=0, max_value=255)
o = fobs.dumps(f)
f1 = fobs.loads(o)
assert f1 == f
def test_histogram_serde(self):
fobs.register(HistogramDecomposer)
fobs.register(BinDecomposer)
fobs.register(BinRangeDecomposer)
        bins = []
        for i in range(0, 10):
            bins.append(Bin(i, i + 1, random.randint(0, 100)))
f = Histogram(HistogramType.STANDARD, bins)
o = fobs.dumps(f)
f1 = fobs.loads(o)
assert f1 == f
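    # Note: the Histogram round trip also depends on the nested Bin decomposer being
    # registered, since the bins travel inside the histogram payload.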
| NVFlare-main | tests/unit_test/app_common/statistics/statistics_objects_descomposer_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict
import pytest
from nvflare.app_common.statistics.numeric_stats import get_min_or_max_values
TEST_CASE_1 = [
(
{
"site-1": {
"train": {
"Age": 630,
"fnlwgt": 3673746,
"Education-Num": 177,
"Capital Gain": 16258,
"Capital Loss": 0,
"Hours per week": 631,
},
}
},
{
"train": {
"Age": 630,
"fnlwgt": 3673746,
"Education-Num": 177,
"Capital Gain": 16258,
"Capital Loss": 0,
"Hours per week": 631,
}
},
)
]
class TestNumericStats:
@pytest.mark.parametrize("client_stats, expected_global_stats", TEST_CASE_1)
def test_accumulate_metrics(self, client_stats, expected_global_stats):
from nvflare.app_common.statistics.numeric_stats import accumulate_metrics
global_stats = {}
for client_name in client_stats:
global_stats = accumulate_metrics(metrics=client_stats[client_name], global_metrics=global_stats)
assert global_stats.keys() == expected_global_stats.keys()
assert global_stats == expected_global_stats
def test_get_min_or_max_values(self):
client_statistics = {
"site-1": {"train": {"Age": 0}, "test": {"Age": 2}},
"site-2": {"train": {"Age": 1}, "test": {"Age": 3}},
}
global_statistics: Dict[str, Dict[str, int]] = {}
for client in client_statistics:
statistics = client_statistics[client]
print("get_min_or_max_values =", global_statistics)
global_statistics = get_min_or_max_values(statistics, global_statistics, min)
assert global_statistics == {"test": {"Age": 0}, "train": {"Age": 0}}
global_statistics: Dict[str, Dict[str, int]] = {}
for client in client_statistics:
statistics = client_statistics[client]
print("get_min_or_max_values =", global_statistics)
global_statistics = get_min_or_max_values(statistics, global_statistics, max)
assert global_statistics == {"test": {"Age": 3}, "train": {"Age": 3}}
| NVFlare-main | tests/unit_test/app_common/statistics/numeric_stats_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.app_common.statistics.min_count_cleanser import MinCountCleanser
MIN_COUNT_VALIDATION_TEST_CASES = [
# statistics with age count = 6, failure count = 0 , min_count = 7, count < min_count (6 - 0 < 7), valid = False
({"count": {"train": {"age": 6}}, "failure_count": {"train": {"age": 0}}}, 7, {"train": {"age": False}}),
    # statistics with age count=12, failure count=3, min_count=7, effective count > min_count (12 - 3 > 7), valid = True
({"count": {"train": {"age": 12}}, "failure_count": {"train": {"age": 3}}}, 7, {"train": {"age": True}}),
    # statistics with age count=9, failure count=3, min_count=7, effective count < min_count (9 - 3 < 7), valid = False
({"count": {"train": {"age": 9}}, "failure_count": {"train": {"age": 3}}}, 7, {"train": {"age": False}}),
# statistics with age count = 8, failure count = 0 , min_count = 7, count > min_count (8 - 0 > 7), valid = True
({"count": {"train": {"age": 8}}, "failure_count": {"train": {"age": 0}}}, 7, {"train": {"age": True}}),
# statistics with age count = 8, edu count = 5, failure count = 0 , min_count = 7,
# age count > min_count (8 - 0 > 7), valid = True
# edu count < min_count (5 - 0 < 7), valid = False
(
{"count": {"train": {"age": 8, "edu": 5}}, "failure_count": {"train": {"age": 0, "edu": 0}}},
7,
{"train": {"age": True, "edu": False}},
),
]
MIN_COUNT_APPLY_TEST_CASES = [
# statistics with age count = 6, failure count = 0 , min_count = 7, count < min_count (6 - 0 < 7), valid = False
# all features are removed from result. Modified Flag = True
(
{"count": {"train": {"age": 6}}, "failure_count": {"train": {"age": 0}}},
7,
({"count": {"train": {"age": 6}}, "failure_count": {"train": {}}}, True),
),
    # statistics with age count = 8, failure count = 1, min_count = 7, effective count >= min_count (8 - 1 >= 7), valid = True
# all feature statistics remain. Modified Flag = False
(
{"count": {"train": {"age": 8}}, "failure_count": {"train": {"age": 1}}},
7,
({"count": {"train": {"age": 8}}, "failure_count": {"train": {"age": 1}}}, False),
),
(
{
"count": {"train": {"age": 8, "edu": 5}},
"sum": {"train": {"age": 120, "edu": 360}},
"failure_count": {"train": {"age": 0, "edu": 0}},
},
7,
(
{
"count": {"train": {"age": 8, "edu": 5}},
"sum": {"train": {"age": 120}},
"failure_count": {"train": {"age": 0}},
},
True,
),
),
]
class TestMinCountChecker:
@pytest.mark.parametrize("statistics, min_count, statistics_valid", MIN_COUNT_VALIDATION_TEST_CASES)
def test_min_count_validate(self, statistics, min_count, statistics_valid):
checker = MinCountCleanser(min_count=min_count)
results = checker.min_count_validate("site-1", statistics=statistics)
assert results == statistics_valid
@pytest.mark.parametrize("statistics, min_count, expected_result", MIN_COUNT_APPLY_TEST_CASES)
def test_min_count_apply(self, statistics, min_count, expected_result):
checker = MinCountCleanser(min_count=min_count)
results = checker.apply(statistics=statistics, client_name="site-1")
assert results == expected_result
| NVFlare-main | tests/unit_test/app_common/statistics/min_count_cleanser_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.app_common.app_constant import StatisticsConstants as StC
from nvflare.app_common.statistics.min_max_cleanser import AddNoiseToMinMax
MAX_TEST_CASES = [
(100, (0.1, 0.3), (100 * 1.1, 100 * 1.3)),
(0, (0.1, 0.3), (1e-5, 1e-5 * 1.3)),
(1e-4, (0.1, 0.3), (1e-4, 1e-4 * 1.3)),
(0.6 * 1e-3, (0.1, 0.3), (0.6 * 1e-3, 0.6 * 1.3)),
(-0.6 * 1e-3, (0.1, 0.3), (-0.6 * 1e-3, -0.6 * 1e-3 * 0.7)),
(-1e-3, (0.1, 0.3), (-1e-3, -1e-3 * 0.7)),
(-100, (0.1, 0.3), (-100, -100 * 0.7)),
]
MIN_TEST_CASES = [
(100, (0.1, 0.3), (100 * 0.7, 100 * 0.9)),
(0, (0.1, 0.3), (-1e-5, 0)),
(-100, (0.1, 0.3), (-100 * 1.3, -100 * 0.9)),
(0.6 * 1e-3, (0.1, 0.3), (0.6 * 1e-3 * 0.7, 0.6 * 1e-3 * 0.9)),
(-0.6 * 1e-3, (0.1, 0.3), (-0.6 * 1e-3 * 1.3, -0.6 * 1e-3 * 0.9)),
(-1e-3, (0.1, 0.3), (-1e-3 * 1.3, -1e-3 * 0.9)),
(-1e-4, (0.1, 0.3), (-1e-4 * 1.3, -1e-4 * 0.9)),
]
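# Reading of the cases above (a sketch of the expected contract, not the implementation):
# noise only widens the range -- a max is inflated by roughly 10%-30% of its magnitude, a
# min is deflated likewise, and values at or near zero fall back to a +/-1e-5 guard band.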
NOISE_TEST_CASES = [
(
{"min": {"train": {"age": 0, "edu": 4}}, "max": {"train": {"age": 120, "edu": 13}}},
(0.1, 0.3),
{"min": {"train": {"age": -1e-5 * 0.7, "edu": 4 * 0.9}}, "max": {"train": {"age": 120 * 1.1, "edu": 4 * 1.1}}},
)
]
class TestAddNoiseToMinMax:
@pytest.mark.parametrize("value, noise_level, compare_result", MAX_TEST_CASES)
def test_get_max_value(self, value, noise_level, compare_result):
value_with_noise = AddNoiseToMinMax._get_max_value(value, noise_level)
assert value_with_noise > compare_result[0]
assert value_with_noise <= compare_result[1]
@pytest.mark.parametrize("value, noise_level, compare_result", MIN_TEST_CASES)
def test_get_min_value(self, value, noise_level, compare_result):
value_with_noise = AddNoiseToMinMax._get_min_value(value, noise_level)
assert value_with_noise > compare_result[0]
assert value_with_noise <= compare_result[1]
@pytest.mark.parametrize("statistics, noise_level, compare_result", NOISE_TEST_CASES)
def test_min_value_noise_generator(self, statistics, noise_level, compare_result):
gen = AddNoiseToMinMax(noise_level[0], noise_level[1])
statistic = StC.STATS_MIN
statistics_with_noise = gen.generate_noise(statistics, statistic)
min_statistics = statistics_with_noise[statistic]
for ds in min_statistics:
for feature in min_statistics[ds]:
assert min_statistics[ds][feature] <= compare_result[statistic][ds][feature]
@pytest.mark.parametrize("statistics, noise_level, compare_result", NOISE_TEST_CASES)
def test_max_value_noise_generator(self, statistics, noise_level, compare_result):
gen = AddNoiseToMinMax(noise_level[0], noise_level[1])
statistic = StC.STATS_MAX
statistics_with_noise = gen.generate_noise(statistics, statistic)
max_statistics = statistics_with_noise[statistic]
for ds in max_statistics:
for feature in max_statistics[ds]:
assert max_statistics[ds][feature] > compare_result[statistic][ds][feature]
| NVFlare-main | tests/unit_test/app_common/statistics/min_max_cleanser_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.app_common.abstract.statistics_spec import Bin, Histogram, HistogramType
from nvflare.app_common.statistics.histogram_bins_cleanser import HistogramBinsCleanser
hist_bins = [Bin(i, i + 1, 100) for i in range(0, 10)]
age_hist = Histogram(hist_type=HistogramType.STANDARD, bins=hist_bins, hist_name=None)
"""
# case 1:
# numbers of bins = 10
# count = 6
# max_bins_percent = 10, i.e 10%
# 6*10% = 0.1*6 = 0.6 ==> round(0.6) ==> 1
# 10 > 1
# case 2:
# numbers of bins = 10
# count = 200
# max_bins_percent = 10, i.e 10%
# 200*10% = 0.1*200 = 20
# 10 < 20
"""
HIST_BINS_VALIDATION_TEST_CASES = [
(
{
"count": {"train": {"age": 6}},
"failure_count": {"train": {"age": 0}},
"histogram": {"train": {"age": age_hist}},
},
10,
{"train": {"age": False}},
),
(
{
"count": {"train": {"age": 200}},
"failure_count": {"train": {"age": 0}},
"histogram": {"train": {"age": age_hist}},
},
10,
{"train": {"age": True}},
),
]
HIST_BINS_APPLY_TEST_CASES = [
(
{
"count": {"train": {"age": 6}},
"failure_count": {"train": {"age": 0}},
"histogram": {"train": {"age": age_hist}},
},
10,
({"count": {"train": {"age": 6}}, "failure_count": {"train": {"age": 0}}, "histogram": {"train": {}}}, True),
),
(
{
"count": {"train": {"age": 200}},
"failure_count": {"train": {"age": 0}},
"histogram": {"train": {"age": age_hist}},
},
10,
(
{
"count": {"train": {"age": 200}},
"failure_count": {"train": {"age": 0}},
"histogram": {"train": {"age": age_hist}},
},
False,
),
),
]
class TestHistBinsCleanser:
@pytest.mark.parametrize("statistics, max_bins_percent, expected_result", HIST_BINS_VALIDATION_TEST_CASES)
def test_hist_bins_validate(self, statistics, max_bins_percent, expected_result):
checker = HistogramBinsCleanser(max_bins_percent=max_bins_percent)
results = checker.hist_bins_validate("site-1", statistics=statistics)
assert results == expected_result
@pytest.mark.parametrize("statistics, max_bins_percent, expected_result", HIST_BINS_APPLY_TEST_CASES)
def test_hist_bins_apply(self, statistics, max_bins_percent, expected_result):
checker = HistogramBinsCleanser(max_bins_percent=max_bins_percent)
results = checker.apply(statistics=statistics, client_name="site-1")
assert results == expected_result
| NVFlare-main | tests/unit_test/app_common/statistics/hist_bins_cleanser_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/filters/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.apis.dxo import DXO, DataKind, from_shareable
from nvflare.apis.fl_context import FLContext
from nvflare.app_common.filters import ExcludeVars
TEST_CASES = [
({"a": 1.0, "b": 2.0}, "a", {"b": 2.0}),
({"a": 1.0, "b": 2.0, "c": 3.0}, ["a", "b"], {"c": 3.0}),
({"a": 1.0, "b": 2.0, "c": 3.0, "d": 4.0}, ["a", "d"], {"b": 2.0, "c": 3.0}),
(
{"conv/a": 1.0, "conv/b": 2.0, "drop/c": 3.0, "conv/d": 4.0},
["conv/*"],
{"conv/a": 1.0, "conv/b": 2.0, "drop/c": 3.0, "conv/d": 4.0},
),
({"conv/a": 1.0, "conv/b": 2.0, "drop/c": 3.0, "conv/d": 4.0}, "conv/*", {"drop/c": 3.0}),
]
class TestExcludeVars:
@pytest.mark.parametrize("input_data,exclude_vars,expected_data", TEST_CASES)
def test_exclude(self, input_data, exclude_vars, expected_data):
dxo = DXO(
data_kind=DataKind.WEIGHTS,
data=input_data,
)
data = dxo.to_shareable()
fl_ctx = FLContext()
f = ExcludeVars(exclude_vars=exclude_vars)
new_data = f.process(data, fl_ctx)
new_dxo = from_shareable(new_data)
assert new_dxo.data == expected_data
| NVFlare-main | tests/unit_test/app_common/filters/exclude_vars_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/storages/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import json
import os
import random
import tempfile
from collections import defaultdict
from pathlib import Path
import pytest
from nvflare.apis.storage import StorageException
from nvflare.app_common.storages.filesystem_storage import FilesystemStorage
def random_string(length):
s = "abcdefghijklmnopqrstuvwxyz01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ"
p = "".join(random.sample(s, length))
return p
def random_path(depth):
path = os.sep.join([random_string(4) for _ in range(depth)])
return path
def random_data():
return bytes(bytearray(random.getrandbits(8) for _ in range(16384)))
def random_meta():
return {random.getrandbits(8): random.getrandbits(8) for _ in range(32)}
ROOT_DIR = os.path.abspath(os.sep)
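# Note on layout: the reference copy written in test_large_storage mirrors
# the assumed on-disk structure of FilesystemStorage, where each object URI
# maps to a directory holding a "data" file (raw bytes) and a "meta" file
# (serialized dict):
#
#   <root>/<object-uri>/data   # object payload
#   <root>/<object-uri>/meta   # object metadata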
# TODO: Add S3Storage test
@pytest.fixture(name="storage", params=["FilesystemStorage"])
def setup_and_teardown(request):
print(f"setup {request.param}")
if request.param == "FilesystemStorage":
with tempfile.TemporaryDirectory() as tmp_dir:
storage = FilesystemStorage(root_dir=os.path.join(tmp_dir, "filesystem-storage"))
else:
raise StorageException(f"Storage type {request.param} is not supported.")
yield storage
print("teardown")
class TestStorage:
@pytest.mark.parametrize("n_files", [20, 100])
@pytest.mark.parametrize("n_folders", [5, 20])
@pytest.mark.parametrize("path_depth", [3, 10])
def test_large_storage(self, storage, n_folders, n_files, path_depth):
test_tmp_dir = tempfile.TemporaryDirectory()
test_tmp_dir_name = test_tmp_dir.name
dir_to_files = defaultdict(list)
print(f"Prepare data {n_files} files for {n_folders} folders")
for _ in range(n_folders):
base_path = os.path.join(ROOT_DIR, random_path(path_depth))
for i in range(round(n_files / n_folders)):
                # distribute files across the path_depth levels of the directory tree
dir_path = base_path
for _ in range(round(i / (n_files / path_depth))):
dir_path = os.path.split(dir_path)[0]
filename = random_string(8)
dir_to_files[dir_path].append(os.path.join(dir_path, filename))
filepath = os.path.join(dir_path, filename)
test_filepath = os.path.join(test_tmp_dir_name, filepath.lstrip("/"))
Path(test_filepath).mkdir(parents=True, exist_ok=True)
# use f.write() as reference to compare with storage implementation
with open(os.path.join(test_filepath, "data"), "wb") as f:
data = random_data()
f.write(data)
with open(os.path.join(test_filepath, "meta"), "wb") as f:
meta = random_meta()
f.write(json.dumps(str(meta)).encode("utf-8"))
storage.create_object(filepath, data, meta, overwrite_existing=True)
for test_dir_path, _, object_files in os.walk(test_tmp_dir_name):
dir_path = "/" + test_dir_path[len(test_tmp_dir_name) :].lstrip("/")
assert set(storage.list_objects(dir_path)) == set(dir_to_files[dir_path])
# if dir_path is an object
if object_files:
with open(os.path.join(test_dir_path, "data"), "rb") as f:
data = f.read()
with open(os.path.join(test_dir_path, "meta"), "rb") as f:
meta = ast.literal_eval(json.loads(f.read().decode("utf-8")))
assert storage.get_data(dir_path) == data
assert storage.get_detail(dir_path)[1] == data
assert storage.get_meta(dir_path) == meta
assert storage.get_detail(dir_path)[0] == meta
storage.delete_object(dir_path)
test_tmp_dir.cleanup()
@pytest.mark.parametrize(
"uri, data, meta, overwrite_existing",
[
(1234, b"c", {}, True),
("/test_dir/test_object", "not a valid file name", {}, True),
("/test_dir/test_object", b"c", "not a dictionary", True),
("/test_dir/test_object", b"c", {}, "not a bool"),
],
)
def test_create_invalid_inputs(self, storage, uri, data, meta, overwrite_existing):
with pytest.raises(Exception):
storage.create_object(uri, data, meta, overwrite_existing)
def test_invalid_inputs(self, storage):
uri = 1234
with pytest.raises(TypeError):
storage.list_objects(uri)
with pytest.raises(TypeError):
storage.get_meta(uri)
with pytest.raises(TypeError):
storage.get_data(uri)
with pytest.raises(TypeError):
storage.get_detail(uri)
with pytest.raises(TypeError):
storage.delete_object(uri)
@pytest.mark.parametrize(
"uri, meta, overwrite_existing",
[
(1234, {}, True),
("/test_dir/test_object", "not a dictionary", True),
("/test_dir/test_object", {}, "not a bool"),
],
)
def test_update_meta_invalid_inputs(self, storage, uri, meta, overwrite_existing):
with pytest.raises(TypeError):
storage.update_meta(uri, meta, overwrite_existing)
@pytest.mark.parametrize(
"uri, data",
[
(1234, "not valid file"),
("/test_dir/test_object", "not bytes"),
],
)
def test_update_data_invalid_inputs(self, storage, uri, data):
with pytest.raises(Exception):
storage.update_object(uri, data)
@pytest.mark.parametrize(
"uri",
["/test_dir/test_object"],
)
def test_create_read(self, storage, uri):
data = random_data()
meta = random_meta()
storage.create_object(uri, data, meta, overwrite_existing=True)
# get_data()
assert storage.get_data(uri) == data
assert storage.get_detail(uri)[1] == data
# get_meta()
assert storage.get_meta(uri) == meta
assert storage.get_detail(uri)[0] == meta
storage.delete_object(uri)
@pytest.mark.parametrize(
"uri",
["/test_dir/test_object"],
)
def test_create_overwrite(self, storage, uri):
data = random_data()
meta = random_meta()
storage.create_object(uri, random_data(), random_meta(), overwrite_existing=True)
storage.create_object(uri, data, meta, overwrite_existing=True)
assert storage.get_data(uri) == data
assert storage.get_meta(uri) == meta
with pytest.raises(StorageException):
storage.create_object(uri, data, meta, overwrite_existing=False)
storage.delete_object(uri)
@pytest.mark.parametrize(
"uri, test_uri",
[("/test_dir/test_object", "/test_dir")],
)
def test_create_nonempty(self, storage, uri, test_uri):
storage.create_object("/test_dir/test_object", random_data(), random_meta())
# cannot create object at nonempty directory
with pytest.raises(StorageException):
storage.create_object(test_uri, random_data(), random_meta(), overwrite_existing=True)
storage.delete_object(uri)
@pytest.mark.parametrize(
"dir_path, num",
[("/test_dir/test_object", 10), ("/test_dir/test_happy/test_object", 20)],
)
def test_list(self, storage, dir_path, num):
dir_to_files = defaultdict(list)
for i in range(num):
object_uri = os.path.join(dir_path, str(i))
storage.create_object(object_uri, random_data(), random_meta())
dir_to_files[dir_path].append(object_uri)
assert set(storage.list_objects(dir_path)) == set(dir_to_files[dir_path])
for i in range(num):
object_uri = os.path.join(dir_path, str(i))
storage.delete_object(object_uri)
def test_delete(self, storage):
uri = "/test_dir/test_object"
storage.create_object(uri, random_data(), random_meta(), overwrite_existing=True)
storage.delete_object(uri)
# methods on non-existent object
with pytest.raises(StorageException):
data3 = random_data()
storage.update_object(uri, data3)
with pytest.raises(StorageException):
meta4 = random_meta()
storage.update_meta(uri, meta4, replace=True)
with pytest.raises(StorageException):
storage.get_data(uri)
with pytest.raises(StorageException):
storage.get_meta(uri)
with pytest.raises(StorageException):
storage.get_detail(uri)
with pytest.raises(StorageException):
storage.delete_object(uri)
@pytest.mark.parametrize(
"uri",
["/test_dir/test_object"],
)
def test_data_read_update(self, storage, uri):
data = random_data()
meta = random_meta()
storage.create_object(uri, data, meta, overwrite_existing=True)
# get_data()
assert storage.get_data(uri) == data
assert storage.get_detail(uri)[1] == data
# update_data()
data2 = random_data()
storage.update_object(uri, data2)
assert storage.get_data(uri) == data2
assert storage.get_detail(uri)[1] == data2
storage.delete_object(uri)
@pytest.mark.parametrize(
"uri",
["/test_dir/test_object"],
)
def test_meta_read_update(self, storage, uri):
data = random_data()
meta = random_meta()
storage.create_object(uri, data, meta, overwrite_existing=True)
# get_meta()
assert storage.get_meta(uri) == meta
assert storage.get_detail(uri)[0] == meta
# update_meta() w/ replace
meta2 = random_meta()
storage.update_meta(uri, meta2, replace=True)
assert storage.get_meta(uri) == meta2
assert storage.get_detail(uri)[0] == meta2
# update_meta() w/o replace
meta3 = random_meta()
meta2.update(meta3)
storage.update_meta(uri, meta3, replace=False)
assert storage.get_meta(uri) == meta2
assert storage.get_detail(uri)[0] == meta2
storage.delete_object(uri)
| NVFlare-main | tests/unit_test/app_common/storages/storage_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import re
import numpy as np
import pytest
from nvflare.apis.dxo import DXO, DataKind, MetaKey, from_shareable
from nvflare.apis.fl_constant import ReservedKey
from nvflare.apis.fl_context import FLContext
from nvflare.apis.shareable import Shareable
from nvflare.app_common.aggregators.intime_accumulate_model_aggregator import InTimeAccumulateWeightedAggregator
from nvflare.app_common.app_constant import AppConstants
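# The expected arrays in the aggregation cases below follow the weighted
# running mean that InTimeAccumulateWeightedAggregator computes per variable:
# result = sum_i(w_i * n_i * x_i) / sum_i(w_i * n_i), where for client i,
# w_i is its aggregation weight, n_i is MetaKey.NUM_STEPS_CURRENT_ROUND, and
# x_i is its submitted array. A minimal numpy sketch of that formula
# (illustration only, not the aggregator itself):
def _illustrative_weighted_mean(contribs):
    # contribs: iterable of (weight, num_steps, np.ndarray) tuples
    total = sum(w * n * x for w, n, x in contribs)
    norm = sum(w * n for w, n, _ in contribs)
    return total / norm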
class TestInTimeAccumulateWeightedAggregator:
@pytest.mark.parametrize(
"exclude_vars,aggregation_weights,expected_data_kind,error,error_msg",
[
(
2.0,
None,
DataKind.WEIGHT_DIFF,
ValueError,
f"exclude_vars = 2.0 should be a regex string but got {type(2.0)}.",
),
(
{"dxo1": 3.0, "dxo2": ""},
None,
{"dxo1": DataKind.WEIGHT_DIFF, "dxo2": DataKind.WEIGHT_DIFF},
ValueError,
f"exclude_vars[dxo1] = 3.0 should be a regex string but got {type(3.0)}.",
),
(None, None, DataKind.ANALYTIC, ValueError, "expected_data_kind = ANALYTIC is not WEIGHT_DIFF or WEIGHTS"),
(
None,
None,
{"dxo1": DataKind.WEIGHT_DIFF, "dxo2": DataKind.ANALYTIC},
ValueError,
"expected_data_kind[dxo2] = ANALYTIC is not WEIGHT_DIFF or WEIGHTS",
),
(
None,
{"dxo1": {"client_0": 1.0, "client_1": 2.0}},
{"dxo1": DataKind.WEIGHT_DIFF, "dxo2": DataKind.WEIGHT_DIFF},
ValueError,
"A dict of dict aggregation_weights should specify aggregation_weights "
"for every key in expected_data_kind. But missed these keys: ['dxo2']",
),
(
{"dxo2": ""},
None,
{"dxo1": DataKind.WEIGHT_DIFF, "dxo2": DataKind.WEIGHT_DIFF},
ValueError,
"A dict exclude_vars should specify exclude_vars for every key in expected_data_kind. "
"But missed these keys: ['dxo1']",
),
],
)
def test_invalid_create(self, exclude_vars, aggregation_weights, expected_data_kind, error, error_msg):
with pytest.raises(error, match=re.escape(error_msg)):
_ = InTimeAccumulateWeightedAggregator(
exclude_vars=exclude_vars,
aggregation_weights=aggregation_weights,
expected_data_kind=expected_data_kind,
)
@pytest.mark.parametrize(
"exclude_vars,aggregation_weights,expected_data_kind,expected_object",
[
(
None,
None,
DataKind.WEIGHTS,
InTimeAccumulateWeightedAggregator(
exclude_vars=None, aggregation_weights=None, expected_data_kind=DataKind.WEIGHTS
),
),
(
"hello",
None,
{"dxo1": DataKind.WEIGHTS, "dxo2": DataKind.WEIGHT_DIFF},
InTimeAccumulateWeightedAggregator(
exclude_vars={"dxo1": "hello", "dxo2": "hello"},
aggregation_weights=None,
expected_data_kind={"dxo1": DataKind.WEIGHTS, "dxo2": DataKind.WEIGHT_DIFF},
),
),
(
None,
{"client_0": 1.0, "client_1": 2.0},
{"dxo1": DataKind.WEIGHTS, "dxo2": DataKind.WEIGHT_DIFF},
InTimeAccumulateWeightedAggregator(
exclude_vars=None,
aggregation_weights={
"dxo1": {"client_0": 1.0, "client_1": 2.0},
"dxo2": {"client_0": 1.0, "client_1": 2.0},
},
expected_data_kind={"dxo1": DataKind.WEIGHTS, "dxo2": DataKind.WEIGHT_DIFF},
),
),
],
)
def test_create(self, exclude_vars, aggregation_weights, expected_data_kind, expected_object):
result = InTimeAccumulateWeightedAggregator(
exclude_vars=exclude_vars, aggregation_weights=aggregation_weights, expected_data_kind=expected_data_kind
)
assert result.exclude_vars == expected_object.exclude_vars
assert result.aggregation_weights == expected_object.aggregation_weights
assert result.expected_data_kind == expected_object.expected_data_kind
@pytest.mark.parametrize("current_round,contribution_round,expected", [(1, 1, True), (2, 1, False)])
def test_accept(self, current_round, contribution_round, expected):
aggregation_weights = {f"client_{i}": random.random() for i in range(2)}
agg = InTimeAccumulateWeightedAggregator(aggregation_weights=aggregation_weights)
client_name = "client_0"
iter_number = 1
weights = np.random.random(4)
fl_ctx = FLContext()
s = Shareable()
s.set_peer_props({ReservedKey.IDENTITY_NAME: client_name})
s.add_cookie(AppConstants.CONTRIBUTION_ROUND, contribution_round)
fl_ctx.set_prop(AppConstants.CURRENT_ROUND, current_round)
dxo = DXO(
DataKind.WEIGHT_DIFF,
data={"var1": weights},
meta={
MetaKey.NUM_STEPS_CURRENT_ROUND: iter_number,
},
)
assert agg.accept(dxo.update_shareable(s), fl_ctx) == expected
@pytest.mark.parametrize(
"received,expected",
[
(
{"client1": {"weight": 0.5, "iter_number": 1, "aggr_data": {"var1": np.array([2.0, 3.0, 1.1, 0.1])}}},
{"var1": np.array([2.0, 3.0, 1.1, 0.1])},
),
(
{"client1": {"weight": 1.0, "iter_number": 1, "aggr_data": {"var1": np.array([2.0, 3.0, 1.1, 0.1])}}},
{"var1": np.array([2.0, 3.0, 1.1, 0.1])},
),
(
{
"client1": {"weight": 0.5, "iter_number": 1, "aggr_data": {"var1": np.array([2.0, 3.0, 1.1, 0.1])}},
"client2": {"weight": 1.0, "iter_number": 1, "aggr_data": {"var1": np.array([1.0, 1.0, 2.1, 0.5])}},
},
{
"var1": np.array(
[
(0.5 * 2.0 + 1.0 * 1.0) / (0.5 + 1),
(0.5 * 3.0 + 1.0 * 1.0) / (0.5 + 1),
(0.5 * 1.1 + 1.0 * 2.1) / (0.5 + 1),
(0.5 * 0.1 + 1.0 * 0.5) / (0.5 + 1),
]
)
},
),
(
{
"client1": {"weight": 1.0, "iter_number": 2, "aggr_data": {"var1": np.array([2.0, 3.0, 1.1, 0.1])}},
"client2": {"weight": 1.0, "iter_number": 4, "aggr_data": {"var1": np.array([1.0, 1.0, 2.1, 0.5])}},
},
{
"var1": np.array(
[
(2 * 2.0 + 4 * 1.0) / (2 + 4),
(2 * 3.0 + 4 * 1.0) / (2 + 4),
(2 * 1.1 + 4 * 2.1) / (2 + 4),
(2 * 0.1 + 4 * 0.5) / (2 + 4),
]
)
},
),
],
)
def test_aggregate(self, received, expected):
aggregation_weights = {k: v["weight"] for k, v in received.items()}
agg = InTimeAccumulateWeightedAggregator(aggregation_weights=aggregation_weights)
fl_ctx = FLContext()
fl_ctx.set_prop(AppConstants.CURRENT_ROUND, 0)
for k, v in received.items():
dxo = DXO(
DataKind.WEIGHT_DIFF,
data=v["aggr_data"],
meta={
MetaKey.NUM_STEPS_CURRENT_ROUND: v["iter_number"],
},
)
s = Shareable()
s.set_peer_props({ReservedKey.IDENTITY_NAME: k})
s.add_cookie(AppConstants.CONTRIBUTION_ROUND, 0)
agg.accept(dxo.update_shareable(s), fl_ctx)
result = agg.aggregate(fl_ctx)
np.testing.assert_allclose(result["DXO"]["data"]["var1"], expected["var1"])
@pytest.mark.parametrize("shape", [4, (6, 6)])
@pytest.mark.parametrize("n_clients", [10, 50, 100])
def test_aggregate_random(self, shape, n_clients):
aggregation_weights = {f"client_{i}": random.random() for i in range(n_clients)}
agg = InTimeAccumulateWeightedAggregator(aggregation_weights=aggregation_weights)
weighted_sum = np.zeros(shape)
sum_of_weights = 0
fl_ctx = FLContext()
fl_ctx.set_prop(AppConstants.CURRENT_ROUND, 0)
for client_name in aggregation_weights:
iter_number = random.randint(1, 50)
weights = np.random.random(shape)
s = Shareable()
s.set_peer_props({ReservedKey.IDENTITY_NAME: client_name})
s.add_cookie(AppConstants.CONTRIBUTION_ROUND, 0)
dxo = DXO(
DataKind.WEIGHT_DIFF,
data={"var1": weights},
meta={
MetaKey.NUM_STEPS_CURRENT_ROUND: iter_number,
},
)
weighted_sum = weighted_sum + (weights * iter_number * aggregation_weights[client_name])
sum_of_weights = sum_of_weights + (iter_number * aggregation_weights[client_name])
agg.accept(dxo.update_shareable(s), fl_ctx)
result = agg.aggregate(fl_ctx)
result_dxo = from_shareable(result)
np.testing.assert_allclose(result_dxo.data["var1"], weighted_sum / sum_of_weights)
@pytest.mark.parametrize("num_dxo", [1, 2, 3])
@pytest.mark.parametrize("shape", [4, (6, 6)])
@pytest.mark.parametrize("n_clients", [10, 50, 100])
def test_aggregate_random_dxos(self, num_dxo, shape, n_clients):
dxo_names = [f"dxo_{i}" for i in range(num_dxo)]
client_names = [f"client_{i}" for i in range(n_clients)]
aggregation_weights = {
dxo_name: {client_name: random.random() for client_name in client_names} for dxo_name in dxo_names
}
agg = InTimeAccumulateWeightedAggregator(
aggregation_weights=aggregation_weights,
expected_data_kind={dxo_name: DataKind.WEIGHT_DIFF for dxo_name in dxo_names},
)
weighted_sum = {dxo_name: np.zeros(shape) for dxo_name in dxo_names}
sum_of_weights = {dxo_name: 0 for dxo_name in dxo_names}
fl_ctx = FLContext()
fl_ctx.set_prop(AppConstants.CURRENT_ROUND, 0)
for client_name in client_names:
iter_number = random.randint(1, 50)
dxo_collection_data = {}
for dxo_name in dxo_names:
values = np.random.random(shape)
dxo = DXO(
data_kind=DataKind.WEIGHT_DIFF,
data={"var1": values},
meta={
MetaKey.NUM_STEPS_CURRENT_ROUND: iter_number,
},
)
dxo_collection_data[dxo_name] = dxo
weighted_sum[dxo_name] = (
weighted_sum[dxo_name] + values * iter_number * aggregation_weights[dxo_name][client_name]
)
sum_of_weights[dxo_name] = (
sum_of_weights[dxo_name] + iter_number * aggregation_weights[dxo_name][client_name]
)
dxo_collection = DXO(data_kind=DataKind.COLLECTION, data=dxo_collection_data)
s = Shareable()
s.set_peer_props({ReservedKey.IDENTITY_NAME: client_name})
s.add_cookie(AppConstants.CONTRIBUTION_ROUND, 0)
agg.accept(dxo_collection.update_shareable(s), fl_ctx)
result = agg.aggregate(fl_ctx)
result_dxo = from_shareable(result)
for dxo_name in dxo_names:
np.testing.assert_allclose(
result_dxo.data[dxo_name].data["var1"], weighted_sum[dxo_name] / sum_of_weights[dxo_name]
)
| NVFlare-main | tests/unit_test/app_common/aggregators/in_time_accumulate_weighted_aggregator_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/aggregators/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any
import numpy as np
from nvflare.app_common.abstract.learnable import Learnable
from nvflare.app_common.abstract.model import ModelLearnable
from nvflare.app_common.decomposers import common_decomposers
from nvflare.app_common.widgets.event_recorder import _CtxPropReq, _EventReq, _EventStats
from nvflare.fuel.utils import fobs
class TestCommonDecomposers:
@classmethod
def setup_class(cls):
common_decomposers.register()
def test_learnable(self):
# Learnable is simply a dict with 2 extra methods
learnable = Learnable()
learnable["A"] = "foo"
learnable["B"] = 123
new_learnable = self._run_fobs(learnable)
assert new_learnable["A"] == learnable["A"]
assert new_learnable["B"] == learnable["B"]
def test_model_learnable(self):
        # ModelLearnable is likewise a dict-based type with extra helper methods
model_learnable = ModelLearnable()
model_learnable["A"] = "bar"
model_learnable["B"] = 456
new_learnable = self._run_fobs(model_learnable)
assert new_learnable["A"] == model_learnable["A"]
assert new_learnable["B"] == model_learnable["B"]
def test_np_float64(self):
f64 = np.float64(1.234)
new_f64 = self._run_fobs(f64)
assert new_f64 == f64
def test_np_array(self):
npa = np.array([[1, 2, 3], [4, 5, 6]])
new_npa = self._run_fobs(npa)
assert (new_npa == npa).all()
def test_ctx_prop_req(self):
cpr = _CtxPropReq("data_type", True, False, True)
new_cpr = self._run_fobs(cpr)
assert new_cpr.dtype == cpr.dtype
assert new_cpr.is_sticky == cpr.is_sticky
assert new_cpr.is_private == cpr.is_private
assert new_cpr.allow_none == cpr.allow_none
def test_event_req(self):
req = _EventReq(
{"A": "foo"}, {"B": "bar"}, ["block_list1", "block_list2"], ["peer_block_list1", "peer_block_list2"]
)
new_req = self._run_fobs(req)
assert new_req.ctx_reqs == req.ctx_reqs
assert new_req.peer_ctx_reqs == req.peer_ctx_reqs
assert new_req.ctx_block_list == req.ctx_block_list
assert new_req.peer_ctx_block_list == req.peer_ctx_block_list
def test_event_stats(self):
stats = _EventStats()
stats.call_count = 1
stats.prop_missing = 2
stats.prop_none_value = 3
stats.prop_dtype_mismatch = 4
stats.prop_attr_mismatch = 5
stats.prop_block_list_violation = 6
stats.peer_ctx_missing = 7
new_stats = self._run_fobs(stats)
assert new_stats.call_count == stats.call_count
assert new_stats.prop_missing == stats.prop_missing
assert new_stats.prop_none_value == stats.prop_none_value
assert new_stats.prop_dtype_mismatch == stats.prop_dtype_mismatch
assert new_stats.prop_attr_mismatch == stats.prop_attr_mismatch
assert new_stats.prop_block_list_violation == stats.prop_block_list_violation
assert new_stats.peer_ctx_missing == stats.peer_ctx_missing
@staticmethod
def _run_fobs(data: Any) -> Any:
buf = fobs.dumps(data)
return fobs.loads(buf)
| NVFlare-main | tests/unit_test/app_common/decomposers/common_decomposers_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/decomposers/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/job_schedulers/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, List, Optional, Tuple
from unittest.mock import Mock
import pytest
from nvflare.apis.client import Client
from nvflare.apis.fl_context import FLContext, FLContextManager
from nvflare.apis.job_def import ALL_SITES, Job, JobMetaKey, RunStatus
from nvflare.apis.job_def_manager_spec import JobDefManagerSpec
from nvflare.apis.job_scheduler_spec import DispatchInfo
from nvflare.apis.resource_manager_spec import ResourceManagerSpec
from nvflare.apis.server_engine_spec import ServerEngineSpec
from nvflare.app_common.job_schedulers.job_scheduler import DefaultJobScheduler
from nvflare.app_common.resource_managers.list_resource_manager import ListResourceManager
class DummyResourceManager(ResourceManagerSpec):
def __init__(self, name, resources):
self.name = name
self.resources = resources
    def check_resources(self, resource_requirement: dict, fl_ctx: FLContext) -> Tuple[bool, Optional[str]]:
print(f"{self.name}: checking resources with requirements {resource_requirement}")
for k in resource_requirement:
if k in self.resources:
if self.resources[k] < resource_requirement[k]:
return False, None
return True, None
def cancel_resources(self, resource_requirement: dict, token: str, fl_ctx: FLContext):
print(f"{self.name}: cancelling resources {resource_requirement}")
def allocate_resources(self, resource_requirement: dict, token: str, fl_ctx: FLContext) -> dict:
print(f"{self.name}: allocating resources {resource_requirement}")
result = {}
for k in resource_requirement:
if k in self.resources:
self.resources[k] -= resource_requirement[k]
result[k] = resource_requirement[k]
return result
def free_resources(self, resources: dict, token: str, fl_ctx: FLContext):
print(f"{self.name}: freeing resources {resources}")
for k in resources:
self.resources[k] += resources[k]
def report_resources(self, fl_ctx):
return self.resources
class Site:
def __init__(self, name, resources, resource_manager=None):
self.name = name
if resource_manager:
self.resource_manager = resource_manager
else:
self.resource_manager = DummyResourceManager(name=name, resources=resources)
class MockServerEngine(ServerEngineSpec):
def __init__(self, clients: Dict[str, Site], run_name="exp1"):
self.fl_ctx_mgr = FLContextManager(
engine=self,
identity_name="__mock_engine",
job_id=run_name,
public_stickers={},
private_stickers={},
)
self.clients = clients
def fire_event(self, event_type: str, fl_ctx: FLContext):
pass
def get_clients(self):
return [Client(name=x, token="") for x in self.clients]
def sync_clients_from_main_process(self):
pass
def validate_targets(self, client_names: List[str]):
pass
def new_context(self):
return self.fl_ctx_mgr.new_context()
def get_workspace(self):
pass
def get_component(self, component_id: str) -> object:
pass
def register_aux_message_handler(self, topic: str, message_handle_func):
pass
def send_aux_request(
        self, targets: List[str], topic: str, request, timeout: float, fl_ctx: FLContext, optional=False
) -> dict:
pass
def get_widget(self, widget_id: str):
pass
def persist_components(self, fl_ctx: FLContext, completed: bool):
pass
def restore_components(self, snapshot, fl_ctx: FLContext):
pass
def start_client_job(self, job_id, client_sites):
pass
def check_client_resources(
self, job_id: str, resource_reqs: Dict[str, dict]
) -> Dict[str, Tuple[bool, Optional[str]]]:
result = {}
with self.new_context() as fl_ctx:
for site_name, requirements in resource_reqs.items():
result[site_name] = self.clients[site_name].resource_manager.check_resources(requirements, fl_ctx)
return result
def get_client_name_from_token(self, token):
return self.clients.get(token)
def cancel_client_resources(
self, resource_check_results: Dict[str, Tuple[bool, str]], resource_reqs: Dict[str, dict]
):
with self.new_context() as fl_ctx:
for site_name, result in resource_check_results.items():
check_result, token = result
if check_result and token:
self.clients[site_name].resource_manager.cancel_resources(
resource_requirement=resource_reqs[site_name], token=token, fl_ctx=fl_ctx
)
def update_job_run_status(self):
pass
def create_servers(server_num, sites: List[Site]):
servers = []
for i in range(server_num):
engine = MockServerEngine(clients={s.name: s for s in sites})
servers.append(engine)
return servers
def create_resource(cpu, gpu):
return {"cpu": cpu, "gpu": gpu}
def create_job(job_id, resource_spec, deploy_map, min_sites, required_sites=None):
return Job(
job_id=job_id,
resource_spec=resource_spec,
deploy_map=deploy_map,
min_sites=min_sites,
required_sites=required_sites,
meta={},
)
def create_jobs(num_jobs, prefix="job", **kwargs):
return [Job(job_id=f"{prefix}{i}", **kwargs) for i in range(num_jobs)]
job1 = create_job(
job_id="job1",
resource_spec={"site1": create_resource(1, 4), "site2": create_resource(1, 4), "site3": create_resource(2, 1)},
deploy_map={"app1": ["server", "site1", "site2"], "app2": ["site3"]},
min_sites=3,
)
job2 = create_job(
job_id="job2",
resource_spec={"site1": create_resource(2, 4), "site2": create_resource(2, 4), "site3": create_resource(12, 4)},
deploy_map={"app3": ["server", "site1", "site2"], "app4": ["site3"]},
min_sites=3,
)
job3 = create_job(
job_id="job3",
resource_spec={},
deploy_map={"app5": [ALL_SITES]},
min_sites=3,
)
job4 = create_job(
job_id="job4",
resource_spec={"site1": create_resource(2, 4), "site2": create_resource(5, 4), "site3": create_resource(12, 4)},
deploy_map={"app7": ["server", "site1", "site2"], "app8": ["site3", "site4", "site5"]},
min_sites=3,
)
job5 = create_job(
job_id="job5",
resource_spec={},
deploy_map={"app9": [ALL_SITES], "app10": []},
min_sites=3,
)
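# The expectations in TEST_CASES below roughly follow how DefaultJobScheduler
# appears to resolve a deploy_map: every site an app is mapped to (or every
# connected site plus the server for ALL_SITES) gets a DispatchInfo with that
# app name and the site's entry from resource_spec (empty if absent), while
# sites that are not connected or fail their resource check are dropped. A
# rough sketch of that derivation, ignoring the filtering (assumed behavior,
# not the scheduler's actual code):
def _illustrative_dispatch(job, connected_sites):
    infos = {}
    for app, sites in job.deploy_map.items():
        targets = ["server"] + connected_sites if ALL_SITES in sites else sites
        for site in targets:
            infos[site] = DispatchInfo(
                app_name=app, resource_requirements=job.resource_spec.get(site, {}), token=None
            )
    return infos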
TEST_CASES = [
(
[job1],
[
Site(name="site1", resources=create_resource(16, 8)),
Site(name="site2", resources=create_resource(16, 8)),
Site(name="site3", resources=create_resource(32, 1)),
Site(name="site4", resources=create_resource(2, 1)),
],
job1,
{
"server": DispatchInfo(app_name="app1", resource_requirements={}, token=None),
"site1": DispatchInfo(app_name="app1", resource_requirements=create_resource(1, 4), token=None),
"site2": DispatchInfo(app_name="app1", resource_requirements=create_resource(1, 4), token=None),
"site3": DispatchInfo(app_name="app2", resource_requirements=create_resource(2, 1), token=None),
},
),
(
[job2, job1],
[
Site(name="site1", resources=create_resource(16, 8)),
Site(name="site2", resources=create_resource(16, 8)),
Site(name="site3", resources=create_resource(32, 1)),
Site(name="site4", resources=create_resource(2, 1)),
],
job1,
{
"server": DispatchInfo(app_name="app1", resource_requirements={}, token=None),
"site1": DispatchInfo(app_name="app1", resource_requirements=create_resource(1, 4), token=None),
"site2": DispatchInfo(app_name="app1", resource_requirements=create_resource(1, 4), token=None),
"site3": DispatchInfo(app_name="app2", resource_requirements=create_resource(2, 1), token=None),
},
),
(
[job3],
[Site(name=f"site{i}", resources=create_resource(16, 8)) for i in range(8)],
job3,
{
"server": DispatchInfo(app_name="app5", resource_requirements={}, token=None),
"site0": DispatchInfo(app_name="app5", resource_requirements={}, token=None),
"site1": DispatchInfo(app_name="app5", resource_requirements={}, token=None),
"site2": DispatchInfo(app_name="app5", resource_requirements={}, token=None),
"site3": DispatchInfo(app_name="app5", resource_requirements={}, token=None),
"site4": DispatchInfo(app_name="app5", resource_requirements={}, token=None),
"site5": DispatchInfo(app_name="app5", resource_requirements={}, token=None),
"site6": DispatchInfo(app_name="app5", resource_requirements={}, token=None),
"site7": DispatchInfo(app_name="app5", resource_requirements={}, token=None),
},
),
(
[job4, job1],
[
Site(name="site1", resources=create_resource(16, 8)),
Site(name="site2", resources=create_resource(16, 8)),
Site(name="site3", resources=create_resource(32, 1)),
Site(name="site4", resources=create_resource(2, 1)),
],
job4,
{
"server": DispatchInfo(app_name="app7", resource_requirements={}, token=None),
"site1": DispatchInfo(app_name="app7", resource_requirements=create_resource(2, 4), token=None),
"site2": DispatchInfo(app_name="app7", resource_requirements=create_resource(5, 4), token=None),
"site4": DispatchInfo(app_name="app8", resource_requirements={}, token=None),
},
),
(
[job5],
[Site(name=f"site{i}", resources=create_resource(16, 8)) for i in range(8)],
job5,
{
"server": DispatchInfo(app_name="app9", resource_requirements={}, token=None),
"site0": DispatchInfo(app_name="app9", resource_requirements={}, token=None),
"site1": DispatchInfo(app_name="app9", resource_requirements={}, token=None),
"site2": DispatchInfo(app_name="app9", resource_requirements={}, token=None),
"site3": DispatchInfo(app_name="app9", resource_requirements={}, token=None),
"site4": DispatchInfo(app_name="app9", resource_requirements={}, token=None),
"site5": DispatchInfo(app_name="app9", resource_requirements={}, token=None),
"site6": DispatchInfo(app_name="app9", resource_requirements={}, token=None),
"site7": DispatchInfo(app_name="app9", resource_requirements={}, token=None),
},
),
]
@pytest.fixture(
params=[{"num_sites": 3}],
)
def setup_and_teardown(request):
num_sites = request.param["num_sites"]
sites = [Site(name=f"site{i}", resources=create_resource(1, 1)) for i in range(num_sites)]
servers = create_servers(server_num=1, sites=sites)
scheduler = DefaultJobScheduler(max_jobs=1)
job_manager = Mock(spec=JobDefManagerSpec)
yield servers, scheduler, num_sites, job_manager
class TestDefaultJobScheduler:
def test_weird_deploy_map(self, setup_and_teardown):
servers, scheduler, num_sites, job_manager = setup_and_teardown
candidate = create_job(
job_id="test_job",
resource_spec={},
deploy_map={"app5": []},
min_sites=1,
)
with servers[0].new_context() as fl_ctx:
job, dispatch_info = scheduler.schedule_job(
job_manager=job_manager, job_candidates=[candidate], fl_ctx=fl_ctx
)
assert job is None
def test_missing_deploy_map(self, setup_and_teardown):
servers, scheduler, num_sites, job_manager = setup_and_teardown
candidate = create_job(
job_id="test_job",
resource_spec={},
deploy_map=None,
min_sites=1,
)
with servers[0].new_context() as fl_ctx:
_, _ = scheduler.schedule_job(job_manager=job_manager, job_candidates=[candidate], fl_ctx=fl_ctx)
assert job_manager.set_status.called
assert job_manager.set_status.call_args[0][1] == RunStatus.FINISHED_CANT_SCHEDULE
def test_less_active_than_min(self, setup_and_teardown):
servers, scheduler, num_sites, job_manager = setup_and_teardown
candidate = create_job(
job_id="job",
resource_spec={},
deploy_map={"app5": [ALL_SITES]},
min_sites=num_sites + 1,
)
with servers[0].new_context() as fl_ctx:
job, dispatch_info = scheduler.schedule_job(
job_manager=job_manager, job_candidates=[candidate], fl_ctx=fl_ctx
)
assert job is None
def test_require_sites_not_active(self, setup_and_teardown):
servers, scheduler, num_sites, job_manager = setup_and_teardown
candidate = create_job(
job_id="job",
resource_spec={},
deploy_map={"app5": [ALL_SITES]},
min_sites=1,
required_sites=[f"site{num_sites}"],
)
with servers[0].new_context() as fl_ctx:
job, dispatch_info = scheduler.schedule_job(
job_manager=job_manager, job_candidates=[candidate], fl_ctx=fl_ctx
)
assert job is None
def test_require_sites_not_enough_resource(self, setup_and_teardown):
servers, scheduler, num_sites, job_manager = setup_and_teardown
candidate = create_job(
job_id="job",
resource_spec={"site2": create_resource(2, 2)},
deploy_map={"app5": [ALL_SITES]},
min_sites=1,
required_sites=["site2"],
)
with servers[0].new_context() as fl_ctx:
job, dispatch_info = scheduler.schedule_job(
job_manager=job_manager, job_candidates=[candidate], fl_ctx=fl_ctx
)
assert job is None
def test_not_enough_sites_has_enough_resource(self, setup_and_teardown):
servers, scheduler, num_sites, job_manager = setup_and_teardown
candidate = create_job(
job_id="job",
resource_spec={f"site{i}": create_resource(2, 2) for i in range(num_sites)},
deploy_map={"app5": [ALL_SITES]},
min_sites=2,
required_sites=[],
)
with servers[0].new_context() as fl_ctx:
job, dispatch_info = scheduler.schedule_job(
job_manager=job_manager, job_candidates=[candidate], fl_ctx=fl_ctx
)
assert job is None
@pytest.mark.parametrize("job_candidates,sites,expected_job,expected_dispatch_info", TEST_CASES)
def test_normal_case(self, job_candidates, sites, expected_job, expected_dispatch_info):
servers = create_servers(server_num=1, sites=sites)
scheduler = DefaultJobScheduler(max_jobs=10, min_schedule_interval=0)
job_manager = Mock(spec=JobDefManagerSpec)
with servers[0].new_context() as fl_ctx:
job, dispatch_info = scheduler.schedule_job(
job_manager=job_manager, job_candidates=job_candidates, fl_ctx=fl_ctx
)
assert job == expected_job
assert dispatch_info == expected_dispatch_info
@pytest.mark.parametrize("add_first_job", [True, False])
def test_a_list_of_jobs(self, add_first_job):
num_sites = 8
num_jobs = 5
max_jobs_allow = 4
resource_on_each_site = {"gpu": [0, 1]}
sites: Dict[str, Site] = {
f"site{i}": Site(
name=f"site{i}",
resources=resource_on_each_site,
resource_manager=ListResourceManager(resources=resource_on_each_site),
)
for i in range(num_sites)
}
first_job = create_jobs(
num_jobs=1,
prefix="weird_job",
resource_spec={"site0": {"gpu": 1}},
deploy_map={"app": ["server", "site0"]},
min_sites=1,
required_sites=["site0"],
meta={},
)
jobs = create_jobs(
num_jobs=num_jobs,
resource_spec={f"site{i}": {"gpu": 1} for i in range(num_sites)},
deploy_map={"app": ["server"] + [f"site{i}" for i in range(num_sites)]},
min_sites=num_sites,
required_sites=[f"site{i}" for i in range(num_sites)],
meta={},
)
if add_first_job:
jobs = first_job + jobs
servers = create_servers(server_num=1, sites=list(sites.values()))
scheduler = DefaultJobScheduler(max_jobs=max_jobs_allow, min_schedule_interval=0)
job_manager = Mock(spec=JobDefManagerSpec)
submitted_jobs = list(jobs)
results = []
for i in range(10):
with servers[0].new_context() as fl_ctx:
job, dispatch_infos = scheduler.schedule_job(
job_manager=job_manager, job_candidates=submitted_jobs, fl_ctx=fl_ctx
)
if job:
submitted_jobs.remove(job)
results.append(job)
for site_name, dispatch_info in dispatch_infos.items():
if site_name != "server":
sites[site_name].resource_manager.allocate_resources(
dispatch_info.resource_requirements, token=dispatch_info.token, fl_ctx=fl_ctx
)
assert results == [jobs[0], jobs[1]]
def test_failed_schedule_history(self, setup_and_teardown):
servers, scheduler, num_sites, job_manager = setup_and_teardown
candidate = create_job(
job_id="job",
resource_spec={},
deploy_map={"app5": [ALL_SITES]},
min_sites=num_sites + 1,
)
with servers[0].new_context() as fl_ctx:
_, _ = scheduler.schedule_job(job_manager=job_manager, job_candidates=[candidate], fl_ctx=fl_ctx)
assert candidate.meta[JobMetaKey.SCHEDULE_COUNT.value] == 1
assert "connected sites (3) < min_sites (4)" in candidate.meta[JobMetaKey.SCHEDULE_HISTORY.value][0]
def test_job_cannot_scheduled(self, setup_and_teardown):
servers, scheduler, num_sites, job_manager = setup_and_teardown
scheduler = DefaultJobScheduler(max_jobs=4, min_schedule_interval=0, max_schedule_count=2)
candidate = create_job(
job_id="job",
resource_spec={},
deploy_map={"app5": [ALL_SITES]},
min_sites=num_sites + 1,
)
for i in range(3):
with servers[0].new_context() as fl_ctx:
_, _ = scheduler.schedule_job(job_manager=job_manager, job_candidates=[candidate], fl_ctx=fl_ctx)
assert candidate.meta[JobMetaKey.SCHEDULE_COUNT.value] == 3
assert job_manager.set_status.call_args[0][1] == RunStatus.FINISHED_CANT_SCHEDULE
| NVFlare-main | tests/unit_test/app_common/job_schedulers/job_scheduler_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.apis.dxo import DXO, DataKind, from_shareable
from nvflare.app_common.abstract.fl_model import FLModel, FLModelConst, ParamsType
from nvflare.app_common.app_constant import AppConstants
from nvflare.app_common.utils.fl_model_utils import FLModelUtils
TEST_CASES = [
({"hello": 123}, 100, 1),
({"cool": 123, "very": 4}, 10, 0),
]
FL_MODEL_TEST_CASES = [
(FLModel(params={"hello": 123}, params_type=ParamsType.FULL, current_round=0, total_rounds=10), DataKind.WEIGHTS),
(
FLModel(params={"hello": 123}, params_type=ParamsType.DIFF, current_round=0, total_rounds=10),
DataKind.WEIGHT_DIFF,
),
(FLModel(metrics={"loss": 0.79}, current_round=0, total_rounds=10, params_type=None), DataKind.METRICS),
]
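# The cases above pin down the FLModel -> DXO data-kind mapping the tests
# rely on: ParamsType.FULL -> DataKind.WEIGHTS, ParamsType.DIFF ->
# DataKind.WEIGHT_DIFF, and a metrics-only model -> DataKind.METRICS.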
class TestFLModelUtils:
@pytest.mark.parametrize("weights,num_rounds,current_round", TEST_CASES)
def test_from_shareable(self, weights, num_rounds, current_round):
dxo = DXO(data_kind=DataKind.WEIGHTS, data=weights, meta={AppConstants.VALIDATE_TYPE: "before_train_validate"})
shareable = dxo.to_shareable()
shareable.set_header(AppConstants.NUM_ROUNDS, num_rounds)
shareable.set_header(AppConstants.CURRENT_ROUND, current_round)
fl_model = FLModelUtils.from_shareable(shareable=shareable)
assert fl_model.params == dxo.data
assert fl_model.params_type == ParamsType.FULL
assert fl_model.current_round == current_round
assert fl_model.total_rounds == num_rounds
@pytest.mark.parametrize("fl_model,expected_data_kind", FL_MODEL_TEST_CASES)
def test_to_shareable(self, fl_model, expected_data_kind):
shareable = FLModelUtils.to_shareable(fl_model)
dxo = from_shareable(shareable)
assert shareable.get_header(AppConstants.CURRENT_ROUND) == fl_model.current_round
assert shareable.get_header(AppConstants.NUM_ROUNDS) == fl_model.total_rounds
assert dxo.data_kind == expected_data_kind
if expected_data_kind == DataKind.METRICS:
assert dxo.data == fl_model.metrics
else:
assert dxo.data == fl_model.params
@pytest.mark.parametrize("weights,num_rounds,current_round", TEST_CASES)
def test_from_to_shareable(self, weights, num_rounds, current_round):
dxo = DXO(data_kind=DataKind.WEIGHTS, data=weights, meta={AppConstants.VALIDATE_TYPE: "before_train_validate"})
shareable = dxo.to_shareable()
shareable.set_header(AppConstants.NUM_ROUNDS, num_rounds)
shareable.set_header(AppConstants.CURRENT_ROUND, current_round)
fl_model = FLModelUtils.from_shareable(shareable=shareable)
result_shareable = FLModelUtils.to_shareable(fl_model)
assert shareable == result_shareable
@pytest.mark.parametrize("weights,num_rounds,current_round", TEST_CASES)
def test_from_dxo(self, weights, num_rounds, current_round):
dxo = DXO(
data_kind=DataKind.FL_MODEL,
data={
FLModelConst.PARAMS: weights,
FLModelConst.PARAMS_TYPE: ParamsType.FULL,
FLModelConst.TOTAL_ROUNDS: num_rounds,
FLModelConst.CURRENT_ROUND: current_round,
},
)
fl_model = FLModelUtils.from_dxo(dxo)
assert fl_model.params == weights
assert fl_model.params_type == ParamsType.FULL
assert fl_model.current_round == current_round
assert fl_model.total_rounds == num_rounds
@pytest.mark.parametrize("weights,num_rounds,current_round", TEST_CASES)
def test_to_dxo(self, weights, num_rounds, current_round):
fl_model = FLModel(
params=weights, params_type=ParamsType.FULL, current_round=current_round, total_rounds=num_rounds
)
dxo = FLModelUtils.to_dxo(fl_model)
assert dxo.data_kind == DataKind.FL_MODEL
assert dxo.data[FLModelConst.PARAMS] == weights
assert dxo.data[FLModelConst.PARAMS_TYPE] == ParamsType.FULL
assert dxo.data[FLModelConst.CURRENT_ROUND] == current_round
assert dxo.data[FLModelConst.TOTAL_ROUNDS] == num_rounds
| NVFlare-main | tests/unit_test/app_common/utils/fl_model_utils_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/utils/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tempfile
import time
from typing import Optional
import numpy as np
import pytest
from nvflare.apis.utils.decomposers import flare_decomposers
from nvflare.app_common.abstract.fl_model import FLModel, ParamsType
from nvflare.app_common.decomposers import common_decomposers
from nvflare.app_common.model_exchange.model_exchanger import ModelExchanger
from nvflare.fuel.utils.constants import Mode
from nvflare.fuel.utils.pipe.file_pipe import FilePipe
from nvflare.fuel.utils.pipe.pipe import Message
from nvflare.fuel.utils.pipe.pipe_handler import PipeHandler
TEST_CASES = [
{"a": 1, "b": 3},
{},
{"abc": [1, 2, 3], "d": [4, 5]},
{"abc": (1, 2, 3), "d": (4, 5)},
{"hello": b"a string", "cool": 6},
{f"layer{i}": np.random.rand(256, 256) for i in range(5)},
]
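# The test below drives both ends of a FilePipe exchange in a single process:
# the ACTIVE endpoint plays the controller (send the FLModel request via
# PipeHandler, poll for the reply), while the PASSIVE endpoint plays the
# trainer through ModelExchanger. Condensed, the trainer side amounts to:
#
#   exchanger = ModelExchanger(pipe=recv_pipe, pipe_name="test_pipe", topic="test_topic")
#   model = exchanger.receive_model()  # blocks until the controller's request arrives
#   exchanger.submit_model(model)      # sends the (updated) model back as the reply
#   exchanger.finalize()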
class TestModelExchanger:
@pytest.mark.parametrize("weights", TEST_CASES)
def test_put_get_fl_model_with_file_exchanger(self, weights):
fl_model = FLModel(params=weights, params_type=ParamsType.FULL)
test_pipe_name = "test_pipe"
test_topic = "test_topic"
flare_decomposers.register()
common_decomposers.register()
with tempfile.TemporaryDirectory() as root_dir:
send_pipe = FilePipe(Mode.ACTIVE, root_path=root_dir)
send_pipe.open(test_pipe_name)
pipe_handler = PipeHandler(send_pipe)
pipe_handler.start()
req = Message.new_request(topic=test_topic, data=fl_model)
_ = pipe_handler.send_to_peer(req)
recv_pipe = FilePipe(Mode.PASSIVE, root_path=root_dir)
y_mdx = ModelExchanger(pipe=recv_pipe, pipe_name=test_pipe_name, topic=test_topic)
result_model = y_mdx.receive_model()
for k, v in result_model.params.items():
np.testing.assert_array_equal(weights[k], v)
y_mdx.submit_model(result_model)
start_time = time.time()
receive_reply_model = None
while True:
if time.time() - start_time >= 50:
break
reply: Optional[Message] = pipe_handler.get_next()
if reply is not None and reply.topic == req.topic and req.msg_id == reply.req_id:
receive_reply_model = reply.data
break
time.sleep(0.1)
assert receive_reply_model is not None
for k, v in receive_reply_model.params.items():
                np.testing.assert_array_equal(v, result_model.params[k])
pipe_handler.stop(close_pipe=True)
y_mdx.finalize()
| NVFlare-main | tests/unit_test/app_common/model_exchange/model_exchanger_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/model_exchange/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from collections import deque
import pytest
from nvflare.apis.event_type import EventType
from nvflare.apis.fl_context import FLContext, FLContextManager
from nvflare.app_common.resource_managers.list_resource_manager import ListResourceManager
class MockEngine:
def __init__(self, run_name="exp1"):
self.fl_ctx_mgr = FLContextManager(
engine=self,
identity_name="__mock_engine",
job_id=run_name,
public_stickers={},
private_stickers={},
)
def new_context(self):
return self.fl_ctx_mgr.new_context()
def fire_event(self, event_type: str, fl_ctx: FLContext):
pass
CHECK_TEST_CASES = [
({"gpu": [1, 2, 3, 4]}, {"gpu": 1}, True, {"gpu": [1]}),
({"gpu": [1, 2, 3, 4]}, {"gpu": 4}, True, {"gpu": [1, 2, 3, 4]}),
({"gpu": [1]}, {"gpu": 1}, True, {"gpu": [1]}),
({"gpu": [1], "cpu": [1, 2, 3, 4, 5]}, {"gpu": 1, "cpu": 3}, True, {"gpu": [1], "cpu": [1, 2, 3]}),
({"gpu": [1]}, {"gpu": 2}, False, {}),
({"gpu": [1, 2]}, {"gpu": 5}, False, {}),
({"gpu": [1, 2]}, {"cpu": 1}, False, {}),
]
TEST_CASES = [
({"gpu": [1, 2, 3, 4]}, {"gpu": 1}, {"gpu": [1]}),
({"gpu": [1, 2, 3, 4]}, {"gpu": 4}, {"gpu": [1, 2, 3, 4]}),
({"gpu": [1]}, {"gpu": 1}, {"gpu": [1]}),
({"gpu": [1], "cpu": [1, 2, 3, 4, 5]}, {"gpu": 1, "cpu": 3}, {"gpu": [1], "cpu": [1, 2, 3]}),
]
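# The cases above encode ListResourceManager's reservation lifecycle:
# check_resources reserves the first N units of each listed resource and
# returns a token; allocate_resources converts the reservation behind that
# token into an allocation; cancel_resources / free_resources return the
# units to the pool. Condensed usage (mirroring the tests below):
#
#   ok, token = manager.check_resources({"gpu": 1}, fl_ctx)
#   if ok:
#       allocated = manager.allocate_resources({"gpu": 1}, token, fl_ctx)
#       ...
#       manager.free_resources(allocated, token, fl_ctx)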
class TestListResourceManager:
@pytest.mark.parametrize(
"resources, resource_requirement, expected_check_result, expected_reserved_resources", CHECK_TEST_CASES
)
def test_check_resource(self, resources, resource_requirement, expected_check_result, expected_reserved_resources):
engine = MockEngine()
list_resource_manager = ListResourceManager(resources=resources)
with engine.new_context() as fl_ctx:
check_result, token = list_resource_manager.check_resources(
resource_requirement=resource_requirement, fl_ctx=fl_ctx
)
assert expected_check_result == check_result
if expected_check_result:
assert expected_reserved_resources == list_resource_manager.reserved_resources[token][0]
@pytest.mark.parametrize("resources, resource_requirement, expected_reserved_resources", TEST_CASES)
def test_cancel_resource(self, resources, resource_requirement, expected_reserved_resources):
engine = MockEngine()
list_resource_manager = ListResourceManager(resources=resources)
with engine.new_context() as fl_ctx:
_, token = list_resource_manager.check_resources(resource_requirement=resource_requirement, fl_ctx=fl_ctx)
assert expected_reserved_resources == list_resource_manager.reserved_resources[token][0]
with engine.new_context() as fl_ctx:
list_resource_manager.cancel_resources(
resource_requirement=resource_requirement, token=token, fl_ctx=fl_ctx
)
assert list_resource_manager.reserved_resources == {}
@pytest.mark.parametrize("resources, resource_requirement, expected_reserved_resources", TEST_CASES)
def test_allocate_resource(self, resources, resource_requirement, expected_reserved_resources):
engine = MockEngine()
list_resource_manager = ListResourceManager(resources=resources)
with engine.new_context() as fl_ctx:
_, token = list_resource_manager.check_resources(resource_requirement=resource_requirement, fl_ctx=fl_ctx)
assert expected_reserved_resources == list_resource_manager.reserved_resources[token][0]
with engine.new_context() as fl_ctx:
result = list_resource_manager.allocate_resources(
resource_requirement=resource_requirement, token=token, fl_ctx=fl_ctx
)
assert result == expected_reserved_resources
@pytest.mark.parametrize("resources, resource_requirement, expected_reserved_resources", TEST_CASES)
def test_free_resource(self, resources, resource_requirement, expected_reserved_resources):
engine = MockEngine()
list_resource_manager = ListResourceManager(resources=resources)
with engine.new_context() as fl_ctx:
check_result, token = list_resource_manager.check_resources(
resource_requirement=resource_requirement, fl_ctx=fl_ctx
)
assert expected_reserved_resources == list_resource_manager.reserved_resources[token][0]
with engine.new_context() as fl_ctx:
result = list_resource_manager.allocate_resources(
resource_requirement=resource_requirement, token=token, fl_ctx=fl_ctx
)
with engine.new_context() as fl_ctx:
list_resource_manager.free_resources(resources=result, token=token, fl_ctx=fl_ctx)
assert list_resource_manager.reserved_resources == {}
def test_check_one_check_two_then_allocate_two_allocate_one(self):
engine = MockEngine()
list_resource_manager = ListResourceManager(resources={"gpu": [f"gpu_{i}" for i in range(4)]})
resource_requirement = {"gpu": 1}
with engine.new_context() as fl_ctx:
check1, token1 = list_resource_manager.check_resources(
resource_requirement=resource_requirement, fl_ctx=fl_ctx
)
check2, token2 = list_resource_manager.check_resources(
resource_requirement=resource_requirement, fl_ctx=fl_ctx
)
with engine.new_context() as fl_ctx:
result = list_resource_manager.allocate_resources(
resource_requirement=resource_requirement, token=token2, fl_ctx=fl_ctx
)
assert result == {"gpu": ["gpu_1"]}
with engine.new_context() as fl_ctx:
result = list_resource_manager.allocate_resources(
resource_requirement=resource_requirement, token=token1, fl_ctx=fl_ctx
)
assert result == {"gpu": ["gpu_0"]}
def test_check_one_cancel_one_check_four_then_allocate_four(self):
engine = MockEngine()
list_resource_manager = ListResourceManager(resources={"gpu": [f"gpu_{i}" for i in range(4)]})
resource_requirement1 = {"gpu": 1}
resource_requirement2 = {"gpu": 4}
with engine.new_context() as fl_ctx:
check1, token1 = list_resource_manager.check_resources(
resource_requirement=resource_requirement1, fl_ctx=fl_ctx
)
with engine.new_context() as fl_ctx:
list_resource_manager.cancel_resources(
resource_requirement=resource_requirement1, token=token1, fl_ctx=fl_ctx
)
with engine.new_context() as fl_ctx:
check2, token2 = list_resource_manager.check_resources(
resource_requirement=resource_requirement2, fl_ctx=fl_ctx
)
result = list_resource_manager.allocate_resources(
resource_requirement=resource_requirement2, token=token2, fl_ctx=fl_ctx
)
assert result == {"gpu": ["gpu_0", "gpu_1", "gpu_2", "gpu_3"]}
def test_check_and_timeout(self):
timeout = 5
engine = MockEngine()
list_resource_manager = ListResourceManager(
resources={"gpu": [f"gpu_{i}" for i in range(4)]}, expiration_period=timeout
)
resource_requirement = {"gpu": 1}
with engine.new_context() as fl_ctx:
list_resource_manager.handle_event(event_type=EventType.SYSTEM_START, fl_ctx=fl_ctx)
check_result, token = list_resource_manager.check_resources(
resource_requirement=resource_requirement, fl_ctx=fl_ctx
)
assert {"gpu": ["gpu_0"]} == list_resource_manager.reserved_resources[token][0]
time.sleep(timeout + 1)
with engine.new_context() as fl_ctx:
list_resource_manager.handle_event(event_type=EventType.SYSTEM_END, fl_ctx=fl_ctx)
assert list_resource_manager.reserved_resources == {}
assert list_resource_manager.resources == {"gpu": deque(["gpu_0", "gpu_1", "gpu_2", "gpu_3"])}
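# A minimal usage sketch (not a test), reusing the MockEngine defined above to
# walk the typical check -> allocate -> free lifecycle; the resource values are
# made-up placeholders.
def _example_list_resource_lifecycle():
    engine = MockEngine()
    manager = ListResourceManager(resources={"gpu": ["gpu_0", "gpu_1"]})
    with engine.new_context() as fl_ctx:
        ok, token = manager.check_resources(resource_requirement={"gpu": 1}, fl_ctx=fl_ctx)
    if ok:
        with engine.new_context() as fl_ctx:
            allocated = manager.allocate_resources(resource_requirement={"gpu": 1}, token=token, fl_ctx=fl_ctx)
        with engine.new_context() as fl_ctx:
            manager.free_resources(resources=allocated, token=token, fl_ctx=fl_ctx)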
| NVFlare-main | tests/unit_test/app_common/resource_managers/list_resource_manager_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from unittest.mock import patch
import pytest
from nvflare.apis.event_type import EventType
from nvflare.apis.fl_context import FLContext, FLContextManager
from nvflare.app_common.resource_managers.gpu_resource_manager import GPUResourceManager
NUM_GPU_KEY = "num_of_gpus"
GPU_MEM_KEY = "mem_per_gpu_in_GiB"
class MockEngine:
def __init__(self, run_name="exp1"):
self.fl_ctx_mgr = FLContextManager(
engine=self,
identity_name="__mock_engine",
job_id=run_name,
public_stickers={},
private_stickers={},
)
def new_context(self):
return self.fl_ctx_mgr.new_context()
def fire_event(self, event_type: str, fl_ctx: FLContext):
pass
def _gen_requirement(gpus, gpu_mem):
return {NUM_GPU_KEY: gpus, GPU_MEM_KEY: gpu_mem}
CHECK_TEST_CASES = [
(4, 16, _gen_requirement(1, 8), True, {0: 8}),
(4, 16, _gen_requirement(2, 8), True, {0: 8, 1: 8}),
]
TEST_CASES = [
(4, 16, _gen_requirement(1, 8), {0: 8}),
(4, 16, _gen_requirement(2, 8), {0: 8, 1: 8}),
]
@pytest.fixture(scope="class", autouse=True)
def mock_get_host_gpu_ids():
with patch("nvflare.app_common.resource_managers.gpu_resource_manager.get_host_gpu_ids") as _fixture:
_fixture.return_value = [0, 1, 2, 3]
yield _fixture
class TestGPUResourceManager:
@pytest.mark.parametrize(
"gpus, gpu_mem, resource_requirement, expected_check_result, expected_reserved_resources", CHECK_TEST_CASES
)
def test_check_resource(
self,
mock_get_host_gpu_ids,
gpus,
gpu_mem,
resource_requirement,
expected_check_result,
expected_reserved_resources,
):
engine = MockEngine()
gpu_resource_manager = GPUResourceManager(num_of_gpus=gpus, mem_per_gpu_in_GiB=gpu_mem)
with engine.new_context() as fl_ctx:
check_result, token = gpu_resource_manager.check_resources(
resource_requirement=resource_requirement, fl_ctx=fl_ctx
)
assert expected_check_result == check_result
if expected_check_result:
assert expected_reserved_resources == gpu_resource_manager.reserved_resources[token][0]
@pytest.mark.parametrize("gpus, gpu_mem, resource_requirement, expected_reserved_resources", TEST_CASES)
def test_cancel_resource(
self, mock_get_host_gpu_ids, gpus, gpu_mem, resource_requirement, expected_reserved_resources
):
engine = MockEngine()
gpu_resource_manager = GPUResourceManager(num_of_gpus=gpus, mem_per_gpu_in_GiB=gpu_mem)
with engine.new_context() as fl_ctx:
_, token = gpu_resource_manager.check_resources(resource_requirement=resource_requirement, fl_ctx=fl_ctx)
assert expected_reserved_resources == gpu_resource_manager.reserved_resources[token][0]
with engine.new_context() as fl_ctx:
gpu_resource_manager.cancel_resources(resource_requirement=resource_requirement, token=token, fl_ctx=fl_ctx)
assert gpu_resource_manager.reserved_resources == {}
@pytest.mark.parametrize("gpus, gpu_mem, resource_requirement, expected_reserved_resources", TEST_CASES)
def test_allocate_resource(
self, mock_get_host_gpu_ids, gpus, gpu_mem, resource_requirement, expected_reserved_resources
):
engine = MockEngine()
gpu_resource_manager = GPUResourceManager(num_of_gpus=gpus, mem_per_gpu_in_GiB=gpu_mem)
with engine.new_context() as fl_ctx:
_, token = gpu_resource_manager.check_resources(resource_requirement=resource_requirement, fl_ctx=fl_ctx)
assert expected_reserved_resources == gpu_resource_manager.reserved_resources[token][0]
with engine.new_context() as fl_ctx:
result = gpu_resource_manager.allocate_resources(
resource_requirement=resource_requirement, token=token, fl_ctx=fl_ctx
)
assert result == expected_reserved_resources
@pytest.mark.parametrize("gpus, gpu_mem, resource_requirement, expected_reserved_resources", TEST_CASES)
def test_free_resource(
self, mock_get_host_gpu_ids, gpus, gpu_mem, resource_requirement, expected_reserved_resources
):
engine = MockEngine()
gpu_resource_manager = GPUResourceManager(num_of_gpus=gpus, mem_per_gpu_in_GiB=gpu_mem)
with engine.new_context() as fl_ctx:
check_result, token = gpu_resource_manager.check_resources(
resource_requirement=resource_requirement, fl_ctx=fl_ctx
)
assert expected_reserved_resources == gpu_resource_manager.reserved_resources[token][0]
with engine.new_context() as fl_ctx:
result = gpu_resource_manager.allocate_resources(
resource_requirement=resource_requirement, token=token, fl_ctx=fl_ctx
)
with engine.new_context() as fl_ctx:
gpu_resource_manager.free_resources(resources=result, token=token, fl_ctx=fl_ctx)
assert gpu_resource_manager.reserved_resources == {}
def test_check_four_allocate_four(self, mock_get_host_gpu_ids):
engine = MockEngine()
gpu_resource_manager = GPUResourceManager(num_of_gpus=4, mem_per_gpu_in_GiB=16)
with engine.new_context() as fl_ctx:
check1, token1 = gpu_resource_manager.check_resources(
resource_requirement=_gen_requirement(1, 8), fl_ctx=fl_ctx
)
check2, token2 = gpu_resource_manager.check_resources(
resource_requirement=_gen_requirement(1, 8), fl_ctx=fl_ctx
)
check3, token3 = gpu_resource_manager.check_resources(
resource_requirement=_gen_requirement(1, 12), fl_ctx=fl_ctx
)
check4, token4 = gpu_resource_manager.check_resources(
resource_requirement=_gen_requirement(1, 12), fl_ctx=fl_ctx
)
with engine.new_context() as fl_ctx:
result = gpu_resource_manager.allocate_resources(
resource_requirement=_gen_requirement(1, 8), token=token2, fl_ctx=fl_ctx
)
assert result == {0: 8}
with engine.new_context() as fl_ctx:
result = gpu_resource_manager.allocate_resources(
resource_requirement=_gen_requirement(1, 8), token=token1, fl_ctx=fl_ctx
)
assert result == {0: 8}
with engine.new_context() as fl_ctx:
result = gpu_resource_manager.allocate_resources(
resource_requirement=_gen_requirement(1, 12), token=token3, fl_ctx=fl_ctx
)
assert result == {1: 12}
with engine.new_context() as fl_ctx:
result = gpu_resource_manager.allocate_resources(
resource_requirement=_gen_requirement(1, 12), token=token4, fl_ctx=fl_ctx
)
assert result == {2: 12}
def test_check_one_cancel_one_check_four_then_allocate_four(self):
engine = MockEngine()
gpu_resource_manager = GPUResourceManager(num_of_gpus=4, mem_per_gpu_in_GiB=16)
resource_requirement1 = _gen_requirement(1, 8)
resource_requirement2 = _gen_requirement(4, 8)
with engine.new_context() as fl_ctx:
check1, token1 = gpu_resource_manager.check_resources(
resource_requirement=resource_requirement1, fl_ctx=fl_ctx
)
with engine.new_context() as fl_ctx:
gpu_resource_manager.cancel_resources(
resource_requirement=resource_requirement1, token=token1, fl_ctx=fl_ctx
)
with engine.new_context() as fl_ctx:
check2, token2 = gpu_resource_manager.check_resources(
resource_requirement=resource_requirement2, fl_ctx=fl_ctx
)
result = gpu_resource_manager.allocate_resources(
resource_requirement=resource_requirement2, token=token2, fl_ctx=fl_ctx
)
assert result == {0: 8, 1: 8, 2: 8, 3: 8}
def test_check_and_timeout(self):
timeout = 5
engine = MockEngine()
gpu_resource_manager = GPUResourceManager(num_of_gpus=4, mem_per_gpu_in_GiB=16, expiration_period=timeout)
resource_requirement = _gen_requirement(1, 8)
with engine.new_context() as fl_ctx:
gpu_resource_manager.handle_event(event_type=EventType.SYSTEM_START, fl_ctx=fl_ctx)
check_result, token = gpu_resource_manager.check_resources(
resource_requirement=resource_requirement, fl_ctx=fl_ctx
)
assert {0: 8} == gpu_resource_manager.reserved_resources[token][0]
time.sleep(timeout + 1)
with engine.new_context() as fl_ctx:
gpu_resource_manager.handle_event(event_type=EventType.SYSTEM_END, fl_ctx=fl_ctx)
assert gpu_resource_manager.reserved_resources == {}
for r, v in gpu_resource_manager.resources.items():
assert v.memory == 16
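# A minimal usage sketch (not a test). Note: the tests above patch
# get_host_gpu_ids; outside of them, constructing GPUResourceManager queries the
# real host, so this assumes a machine with at least the requested GPUs.
def _example_gpu_resource_lifecycle():
    engine = MockEngine()
    manager = GPUResourceManager(num_of_gpus=2, mem_per_gpu_in_GiB=16)
    requirement = _gen_requirement(1, 8)  # {NUM_GPU_KEY: 1, GPU_MEM_KEY: 8}
    with engine.new_context() as fl_ctx:
        ok, token = manager.check_resources(resource_requirement=requirement, fl_ctx=fl_ctx)
    if ok:
        with engine.new_context() as fl_ctx:
            allocated = manager.allocate_resources(resource_requirement=requirement, token=token, fl_ctx=fl_ctx)
        with engine.new_context() as fl_ctx:
            manager.free_resources(resources=allocated, token=token, fl_ctx=fl_ctx)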
| NVFlare-main | tests/unit_test/app_common/resource_managers/gpu_resource_manager_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/executors/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/executors/statistics/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, List
import pytest
from nvflare.apis.fl_context import FLContext
from nvflare.apis.shareable import Shareable
from nvflare.app_common.abstract.statistics_spec import Feature, HistogramType, StatisticConfig
from nvflare.app_common.executors.statistics.statistics_task_handler import StatisticsTaskHandler
from tests.unit_test.app_common.executors.statistics.mock_df_stats_executor import MockDFStatistics
class MockStatsExecutor(StatisticsTaskHandler):
def __init__(self):
super().__init__(generator_id="")
self.stats_generator = None
def initialize(self, fl_ctx: FLContext):
self.stats_generator = MockDFStatistics(data_path="")
self.stats_generator.initialize(None)
class TestStatisticsExecutor:
@classmethod
def setup_class(cls):
print("starting class: {} execution".format(cls.__name__))
cls.stats_executor = MockStatsExecutor()
cls.stats_executor.initialize(None)
def test_get_numeric_features(self):
features: Dict[str, List[Feature]] = self.stats_executor.get_numeric_features()
assert len(features["train"]) == 1
assert features["train"][0].feature_name == "Age"
assert len(features["test"]) == 1
def test_method_implementation(self):
with pytest.raises(NotImplementedError):
r = self.stats_executor.get_sum("train", "Age", StatisticConfig("sum", {}), None, None)
def test_histogram_num_of_bins(self):
hist_config = {"Age": {"bins": 5}}
bins = self.stats_executor.get_number_of_bins("Age", hist_config)
assert bins == 5
hist_config = {"*": {"bins": 5}}
bins = self.stats_executor.get_number_of_bins("Age", hist_config)
assert bins == 5
hist_config = {"Age": {"bins": 6}, "*": {"bins": 10}}
bins = self.stats_executor.get_number_of_bins("Edu", hist_config)
assert bins == 10
bins = self.stats_executor.get_number_of_bins("Age", hist_config)
assert bins == 6
with pytest.raises(Exception) as e:
hist_config = {}
bins = self.stats_executor.get_number_of_bins("Age", hist_config)
assert str(e.value) == "feature name = 'Age': missing required 'bins' config in histogram config = {}"
with pytest.raises(Exception) as e:
hist_config = {"Age": {"bin": 5}}
bins = self.stats_executor.get_number_of_bins("Age", hist_config)
assert (
str(e.value)
== "feature name = 'Age': missing required 'bins' config in histogram config = {'Age': {'bin': 5}}"
)
def test_histogram_bin_range(self):
hist_config = {"Age": {"bins": 5, "range": [0, 120]}}
bin_range = self.stats_executor.get_bin_range("Age", 0, 100, hist_config)
assert bin_range == [0, 120]
hist_config = {"*": {"bins": 5, "range": [0, 120]}}
bin_range = self.stats_executor.get_bin_range("Age", 0, 50, hist_config)
assert bin_range == [0, 120]
hist_config = {"*": {"bins": 5}}
bin_range = self.stats_executor.get_bin_range("Age", 0, 50, hist_config)
assert bin_range == [0, 50]
hist_config = {"*": {"bins": 5}, "Age": {"bins": 10}}
bin_range = self.stats_executor.get_bin_range("Age", 0, 50, hist_config)
assert bin_range == [0, 50]
def test_histogram(self):
hist_config = {"*": {"bins": 3}}
inputs = Shareable()
inputs["min"] = {"train": {"Age": 0}}
inputs["max"] = {"train": {"Age": 50}}
inputs["statistic_config"] = hist_config
statistic_config = StatisticConfig("histogram", hist_config)
histogram = self.stats_executor.get_histogram("train", "Age", statistic_config, inputs, None)
assert histogram.hist_type == HistogramType.STANDARD
assert len(histogram.bins) == 3
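# A small sketch of the bin-config resolution exercised above: an exact feature
# name takes precedence over the "*" wildcard for both the bin count and the
# optional range.
def _example_bin_config_resolution():
    executor = MockStatsExecutor()
    executor.initialize(None)
    hist_config = {"*": {"bins": 10}, "Age": {"bins": 5, "range": [0, 120]}}
    bins = executor.get_number_of_bins("Age", hist_config)  # 5: exact match wins over "*"
    bin_range = executor.get_bin_range("Age", 0, 100, hist_config)  # [0, 120]: configured range wins
    return bins, bin_range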
| NVFlare-main | tests/unit_test/app_common/executors/statistics/statistics_executor_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, List, Optional
import numpy as np
import pandas as pd
from pandas.core.series import Series
from nvflare.apis.fl_context import FLContext
from nvflare.app_common.abstract.statistics_spec import (
BinRange,
DataType,
Feature,
Histogram,
HistogramType,
Statistics,
)
from nvflare.app_common.statistics.numpy_utils import get_std_histogram_buckets
def load_data() -> Dict[str, pd.DataFrame]:
try:
train_data = [["tom", 10], ["nick", 15], ["juli", 14], ["tom2", 10], ["nick1", 25], ["juli1", 24]]
test_data = [["john", 100], ["mary", 25], ["rose", 34], ["tom1", 20], ["nick2", 35], ["juli1", 34]]
train = pd.DataFrame(train_data, columns=["Name", "Age"])
test = pd.DataFrame(test_data, columns=["Name", "Age"])
return {"train": train, "test": test}
except Exception as e:
raise Exception(f"Load data failed! {e}")
class MockDFStatistics(Statistics):
def __init__(self, data_path):
super().__init__()
self.data_path = data_path
self.data: Optional[Dict[str, pd.DataFrame]] = None
def initialize(self, fl_ctx: FLContext):
self.data = load_data()
if self.data is None:
raise ValueError("data is not loaded. make sure the data is loaded")
def features(self) -> Dict[str, List[Feature]]:
features = [Feature("Name", DataType.STRING), Feature("Age", DataType.INT)]
results: Dict[str, List[Feature]] = {"train": features, "test": features}
return results
def count(self, dataset_name: str, feature_name: str) -> int:
df: pd.DataFrame = self.data[dataset_name]
return df[feature_name].count()
def sum(self, dataset_name: str, feature_name: str) -> float:
raise NotImplementedError
def mean(self, dataset_name: str, feature_name: str) -> float:
count: int = self.count(dataset_name, feature_name)
sum_value: float = self.sum(dataset_name, feature_name)
return sum_value / count
def stddev(self, dataset_name: str, feature_name: str) -> float:
raise NotImplementedError
def variance_with_mean(
self, dataset_name: str, feature_name: str, global_mean: float, global_count: float
) -> float:
raise NotImplementedError
def histogram(
self, dataset_name: str, feature_name: str, num_of_bins: int, global_min_value: float, global_max_value: float
) -> Histogram:
df = self.data[dataset_name]
feature: Series = df[feature_name]
flattened = feature.ravel()
flattened = flattened[flattened != np.array(None)]
buckets = get_std_histogram_buckets(flattened, num_of_bins, BinRange(global_min_value, global_max_value))
return Histogram(HistogramType.STANDARD, buckets)
def max_value(self, dataset_name: str, feature_name: str) -> float:
"""this is needed for histogram global max estimation, not used for reporting"""
df = self.data[dataset_name]
return df[feature_name].max()
def min_value(self, dataset_name: str, feature_name: str) -> float:
"""this is needed for histogram global min estimation, not used for reporting"""
df = self.data[dataset_name]
return df[feature_name].min()
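# A minimal usage sketch (not part of the mock) exercising the implemented
# subset of the Statistics spec on the in-memory frames above.
def _example_usage():
    stats = MockDFStatistics(data_path="")
    stats.initialize(None)
    feature_map = stats.features()  # {"train": [...], "test": [...]}
    age_count = stats.count("train", "Age")  # 6 rows in the train frame above
    hist = stats.histogram("train", "Age", num_of_bins=3, global_min_value=0, global_max_value=50)
    return feature_map, age_count, hist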
| NVFlare-main | tests/unit_test/app_common/executors/statistics/mock_df_stats_executor.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import Mock
from nvflare.apis.fl_constant import ReturnCode
from tests.unit_test.app_common.workflow.mock_common_controller import MockController
class TestCommonController:
def test_handle_client_errors(self):
mock_client_task_result = Mock()
        mock_client_task_result.get_return_code.return_value = ReturnCode.EXECUTION_EXCEPTION
ctr = MockController(mock_client_task_result)
ctr.control_flow(fl_ctx=ctr.fl_ctx)
        # TODO: figure out how to register an event listener and listen for the system_panic event
| NVFlare-main | tests/unit_test/app_common/workflow/common_controller_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict
from nvflare.apis.client import Client
from nvflare.apis.fl_context import FLContext
from nvflare.apis.impl.controller import ClientTask
from nvflare.apis.shareable import Shareable
from nvflare.apis.signal import Signal
from nvflare.app_common.workflows.statistics_controller import StatisticsController
class MockStatisticsController(StatisticsController):
def __init__(self, statistic_configs: Dict[str, dict], writer_id: str):
super(MockStatisticsController, self).__init__(statistic_configs, writer_id)
def control_flow(self, abort_signal: Signal, fl_ctx: FLContext):
pass
def start_controller(self, fl_ctx: FLContext):
pass
def stop_controller(self, fl_ctx: FLContext):
pass
def process_result_of_unknown_task(
self, client: Client, task_name: str, client_task_id: str, result: Shareable, fl_ctx: FLContext
):
pass
def statistics_task_flow(self, abort_signal: Signal, fl_ctx: FLContext, statistic_task: str):
pass
def results_cb(self, client_task: ClientTask, fl_ctx: FLContext):
pass
def post_fn(self, task_name: str, fl_ctx: FLContext):
pass
| NVFlare-main | tests/unit_test/app_common/workflow/mock_statistics_controller.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/workflow/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List
from nvflare.app_common.abstract.statistics_spec import StatisticConfig
from nvflare.app_common.app_constant import StatisticsConstants as SC
from nvflare.app_common.workflows.statistics_controller import StatisticsController
from nvflare.fuel.utils import fobs
from .mock_statistics_controller import MockStatisticsController
class TestStatisticsController:
@classmethod
def setup_class(cls):
print("starting class: {} execution".format(cls.__name__))
statistic_configs = {
"count": {},
"mean": {},
"sum": {},
"stddev": {},
"histogram": {"*": {"bins": 10}, "Age": {"bins": 5, "range": [0, 120]}},
}
cls.stats_controller = MockStatisticsController(statistic_configs=statistic_configs, writer_id="")
def test_target_statistics(self):
target_statistics: List[StatisticConfig] = StatisticsController._get_target_statistics(
self.stats_controller.statistic_configs, SC.ordered_statistics[SC.STATS_1st_STATISTICS]
)
for mc in target_statistics:
assert mc.name in SC.ordered_statistics[SC.STATS_1st_STATISTICS]
if mc.name not in [SC.STATS_MAX, SC.STATS_MIN]:
assert mc.config == {}
else:
assert mc.config == {"*": {"bins": 10}, "Age": {"bins": 5, "range": [0, 120]}}
target_statistics: List[StatisticConfig] = StatisticsController._get_target_statistics(
self.stats_controller.statistic_configs, SC.ordered_statistics[SC.STATS_2nd_STATISTICS]
)
for mc in target_statistics:
assert mc.name in SC.ordered_statistics[SC.STATS_2nd_STATISTICS]
if mc.name not in [SC.STATS_HISTOGRAM]:
assert mc.config == {}
else:
assert mc.config == {"*": {"bins": 10}, "Age": {"bins": 5, "range": [0, 120]}}
def test_wait_for_all_results(self):
        # each statistic has results from only one client, fewer than the required minimum,
        # so the wait is expected to time out
client_statistics = {
"count": {"site-1": {}},
"mean": {"site-2": {}},
"sum": {"site-3": {}},
"stddev": {"site-4": {}},
}
import time
t0 = time.time()
StatisticsController._wait_for_all_results(self.stats_controller.logger, 0.5, 3, client_statistics, 0.1)
t = time.time()
second_spent = t - t0
        # with 4 statistics, each having a 0.5-second timeout
assert second_spent > 0.5 * 4
def test_prepare_input(self):
xs = self.stats_controller._prepare_inputs(SC.STATS_1st_STATISTICS)
assert xs[SC.STATISTICS_TASK_KEY] == SC.STATS_1st_STATISTICS
seq = StatisticsController._get_target_statistics(
self.stats_controller.statistic_configs, SC.ordered_statistics[SC.STATS_1st_STATISTICS]
)
rhs = [mc.name for mc in seq]
rhs.sort()
target_statistics: List[StatisticConfig] = fobs.loads(xs[SC.STATS_TARGET_STATISTICS])
lhs = [mc.name for mc in target_statistics]
lhs.sort()
assert lhs == rhs
# simulate aggregation and populate the global results
self.stats_controller.global_statistics[SC.STATS_COUNT] = {"train": {"Age": 100}, "test": {"Age": 10}}
self.stats_controller.global_statistics[SC.STATS_MEAN] = {"train": {"Age": 25}, "test": {"Age": 30}}
self.stats_controller.global_statistics[SC.STATS_MAX] = {"train": {"Age": 120}, "test": {"Age": 120}}
self.stats_controller.global_statistics[SC.STATS_MIN] = {"train": {"Age": 0}, "test": {"Age": 0}}
assert self.stats_controller.global_statistics != {}
xs = self.stats_controller._prepare_inputs(SC.STATS_2nd_STATISTICS)
assert xs[SC.STATISTICS_TASK_KEY] == SC.STATS_2nd_STATISTICS
rhs = SC.ordered_statistics[SC.STATS_2nd_STATISTICS]
rhs.sort()
target_statistics: List[StatisticConfig] = fobs.loads(xs[SC.STATS_TARGET_STATISTICS])
lhs = [mc.name for mc in target_statistics]
lhs.sort()
assert lhs == rhs
def test_validate_min_clients(self):
# waiting for 1 more client
client_statistics = {
"count": {"site-1": {}},
"mean": {"site-2": {}},
"sum": {"site-3": {}},
"stddev": {"site-4": {}},
}
assert not self.stats_controller._validate_min_clients(5, client_statistics)
# waiting for 1 more client
client_statistics = {
"count": {"site-1": {"train": {}}},
"mean": {"site-2": {"train": {}}},
"sum": {"site-3": {"train": {}}},
"stddev": {"site-4": {"train": {}}},
}
assert not self.stats_controller._validate_min_clients(5, client_statistics)
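# A small sketch of the filtering helper used throughout these tests: given the
# configured statistics, _get_target_statistics keeps only those named in the
# ordered list for a phase (with min/max inheriting the histogram config, as
# asserted in test_target_statistics).
def _example_target_statistics():
    statistic_configs = {"count": {}, "histogram": {"*": {"bins": 10}}}
    targets = StatisticsController._get_target_statistics(
        statistic_configs, SC.ordered_statistics[SC.STATS_1st_STATISTICS]
    )
    return [mc.name for mc in targets]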
| NVFlare-main | tests/unit_test/app_common/workflow/statistics_controller_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import patch
from nvflare.apis.client import Client
from nvflare.apis.controller_spec import ClientTask, Task
from nvflare.apis.dxo import from_shareable
from nvflare.apis.fl_constant import ReturnCode
from nvflare.apis.fl_context import FLContext
from nvflare.apis.shareable import Shareable
from nvflare.apis.signal import Signal
from nvflare.app_common.workflows.error_handling_controller import ErrorHandlingController
class MockController(ErrorHandlingController):
def __init__(self, mock_client_task_result):
super().__init__()
self.fl_ctx = FLContext()
self.task_name = "MockTask"
self.mock_client_task_result = mock_client_task_result
@patch.object(ErrorHandlingController, "broadcast_and_wait")
def control_flow(self, abort_signal: Signal, fl_ctx: FLContext):
self.log_info(fl_ctx, f"{self.task_name} control flow started.")
task_input = Shareable()
task = Task(name=self.task_name, data=task_input, result_received_cb=self.results_cb)
self.broadcast_and_wait(task, self.fl_ctx, ["no_where"], 0, 0)
self.log_info(fl_ctx, f"task {self.task_name} control flow end.")
def start_controller(self, fl_ctx: FLContext):
pass
def stop_controller(self, fl_ctx: FLContext):
pass
def process_result_of_unknown_task(
self, client: Client, task_name: str, client_task_id: str, result: Shareable, fl_ctx: FLContext
):
pass
def results_cb(self, client_task: ClientTask, fl_ctx: FLContext):
client_name = client_task.client.name
client_task.result = self.mock_client_task_result
result = client_task.result
rc = result.get_return_code()
if rc == ReturnCode.OK:
dxo = from_shareable(result)
self.update_result(client_name, dxo)
else:
            if rc in self.abort_job_in_error.keys():
                self.handle_client_errors(rc, client_task, fl_ctx)
# Cleanup task result
client_task.result = None
| NVFlare-main | tests/unit_test/app_common/workflow/mock_common_controller.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nvflare.apis.dxo import DXO, DataKind
from nvflare.apis.fl_context import FLContext
from nvflare.apis.signal import Signal
from nvflare.app_common.launchers.subprocess_launcher import SubprocessLauncher
class TestSubprocessLauncher:
def test_launch(self):
task_name = "__test_task"
launcher = SubprocessLauncher("echo 'test'")
dxo = DXO(DataKind.WEIGHTS, {})
fl_ctx = FLContext()
signal = Signal()
status = launcher.launch_task(task_name, dxo.to_shareable(), fl_ctx, signal)
assert status is True
def test_stop(self):
task_name = "__test_task"
launcher = SubprocessLauncher("python -c \"for i in range(1000000): print('cool')\"")
dxo = DXO(DataKind.WEIGHTS, {})
fl_ctx = FLContext()
signal = Signal()
status = launcher.launch_task(task_name, dxo.to_shareable(), fl_ctx, signal)
assert status is True
launcher.stop_task(task_name, fl_ctx)
assert launcher._process is None
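# A minimal usage sketch mirroring the tests above: launch a short-lived shell
# command as a task, then stop it. The task name and command are placeholders.
def _example_launch_and_stop():
    launcher = SubprocessLauncher("echo 'hello'")
    shareable = DXO(DataKind.WEIGHTS, {}).to_shareable()
    fl_ctx = FLContext()
    launched = launcher.launch_task("__example_task", shareable, fl_ctx, Signal())
    if launched:
        launcher.stop_task("__example_task", fl_ctx)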
| NVFlare-main | tests/unit_test/app_common/launchers/subprocess_launcher_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/launchers/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/psi/dh_psi/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from nvflare.apis.dxo import DXO, DataKind
from nvflare.app_common.app_constant import PSIConst
from nvflare.app_common.psi.dh_psi.dh_psi_workflow import DhPSIWorkFlow
class TestDhPSIWorkflow:
def test_get_ordered_sites(self):
wf = DhPSIWorkFlow()
data_1 = {PSIConst.ITEMS_SIZE: 1000}
data_2 = {PSIConst.ITEMS_SIZE: 500}
data_3 = {PSIConst.ITEMS_SIZE: 667}
dxo_1 = DXO(data_kind=DataKind.PSI, data=data_1)
dxo_2 = DXO(data_kind=DataKind.PSI, data=data_2)
dxo_3 = DXO(data_kind=DataKind.PSI, data=data_3)
results = {"site-1": dxo_1, "site-2": dxo_2, "site-3": dxo_3}
ordered_sites = wf.get_ordered_sites(results=results)
assert ordered_sites[0].size <= ordered_sites[1].size
assert ordered_sites[1].size <= ordered_sites[2].size
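# A minimal sketch of the ordering helper tested above: sites come back sorted
# ascending by their reported item counts. (Assumption: the returned site
# objects expose .size, as used in the assertions above.)
def _example_site_ordering():
    wf = DhPSIWorkFlow()
    results = {
        "site-a": DXO(data_kind=DataKind.PSI, data={PSIConst.ITEMS_SIZE: 42}),
        "site-b": DXO(data_kind=DataKind.PSI, data={PSIConst.ITEMS_SIZE: 7}),
    }
    ordered = wf.get_ordered_sites(results=results)
    return [site.size for site in ordered]  # [7, 42]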
| NVFlare-main | tests/unit_test/app_common/psi/dh_psi/dh_psi_workflow_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.apis.dxo import DXO, DataKind, MetaKey
from nvflare.apis.fl_constant import FLContextKey
from nvflare.apis.fl_context import FLContext, FLContextManager
from nvflare.app_common.app_constant import AppConstants
from nvflare.app_common.app_event_type import AppEventType
from nvflare.app_common.widgets.intime_model_selector import IntimeModelSelector
class MockSimpleEngine:
def __init__(self, job_id="unit_test"):
self.fl_ctx_mgr = FLContextManager(
engine=self,
identity_name="__mock_simple_engine",
job_id=job_id,
public_stickers={},
private_stickers={},
)
self.last_event = None
def new_context(self):
return self.fl_ctx_mgr.new_context()
def fire_event(self, event_type: str, fl_ctx: FLContext):
self.last_event = event_type
return True
class TestInTimeModelSelector:
@pytest.mark.parametrize(
"initial,received,expected",
[
(
1,
{
"client1": {"weight": 0.5, "iter_number": 1, "metric": 10},
},
True,
),
(
1,
{
"client1": {"weight": 0.5, "iter_number": 1, "metric": 1},
"client2": {"weight": 0.5, "iter_number": 1, "metric": 0.2},
},
False,
),
],
)
def test_model_selection(self, initial, received, expected):
aggregation_weights = {k: v["weight"] for k, v in received.items()}
handler = IntimeModelSelector(aggregation_weights=aggregation_weights)
handler.best_val_metric = initial
engine = MockSimpleEngine()
fl_ctx = engine.fl_ctx_mgr.new_context()
for k, v in received.items():
peer_ctx = FLContext()
peer_ctx.set_prop(FLContextKey.CLIENT_NAME, k, private=False)
dxo = DXO(
DataKind.WEIGHT_DIFF,
data=dict(),
meta={
MetaKey.INITIAL_METRICS: v["metric"],
MetaKey.NUM_STEPS_CURRENT_ROUND: v["iter_number"],
AppConstants.CURRENT_ROUND: 10,
},
)
peer_ctx.set_prop(FLContextKey.SHAREABLE, dxo.to_shareable(), private=True)
fl_ctx = engine.fl_ctx_mgr.new_context()
fl_ctx.set_prop(FLContextKey.PEER_CONTEXT, peer_ctx)
handler.handle_event(AppEventType.BEFORE_CONTRIBUTION_ACCEPT, fl_ctx)
handler.handle_event(AppEventType.BEFORE_AGGREGATION, fl_ctx)
assert (engine.last_event == AppEventType.GLOBAL_BEST_MODEL_AVAILABLE) == expected
| NVFlare-main | tests/unit_test/app_common/widgets/in_time_model_selector_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional
import pytest
from nvflare.apis.analytix import AnalyticsDataType
from nvflare.apis.dxo import DXO, DataKind
from nvflare.apis.fl_component import FLComponent
from nvflare.apis.fl_context import FLContext
from nvflare.app_common.tracking.tracker_types import LogWriterName, TrackConst
from nvflare.app_common.widgets.streaming import create_analytic_dxo, send_analytic_dxo
INVALID_TEST_CASES = [
(list(), dict(), FLContext(), TypeError, f"expect comp to be an instance of FLComponent, but got {type(list())}"),
(FLComponent(), dict(), FLContext(), TypeError, f"expect dxo to be an instance of DXO, but got {type(dict())}"),
(
FLComponent(),
DXO(data={"k": "v"}, data_kind=DataKind.ANALYTIC),
list(),
TypeError,
f"expect fl_ctx to be an instance of FLContext, but got {type(list())}",
),
]
INVALID_WRITE_TEST_CASES = [
(
list(),
1.0,
1,
AnalyticsDataType.SCALAR,
TypeError,
f"expect tag to be an instance of str, but got {type(list())}",
),
(
"tag",
list(),
2,
AnalyticsDataType.SCALAR,
TypeError,
f"expect 'tag' value to be an instance of float, but got '{type(list())}'",
),
(
list(),
1.0,
2,
AnalyticsDataType.SCALARS,
TypeError,
f"expect tag to be an instance of str, but got {type(list())}",
),
(
"tag",
1.0,
3,
AnalyticsDataType.SCALARS,
TypeError,
f"expect 'tag' value to be an instance of dict, but got '{type(1.0)}'",
),
(list(), 1.0, 4, AnalyticsDataType.TEXT, TypeError, f"expect tag to be an instance of str, but got {type(list())}"),
(
"tag",
1.0,
5,
AnalyticsDataType.TEXT,
TypeError,
f"expect 'tag' value to be an instance of str, but got '{type(1.0)}'",
),
(
list(),
1.0,
6,
AnalyticsDataType.IMAGE,
TypeError,
f"expect tag to be an instance of str, but got {type(list())}",
),
]
class TestStreaming:
@pytest.mark.parametrize("comp,dxo,fl_ctx,expected_error,expected_msg", INVALID_TEST_CASES)
def test_invalid_send_analytic_dxo(self, comp, dxo, fl_ctx, expected_error, expected_msg):
with pytest.raises(expected_error, match=expected_msg):
send_analytic_dxo(comp=comp, dxo=dxo, fl_ctx=fl_ctx)
@pytest.mark.parametrize("tag,value,step, data_type,expected_error,expected_msg", INVALID_WRITE_TEST_CASES)
def test_invalid_write_func(self, tag, value, step, data_type, expected_error, expected_msg):
with pytest.raises(expected_error, match=expected_msg):
create_analytic_dxo(tag=tag, value=value, data_type=data_type, step=step, writer=LogWriterName.TORCH_TB)
def mock_add(tag: str, value, data_type: AnalyticsDataType, global_step: Optional[int] = None, **kwargs):
    # mock_add also covers the writer behavior for MLflow and WandB; to keep one
    # function signature, we always pass writer=LogWriterName.TORCH_TB, which is
    # what appears in expected_dxo_meta.
kwargs = kwargs if kwargs else {}
if global_step is not None:
if not isinstance(global_step, int):
raise TypeError(f"Expect global step to be an instance of int, but got {type(global_step)}")
kwargs[TrackConst.GLOBAL_STEP_KEY] = global_step
dxo = create_analytic_dxo(tag=tag, value=value, data_type=data_type, writer=LogWriterName.TORCH_TB, **kwargs)
return dxo
ANALYTICS_SENDER_TEST_CASES = [
(
"text",
"textsample",
AnalyticsDataType.TEXT,
None,
{},
"ANALYTIC",
{"track_key": "text", "track_value": "textsample"},
{"analytics_data_type": AnalyticsDataType.TEXT, "tracker_key": LogWriterName.TORCH_TB},
),
(
"text",
"textsample",
AnalyticsDataType.TEXT,
2,
{},
"ANALYTIC",
{"track_key": "text", "track_value": "textsample", "global_step": 2, "analytics_kwargs": {"global_step": 2}},
{"analytics_data_type": AnalyticsDataType.TEXT, "tracker_key": LogWriterName.TORCH_TB},
),
(
"text",
"textsample",
AnalyticsDataType.TEXT,
3,
{"extra_arg": 4},
"ANALYTIC",
{
"track_key": "text",
"track_value": "textsample",
"global_step": 3,
"analytics_kwargs": {"global_step": 3, "extra_arg": 4},
},
{"analytics_data_type": AnalyticsDataType.TEXT, "tracker_key": LogWriterName.TORCH_TB},
),
(
"set_tag_key_tag_name",
"tagvalue",
AnalyticsDataType.TAG,
None,
{},
"ANALYTIC",
{"track_key": "set_tag_key_tag_name", "track_value": "tagvalue"},
{"analytics_data_type": AnalyticsDataType.TAG, "tracker_key": LogWriterName.TORCH_TB},
),
(
"log_metric_key_name",
2.4,
AnalyticsDataType.METRIC,
20,
{},
"ANALYTIC",
{
"track_key": "log_metric_key_name",
"track_value": 2.4,
"global_step": 20,
"analytics_kwargs": {"global_step": 20},
},
{"analytics_data_type": AnalyticsDataType.METRIC, "tracker_key": LogWriterName.TORCH_TB},
),
( # for WandBWriter
"metrics",
{"train_loss": 2.4},
AnalyticsDataType.METRICS,
20,
{},
"ANALYTIC",
{
"track_key": "metrics",
"track_value": {"train_loss": 2.4},
"global_step": 20,
"analytics_kwargs": {"global_step": 20},
},
{"analytics_data_type": AnalyticsDataType.METRICS, "tracker_key": LogWriterName.TORCH_TB},
),
]
INVALID_SENDER_TEST_CASES = [
(
"text",
"textsample",
AnalyticsDataType.TEXT,
None,
{"global_step": 3, "extra_arg": 4},
TypeError,
"got multiple values for keyword argument 'global_step'",
),
]
class TestAnalyticsSender:
@pytest.mark.parametrize(
"tag,value,data_type,global_step,kwargs,expected_dxo_data_kind,expected_dxo_data,expected_dxo_meta",
ANALYTICS_SENDER_TEST_CASES,
)
def test_add(
self, tag, value, data_type, global_step, kwargs, expected_dxo_data_kind, expected_dxo_data, expected_dxo_meta
):
dxo = mock_add(tag=tag, value=value, data_type=data_type, global_step=global_step, **kwargs)
assert dxo.data_kind == expected_dxo_data_kind
assert dxo.data == expected_dxo_data
assert dxo.meta == expected_dxo_meta
# Since global_step is already being set, it cannot also be in kwargs.
@pytest.mark.parametrize(
"tag,value,data_type,global_step,kwargs,expected_error,expected_msg",
INVALID_SENDER_TEST_CASES,
)
def test_add_invalid(self, tag, value, data_type, global_step, kwargs, expected_error, expected_msg):
with pytest.raises(expected_error, match=expected_msg):
dxo = mock_add(tag=tag, value=value, data_type=data_type, global_step=global_step, **kwargs)
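# A minimal sketch of building an analytics DXO directly, which is what mock_add
# does above; the tag and value here are placeholders.
def _example_create_metric_dxo():
    dxo = create_analytic_dxo(
        tag="train_loss",
        value=0.25,
        data_type=AnalyticsDataType.METRIC,
        writer=LogWriterName.TORCH_TB,
        global_step=10,
    )
    return dxo.data, dxo.meta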
| NVFlare-main | tests/unit_test/app_common/widgets/streaming_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/app_common/widgets/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import os
import shutil
import tempfile
import pytest
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID
from nvflare.lighter.impl.cert import serialize_cert
from nvflare.lighter.utils import sign_folders, verify_folder_signature
folders = ["folder1", "folder2"]
files = ["file1", "file2"]
def generate_cert(subject, subject_org, issuer, signing_pri_key, subject_pub_key, valid_days=360, ca=False):
def _x509_name(cn_name, org_name):
name = [
x509.NameAttribute(NameOID.COMMON_NAME, cn_name),
x509.NameAttribute(NameOID.ORGANIZATION_NAME, org_name),
]
return x509.Name(name)
x509_subject = _x509_name(subject, subject_org)
x509_issuer = _x509_name(issuer, "ORG")
builder = (
x509.CertificateBuilder()
.subject_name(x509_subject)
.issuer_name(x509_issuer)
.public_key(subject_pub_key)
.serial_number(x509.random_serial_number())
.not_valid_before(datetime.datetime.utcnow())
.not_valid_after(
            # the certificate is valid for `valid_days` days (default 360)
            datetime.datetime.utcnow()
            + datetime.timedelta(days=valid_days)
)
.add_extension(x509.SubjectAlternativeName([x509.DNSName(subject)]), critical=False)
)
if ca:
builder = (
builder.add_extension(
x509.SubjectKeyIdentifier.from_public_key(subject_pub_key),
critical=False,
)
.add_extension(
x509.AuthorityKeyIdentifier.from_issuer_public_key(subject_pub_key),
critical=False,
)
.add_extension(x509.BasicConstraints(ca=True, path_length=None), critical=False)
)
return builder.sign(signing_pri_key, hashes.SHA256(), default_backend())
def get_test_certs():
root_pri_key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
root_pub_key = root_pri_key.public_key()
root_cert = generate_cert("root", "nvidia", "root", root_pri_key, root_pub_key, ca=True)
client_pri_key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
client_pub_key = client_pri_key.public_key()
client_cert = generate_cert("client", "nvidia", "root", root_pri_key, client_pub_key)
server_pri_key = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
server_pub_key = server_pri_key.public_key()
server_cert = generate_cert("client", "nvidia", "root", root_pri_key, server_pub_key)
return root_cert, client_pri_key, client_cert, server_pri_key, server_cert
def create_folder():
tmp_dir = tempfile.TemporaryDirectory().name
for folder in folders:
os.makedirs(os.path.join(tmp_dir, folder))
for file in files:
with open(os.path.join(tmp_dir, folder, file), "wb") as f:
f.write(open("/dev/urandom", "rb").read(1024))
return tmp_dir
def tamper_one_file(folder):
with open(os.path.join(folder, folders[0], files[0]), "wt") as f:
f.write("fail case")
def update_and_sign_one_folder(folder, pri_key, cert):
tmp_dir = tempfile.TemporaryDirectory().name
new_folder = os.path.join(tmp_dir, "new_folder")
shutil.move(folder, new_folder)
with open(os.path.join(tmp_dir, "test_file"), "wt") as f:
f.write("fail case")
with open("server.crt", "wb") as f:
f.write(serialize_cert(cert))
sign_folders(tmp_dir, pri_key, "server.crt", max_depth=1)
return tmp_dir
def prepare_folders():
root_cert, client_pri_key, client_cert, server_pri_key, server_cert = get_test_certs()
folder = create_folder()
with open("client.crt", "wb") as f:
f.write(serialize_cert(client_cert))
with open("root.crt", "wb") as f:
f.write(serialize_cert(root_cert))
sign_folders(folder, client_pri_key, "client.crt")
return folder, server_pri_key, server_cert
@pytest.mark.xdist_group(name="lighter_utils_group")
class TestSignFolder:
def test_verify_folder(self):
folder, server_pri_key, server_cert = prepare_folders()
assert verify_folder_signature(folder, "root.crt") is True
tamper_one_file(folder)
assert verify_folder_signature(folder, "root.crt") is False
os.unlink("client.crt")
os.unlink("root.crt")
shutil.rmtree(folder)
def test_verify_updated_folder(self):
folder, server_pri_key, server_cert = prepare_folders()
assert verify_folder_signature(folder, "root.crt") is True
folder = update_and_sign_one_folder(folder, server_pri_key, server_cert)
assert verify_folder_signature(folder, "root.crt") is True
os.unlink("client.crt")
os.unlink("root.crt")
shutil.rmtree(folder)
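# A minimal end-to-end sketch of the signing flow exercised above: build and
# sign a folder tree with the helpers in this module, then verify it against
# the root certificate and clean up.
def _example_sign_and_verify():
    folder, _server_pri_key, _server_cert = prepare_folders()
    try:
        return verify_folder_signature(folder, "root.crt")  # True for an untampered tree
    finally:
        os.unlink("client.crt")
        os.unlink("root.crt")
        shutil.rmtree(folder)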
| NVFlare-main | tests/unit_test/lighter/utils_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.lighter.impl.static_file import StaticFileBuilder
class TestStaticFileBuilder:
@pytest.mark.parametrize(
"scheme",
[("grpc"), ("http"), ("tcp")],
)
def test_scheme(self, scheme):
builder = StaticFileBuilder(scheme=scheme)
assert builder.scheme == scheme
| NVFlare-main | tests/unit_test/lighter/static_file_builder_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/lighter/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.lighter.provision import prepare_project
class TestProvision:
def test_prepare_project(self):
project_config = {"api_version": 2}
with pytest.raises(ValueError, match="API version expected 3 but found 2"):
prepare_project(project_dict=project_config)
project_config = {
"api_version": 3,
"name": "mytest",
"description": "test",
"participants": [
{"type": "server", "name": "server1", "org": "org"},
{"type": "server", "name": "server2", "org": "org"},
{"type": "server", "name": "server3", "org": "org"},
],
}
with pytest.raises(
ValueError, match="Configuration error: Expect 2 or 1 server to be provisioned. project contains 3 servers."
):
prepare_project(project_dict=project_config)
| NVFlare-main | tests/unit_test/lighter/provision_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from nvflare.lighter.spec import Participant, Project
def create_participants(type, number, org, name):
p_list = list()
for i in range(number):
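        # "name" is rebuilt from its own previous value, so indices accumulate
        # ("server" -> "se0rver" -> "se10rver", ...); two calls with identical
        # arguments yield the same sequence, which test_invalid_project relies on.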
name = f"{name[:2]}{i}{name[2:]}"
p_list.append(Participant(name=name, org=org, type=type))
return p_list
class TestProject:
def test_invalid_project(self):
p1 = create_participants("server", 3, "org", "server")
p2 = create_participants("server", 3, "org", "server")
p = p1 + p2
with pytest.raises(ValueError, match=r".* se0rver .*"):
_ = Project("name", "description", p)
@pytest.mark.parametrize(
"p_type,name",
[("server", "server"), ("client", "client"), ("admin", "[email protected]"), ("overseer", "overseer")],
)
def test_get_participants_by_type(self, p_type, name):
p = create_participants(type=p_type, number=3, org="org", name=name)
prj = Project("name", "description", p)
assert prj.get_participants_by_type(p_type) == p[0]
assert prj.get_participants_by_type(p_type, first_only=False) == p
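# A minimal query sketch matching the assertions above: first_only=True (the
# default) returns a single participant, first_only=False returns the full list.
def _example_query_project():
    clients = create_participants(type="client", number=2, org="org", name="client")
    project = Project("demo", "sketch project", clients)
    first = project.get_participants_by_type("client")
    everyone = project.get_participants_by_type("client", first_only=False)
    return first, everyone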
| NVFlare-main | tests/unit_test/lighter/project_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections

import pytest

from nvflare.cli_exception import CLIException
from nvflare.lighter.service_constants import FlareServiceConstants as SC
from nvflare.lighter.spec import Participant
from nvflare.lighter.utils import update_project_server_name_config
from nvflare.tool.poc.poc_commands import (
client_gpu_assignments,
get_gpu_ids,
get_service_command,
get_service_config,
prepare_builders,
update_clients,
)


class TestPOCCommands:
def test_client_gpu_assignments(self):
clients = [f"site-{i}" for i in range(0, 12)]
gpu_ids = [0, 1, 2, 0, 3]
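        # gpu ids are handed out round-robin across the clients (and wrap when the lists differ in length)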
assignments = client_gpu_assignments(clients, gpu_ids)
assert assignments == {
"site-0": [0],
"site-1": [1],
"site-2": [2],
"site-3": [0],
"site-4": [3],
"site-5": [0],
"site-6": [1],
"site-7": [2],
"site-8": [0],
"site-9": [3],
"site-10": [0],
"site-11": [1],
        }

        clients = [f"site-{i}" for i in range(0, 4)]
gpu_ids = []
assignments = client_gpu_assignments(clients, gpu_ids)
assert assignments == {"site-0": [], "site-1": [], "site-2": [], "site-3": []}
clients = [f"site-{i}" for i in range(0, 4)]
gpu_ids = [0, 1, 2, 0, 3]
assignments = client_gpu_assignments(clients, gpu_ids)
assert assignments == {"site-0": [0, 3], "site-1": [1], "site-2": [2], "site-3": [0]}
def test_get_gpu_ids(self):
host_gpu_ids = [0]
gpu_ids = get_gpu_ids(-1, host_gpu_ids)
assert gpu_ids == [0]
gpu_ids = get_gpu_ids([0], host_gpu_ids)
assert gpu_ids == [0]
with pytest.raises(CLIException) as e:
# gpu id =1 is not valid GPU ID as the host only has 1 gpu where id = 0
            gpu_ids = get_gpu_ids([0, 1], host_gpu_ids)

    def test_get_package_command(self):
cmd = get_service_command(SC.CMD_START, "/tmp/nvflare/poc", SC.FLARE_SERVER, {})
assert "/tmp/nvflare/poc/server/startup/start.sh" == cmd
cmd = get_service_command(SC.CMD_START, "/tmp/nvflare/poc", SC.FLARE_PROJ_ADMIN, {})
assert "/tmp/nvflare/poc/[email protected]/startup/fl_admin.sh" == cmd
cmd = get_service_command(SC.CMD_START, "/tmp/nvflare/poc", "site-2000", {})
assert "/tmp/nvflare/poc/site-2000/startup/start.sh" == cmd
def test_get_package_command2(self):
project_config = {
"api_version": 3,
"name": "example_project",
"description": "NVIDIA FLARE sample project yaml file",
"participants": [
{"name": "server", "type": "server", "org": "nvidia", "fed_learn_port": 8002, "admin_port": 8003},
{"name": "[email protected]", "type": "admin", "org": "nvidia", "role": "project_admin"},
{"name": "site-1", "type": "client", "org": "nvidia"},
{"name": "site-2000", "type": "client", "org": "nvidia"},
],
"builders": [
{
"path": "nvflare.lighter.impl.static_file.StaticFileBuilder",
"args": {"config_folder": "config", "docker_image": "nvflare/nvflare"},
},
],
}
project_config = collections.OrderedDict(project_config)
global_packages = get_service_config(project_config)
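
        # with docker_image set on the StaticFileBuilder, the non-admin services should switch to docker run/stop commands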
assert global_packages[SC.IS_DOCKER_RUN] is True
cmd = get_service_command(SC.CMD_START, "/tmp/nvflare/poc", SC.FLARE_SERVER, global_packages)
assert "/tmp/nvflare/poc/server/startup/docker.sh -d" == cmd
cmd = get_service_command(SC.CMD_START, "/tmp/nvflare/poc", SC.FLARE_PROJ_ADMIN, global_packages)
assert "/tmp/nvflare/poc/[email protected]/startup/fl_admin.sh" == cmd
cmd = get_service_command(SC.CMD_START, "/tmp/nvflare/poc", "site-2000", global_packages)
assert "/tmp/nvflare/poc/site-2000/startup/docker.sh -d" == cmd
cmd = get_service_command(SC.CMD_STOP, "/tmp/nvflare/poc", SC.FLARE_SERVER, global_packages)
assert "docker stop server" == cmd
cmd = get_service_command(SC.CMD_STOP, "/tmp/nvflare/poc", SC.FLARE_PROJ_ADMIN, global_packages)
assert "touch /tmp/nvflare/poc/[email protected]/shutdown.fl" == cmd
cmd = get_service_command(SC.CMD_STOP, "/tmp/nvflare/poc", "site-2000", global_packages)
assert "docker stop site-2000" == cmd
def test_update_server_name(self):
project_config = {
"participants": [
{"name": "server1", "org": "nvidia", "type": "server"},
{"name": "[email protected]", "org": "nvidia", "role": "project_admin", "type": "admin"},
{"name": "client-1", "org": "nvidia", "type": "client"},
]
}
project_config = collections.OrderedDict(project_config)
old_server_name = "server1"
server_name = "server"
update_project_server_name_config(project_config, old_server_name, server_name)
servers = [p for p in project_config["participants"] if p["type"] == "server"]
assert len(servers) == 1
assert servers[0]["name"] == server_name
overseer_agent_builder = {"args": {"overseer_agent": {"args": {"sp_end_point": "server1: 8002: 8003"}}}}
project_config["builders"] = [overseer_agent_builder]
update_project_server_name_config(project_config, old_server_name, server_name)
assert project_config["builders"][0]["args"]["overseer_agent"]["args"]["sp_end_point"] == "server: 8002: 8003"
def test_update_clients(self):
project_config = {
"participants": [
{"name": "server1", "org": "nvidia", "type": "server"},
{"name": "[email protected]", "org": "nvidia", "role": "project_admin", "type": "admin"},
{"name": "client-1", "org": "nvidia", "type": "client"},
]
}
project_config = collections.OrderedDict(project_config)
clients = []
n_clients = 3
project_config = update_clients(clients, n_clients, project_config)
result_clients = [p["name"] for p in project_config["participants"] if p["type"] == "client"]
assert len(result_clients) == 3
assert result_clients == ["site-1", "site-2", "site-3"]
clients = ["client-1", "client-2", "client-3", "client-4"]
n_clients = 3
project_config = update_clients(clients, n_clients, project_config)
result_clients = [p["name"] for p in project_config["participants"] if p["type"] == "client"]
assert len(result_clients) == len(clients)
        assert result_clients == clients

    def test_prepare_builders(self):
project_config = {
"participants": [
{"name": "server1", "org": "nvidia", "type": "server"},
{"name": "[email protected]", "org": "nvidia", "role": "project_admin", "type": "admin"},
{"name": "client-1", "org": "nvidia", "type": "client"},
],
"builders": [
{
"path": "nvflare.lighter.impl.static_file.StaticFileBuilder",
"args": {"overseer_agent": {"args": {"sp_end_point": "server1: 8002: 8003"}}},
},
{"path": "nvflare.lighter.impl.cert.CertBuilder", "args": {}},
],
}
project_config = collections.OrderedDict(project_config)
builders = prepare_builders(project_config)
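        # POC preparation is expected to swap in Local* builder variants that rewrite server/overseer endpoints to localhost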
assert len(builders) == 2
for c in builders:
assert c.__class__.__name__ == "LocalStaticFileBuilder" or c.__class__.__name__ == "LocalCertBuilder"
if c.__class__.__name__ == "LocalStaticFileBuilder":
assert c.get_server_name(None) == "localhost"
assert c.get_overseer_name(None) == "localhost"
if c.__class__.__name__ == "LocalCertBuilder":
participants = project_config["participants"]
for p in participants:
if p["type"] == "server":
assert c.get_subject(Participant(**p)) == "localhost"
else:
                    assert c.get_subject(Participant(**p)) == p["name"]

    def test_get_packages_config(self):
project_config = {
"participants": [
{"name": "server1", "org": "nvidia", "type": "server"},
{"name": "[email protected]", "org": "nvidia", "role": "project_admin", "type": "admin"},
{"name": "client-1", "org": "nvidia", "type": "client"},
{"name": "client-2", "org": "nvidia", "type": "client"},
],
"builders": [
{
"path": "nvflare.lighter.impl.static_file.StaticFileBuilder",
"args": {"overseer_agent": {"args": {"sp_end_point": "server1: 8002: 8003"}}},
},
{"path": "nvflare.lighter.impl.cert.CertBuilder", "args": {}},
],
}
project_config = collections.OrderedDict(project_config)
global_config = get_service_config(project_config)
assert "server1" == global_config[SC.FLARE_SERVER]
assert "[email protected]" == global_config[SC.FLARE_PROJ_ADMIN]
assert ["client-1", "client-2"] == global_config[SC.FLARE_CLIENTS]
| NVFlare-main | tests/unit_test/lighter/poc_commands_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest

from nvflare.lighter.spec import Participant


class TestParticipant:
@pytest.mark.parametrize(
"type,invalid_name",
[
("server", "server_"),
("server", "server@"),
("server", "-server"),
("client", "client!"),
("client", "client@"),
("admin", "admin"),
("admin", "admin@example_1.com"),
("overseer", "overseer_"),
],
)
def test_invalid_name(self, type, invalid_name):
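        # each parametrized name is illegal (bad characters, or a non-email admin name), so construction must fail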
with pytest.raises(ValueError):
            _ = Participant(name=invalid_name, org="org", type=type)

    @pytest.mark.parametrize(
"invalid_org",
[("org-"), ("org@"), ("org!"), ("org~")],
)
def test_invalid_org(self, invalid_org):
with pytest.raises(ValueError):
_ = Participant(name="server", type="server", org=invalid_org)
@pytest.mark.parametrize(
"invalid_type",
[("type1"), ("type@"), ("type!"), ("type~"), ("gateway"), ("firewall")],
)
def test_invalid_type(self, invalid_type):
with pytest.raises(ValueError):
_ = Participant(name="server", type=invalid_type, org="org")
| NVFlare-main | tests/unit_test/lighter/participant_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/data/custom_drivers/com/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, List

from nvflare.fuel.f3.drivers.base_driver import BaseDriver
from nvflare.fuel.f3.drivers.connector_info import ConnectorInfo
from nvflare.fuel.f3.drivers.driver_params import DriverCap


class WarpDriver(BaseDriver):
    """A dummy driver to test custom driver loading"""

    def __init__(self):
        super().__init__()

    @staticmethod
    def supported_transports() -> List[str]:
        return ["warp"]

    @staticmethod
    def capabilities() -> Dict[str, Any]:
        return {DriverCap.SEND_HEARTBEAT.value: True, DriverCap.SUPPORT_SSL.value: False}

    def listen(self, connector: ConnectorInfo):
        self.connector = connector

    def connect(self, connector: ConnectorInfo):
        self.connector = connector

    def shutdown(self):
        self.close_all()

    @staticmethod
    def get_urls(scheme: str, resources: dict) -> (str, str):
        return "warp:enterprise"
| NVFlare-main | tests/unit_test/data/custom_drivers/com/example/warp_driver.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/data/custom_drivers/com/example/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/tool/__init__.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import shutil
import tempfile
from unittest.mock import MagicMock, patch

import pytest
from requests import Response

from nvflare.fuel.utils.network_utils import get_open_ports
from nvflare.tool.package_checker.utils import (
NVFlareRole,
check_overseer_running,
check_response,
get_required_args_for_overseer_agent,
try_bind_address,
try_write_dir,
)


def _mock_response(code) -> Response:
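    """Build a MagicMock standing in for a requests.Response with the given status code."""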
resp = MagicMock(spec=Response)
resp.json.return_value = {}
resp.status_code = code
    return resp


class TestUtils:
def test_try_write_exist(self):
tempdir = tempfile.mkdtemp()
assert try_write_dir(tempdir) is None
        shutil.rmtree(tempdir)

    def test_try_write_non_exist(self):
tempdir = tempfile.mkdtemp()
shutil.rmtree(tempdir)
        assert try_write_dir(tempdir) is None

    def test_try_write_exception(self):
with patch("os.path.exists", side_effect=OSError("Test")):
assert try_write_dir("hello").args == OSError("Test").args
def test_try_bind_address(self):
assert try_bind_address(host="localhost", port=get_open_ports(1)[0]) is None
def test_try_bind_address_error(self):
host = "localhost"
port = get_open_ports(1)[0]
with patch("socket.socket.bind", side_effect=OSError("Test")):
            assert try_bind_address(host=host, port=port).args == OSError("Test").args

    @pytest.mark.parametrize("resp, result", [(None, False), (_mock_response(200), True), (_mock_response(404), False)])
def test_check_response(self, resp, result):
        assert check_response(resp=resp) == result

    def test_check_overseer_running(self):
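        # patch the internal session/data/request helpers so no live overseer is contacted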
with patch("nvflare.tool.package_checker.utils._create_http_session") as mock2:
mock2.return_value = None
with patch("nvflare.tool.package_checker.utils._prepare_data") as mock3:
mock3.return_value = None
with patch("nvflare.tool.package_checker.utils._send_request") as mock4:
mock4.return_value = _mock_response(200)
resp, err = check_overseer_running(
startup="test",
overseer_agent_args={"overseer_end_point": "random"},
role="",
)
            assert resp.status_code == 200

    @pytest.mark.parametrize(
"overseer_agent_class, role, result",
[
(
"nvflare.ha.overseer_agent.HttpOverseerAgent",
NVFlareRole.SERVER,
["overseer_end_point", "role", "project", "name", "fl_port", "admin_port"],
),
(
"nvflare.ha.overseer_agent.HttpOverseerAgent",
NVFlareRole.CLIENT,
["overseer_end_point", "role", "project", "name"],
),
(
"nvflare.ha.overseer_agent.HttpOverseerAgent",
NVFlareRole.ADMIN,
["overseer_end_point", "role", "project", "name"],
),
("nvflare.ha.dummy_overseer_agent.DummyOverseerAgent", NVFlareRole.SERVER, ["sp_end_point"]),
("nvflare.ha.dummy_overseer_agent.DummyOverseerAgent", NVFlareRole.CLIENT, ["sp_end_point"]),
("nvflare.ha.dummy_overseer_agent.DummyOverseerAgent", NVFlareRole.ADMIN, ["sp_end_point"]),
],
)
def test_get_required_args_for_overseer_agent(self, overseer_agent_class, role, result):
assert get_required_args_for_overseer_agent(overseer_agent_class, role) == result
| NVFlare-main | tests/unit_test/tool/package_checker/utils_test.py |
# Copyright (c) 2022, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/tool/package_checker/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import patch

import pytest
from pyhocon import ConfigFactory as CF

from nvflare.fuel_opt.utils.pyhocon_loader import PyhoconConfig
from nvflare.tool.job.job_cli import _update_client_app_config_script, convert_args_list_to_dict


class TestJobCLI:
@pytest.mark.parametrize("inputs, result", [(["a=1", "b=2", "c = 3"], dict(a="1", b="2", c="3"))])
def test_convert_args_list_to_dict(self, inputs, result):
r = convert_args_list_to_dict(inputs)
        assert r == result

    def test__update_client_app_config_script(self):
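        # mock ConfigFactory.load_config so the test parses an in-memory pyhocon config instead of a file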
with patch("nvflare.fuel.utils.config_factory.ConfigFactory.load_config") as mock2:
conf = CF.parse_string(
"""
{
format_version = 2
app_script = "python custom/cifar10.py"
app_config = ""
executors = [
{
tasks = ["train"]
executor {
name = "PTFilePipeLauncherExecutor"
args {
launcher_id = "launcher"
heartbeat_timeout = 60
}
}
}
],
task_result_filters= []
task_data_filters = []
components = [
{
id = "launcher"
name = "SubprocessLauncher"
args.script = "{app_script} {app_config} "
}
]
}
"""
)
mock2.return_value = PyhoconConfig(conf, "/tmp/my_job/app/config/config_fed_client.conf")
app_config = ["trainer.batch_size=1024", "eval_iters=100", "lr=0.1"]
config, config_path = _update_client_app_config_script("/tmp/my_job/", app_config)
assert config.get("app_config") == "--trainer.batch_size 1024 --eval_iters 100 --lr 0.1"
| NVFlare-main | tests/unit_test/tool/job/job_cli_test.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/tool/job/__init__.py |
# Copyright (c) 2023, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| NVFlare-main | tests/unit_test/tool/job/config/__init__.py |